Subversion Repositories Kolibri OS


Rev 5270 → Rev 6082
@@ -20,33 +20,33 @@
  * atomic_read - read atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically reads the value of @v.
  */
-static inline int atomic_read(const atomic_t *v)
+static __always_inline int atomic_read(const atomic_t *v)
 {
-	return ACCESS_ONCE((v)->counter);
+	return READ_ONCE((v)->counter);
 }
 
 /**
  * atomic_set - set atomic variable
  * @v: pointer of type atomic_t
  * @i: required value
  *
  * Atomically sets the value of @v to @i.
  */
-static inline void atomic_set(atomic_t *v, int i)
+static __always_inline void atomic_set(atomic_t *v, int i)
 {
-	v->counter = i;
+	WRITE_ONCE(v->counter, i);
 }
 
 /**
  * atomic_add - add integer to atomic variable
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v.
  */
-static inline void atomic_add(int i, atomic_t *v)
+static __always_inline void atomic_add(int i, atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "addl %1,%0"
 		     : "+m" (v->counter)
@@ -58,11 +58,11 @@
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
  * Atomically subtracts @i from @v.
  */
-static inline void atomic_sub(int i, atomic_t *v)
+static __always_inline void atomic_sub(int i, atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "subl %1,%0"
 		     : "+m" (v->counter)
 		     : "ir" (i));
 }
@@ -74,20 +74,20 @@
  *
  * Atomically subtracts @i from @v and returns
  * true if the result is zero, or false for all
  * other cases.
  */
-static inline int atomic_sub_and_test(int i, atomic_t *v)
+static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
 }
 
 /**
  * atomic_inc - increment atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically increments @v by 1.
  */
-static inline void atomic_inc(atomic_t *v)
+static __always_inline void atomic_inc(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "incl %0"
 		     : "+m" (v->counter));
@@ -97,10 +97,10 @@
  * atomic_dec - decrement atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically decrements @v by 1.
  */
-static inline void atomic_dec(atomic_t *v)
+static __always_inline void atomic_dec(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "decl %0"
 		     : "+m" (v->counter));
 }
@@ -111,10 +111,10 @@
  *
  * Atomically decrements @v by 1 and
  * returns true if the result is 0, or false for all other
  * cases.
  */
-static inline int atomic_dec_and_test(atomic_t *v)
+static __always_inline int atomic_dec_and_test(atomic_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
 }
 
@@ -124,10 +124,10 @@
  *
  * Atomically increments @v by 1
  * and returns true if the result is zero, or false for all
  * other cases.
  */
-static inline int atomic_inc_and_test(atomic_t *v)
+static __always_inline int atomic_inc_and_test(atomic_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
 }
 
@@ -138,10 +138,10 @@
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static inline int atomic_add_negative(int i, atomic_t *v)
+static __always_inline int atomic_add_negative(int i, atomic_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
 }
 
@@ -150,9 +150,9 @@
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v and returns @i + @v
  */
-static inline int atomic_add_return(int i, atomic_t *v)
+static __always_inline int atomic_add_return(int i, atomic_t *v)
 {
 	return i + xadd(&v->counter, i);
 }
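
Note: xadd returns the counter's old value while atomically adding to it, which is why atomic_add_return() computes i + xadd(...). A worked example, assuming the counter starts at 5:

atomic_t v = ATOMIC_INIT(5);
int n = atomic_add_return(3, &v);	/* xadd stores 8 and returns the old
					 * value 5; n == 3 + 5 == 8, the new value */
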
@@ -162,34 +162,49 @@
  * @v: pointer of type atomic_t
  * @i: integer value to subtract
  *
  * Atomically subtracts @i from @v and returns @v - @i
  */
-static inline int atomic_sub_return(int i, atomic_t *v)
+static __always_inline int atomic_sub_return(int i, atomic_t *v)
 {
 	return atomic_add_return(-i, v);
 }
 
 #define atomic_inc_return(v)  (atomic_add_return(1, v))
 #define atomic_dec_return(v)  (atomic_sub_return(1, v))
 
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	return cmpxchg(&v->counter, old, new);
 }
 
 static inline int atomic_xchg(atomic_t *v, int new)
 {
 	return xchg(&v->counter, new);
 }
 
+#define ATOMIC_OP(op)							\
+static inline void atomic_##op(int i, atomic_t *v)			\
+{									\
+	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
+			: "+m" (v->counter)				\
+			: "ir" (i)					\
+			: "memory");					\
+}
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
+#undef ATOMIC_OP
+
 /**
  * __atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as @v was not already @u.
  * Returns the old value of @v.
  */
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
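
Note on the inserted ATOMIC_OP() block: one template stamps out atomic_and(), atomic_or() and atomic_xor(); ## pastes the operation name into the function identifier, and #op stringizes it into the instruction mnemonic. ATOMIC_OP(or), for example, expands to:

static inline void atomic_or(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "orl %1,%0"	/* #op"l" -> "orl" */
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}
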
@@ -211,25 +226,15 @@
  * @v: pointer to type int
  *
  * Atomically adds 1 to @v
  * Returns the new value of @u
  */
-static inline short int atomic_inc_short(short int *v)
+static __always_inline short int atomic_inc_short(short int *v)
 {
 	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
 	return *v;
 }
-
-/* These are x86-specific, used by some header files */
-#define atomic_clear_mask(mask, addr)				\
-	asm volatile(LOCK_PREFIX "andl %0,%1"			\
-		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
-
-#define atomic_set_mask(mask, addr)				\
-	asm volatile(LOCK_PREFIX "orl %0,%1"			\
-		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
-		     : "memory")
 
 #ifdef CONFIG_X86_32
 # include <asm/atomic64_32.h>
 #else
 # include <asm/atomic64_64.h>
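
Note: the final hunk shows only the signature change of __atomic_add_unless(); its body falls outside the displayed lines. Such a helper is conventionally built as an atomic_cmpxchg() retry loop, sketched below (an illustration of the standard pattern, not the file's verbatim body):

static inline int __atomic_add_unless_sketch(atomic_t *v, int a, int u)
{
	int c = atomic_read(v), old;

	for (;;) {
		if (c == u)		/* already @u: add nothing */
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (old == c)		/* no concurrent update: added */
			break;
		c = old;		/* raced: retry with the fresh value */
	}
	return c;			/* old value, as documented */
}
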