#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}
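
/*
 * Illustrative sketch, not part of the upstream header: ATOMIC_INIT()
 * is the compile-time initializer, atomic_set() its runtime
 * counterpart, and atomic_read() a tear-free load. The function name
 * below is invented for the example.
 */
static __always_inline int example_read_set(void)
{
	atomic_t counter = ATOMIC_INIT(0);	/* compile-time initializer */

	atomic_set(&counter, 42);	/* plain store; no other CPU may race here */
	return atomic_read(&counter);	/* returns 42 */
}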

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
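
/*
 * Illustrative sketch, not part of the upstream header: pairing
 * atomic_add() with atomic_sub_and_test() so that whichever caller
 * returns the last units learns it must do the cleanup. The function
 * names are invented for the example.
 */
static __always_inline void example_grab_units(atomic_t *count, int n)
{
	atomic_add(n, count);		/* take n units */
}

static __always_inline int example_release_units(atomic_t *count, int n)
{
	/* True only for the caller whose subtraction reached zero. */
	return atomic_sub_and_test(n, count);
}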

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
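
/*
 * Illustrative sketch, not part of the upstream header: the classic
 * reference-counting pattern built on atomic_inc()/atomic_dec_and_test().
 * Exactly one caller of example_put() sees a true return and may free
 * the object. The type and function names are invented for the example.
 */
struct example_obj {
	atomic_t refcount;
};

static __always_inline void example_get(struct example_obj *obj)
{
	atomic_inc(&obj->refcount);
}

static __always_inline int example_put(struct example_obj *obj)
{
	/* Non-zero means the last reference was just dropped. */
	return atomic_dec_and_test(&obj->refcount);
}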

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}
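
/*
 * Illustrative sketch, not part of the upstream header: a signed
 * credit counter where the caller needs to know the moment the
 * balance drops below zero. The identifiers are invented for the
 * example.
 */
static __always_inline int example_consume_credit(atomic_t *credits, int cost)
{
	/* True if subtracting @cost drove the balance negative. */
	return atomic_add_negative(-cost, credits);
}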

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
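
/*
 * Illustrative sketch, not part of the upstream header:
 * atomic_inc_return() hands each caller a distinct result, which is
 * all a lock-free ID allocator needs. The identifiers are invented
 * for the example.
 */
static __always_inline int example_next_id(atomic_t *last_id)
{
	/* Concurrent callers each receive a unique, increasing value. */
	return atomic_inc_return(last_id);
}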

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
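
/*
 * Illustrative sketch, not part of the upstream header: the standard
 * compare-and-swap retry loop, here recording a running maximum.
 * atomic_cmpxchg() returns the value it actually found, so the loop
 * retries until either the update lands or the stored value is
 * already large enough. The function name is invented for the example.
 */
static __always_inline void example_track_max(atomic_t *max, int val)
{
	int old = atomic_read(max);

	while (old < val) {
		int seen = atomic_cmpxchg(max, old, val);

		if (seen == old)	/* our value was installed */
			break;
		old = seen;		/* lost the race; re-evaluate */
	}
}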

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OP
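
/*
 * Illustrative sketch, not part of the upstream header: the and/or/xor
 * helpers generated above treat an atomic_t as a small flag word.
 * The flag constant and function names are invented for the example.
 */
#define EXAMPLE_FLAG_BUSY	0x2

static __always_inline void example_mark_busy(atomic_t *flags)
{
	atomic_or(EXAMPLE_FLAG_BUSY, flags);	/* set the bit */
}

static __always_inline void example_clear_busy(atomic_t *flags)
{
	atomic_and(~EXAMPLE_FLAG_BUSY, flags);	/* clear the bit */
}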

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
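
/*
 * Illustrative sketch, not part of the upstream header: the usual
 * consumer of __atomic_add_unless() is an "increment unless zero"
 * helper that takes a reference only if the object has not already
 * begun dying. The function name is invented for the example.
 */
static __always_inline int example_inc_not_zero(atomic_t *refcount)
{
	/* Non-zero return means a reference was safely taken. */
	return __atomic_add_unless(refcount, 1, 0) != 0;
}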

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}
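
/*
 * Illustrative sketch, not part of the upstream header: unlike the
 * other helpers, atomic_inc_short() operates on a bare short int.
 * The variable name is invented for the example.
 */
static __always_inline short int example_bump_epoch(short int *epoch)
{
	return atomic_inc_short(epoch);	/* returns the incremented value */
}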

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */