#ifndef _ASM_X86_ATOMIC_32_H
#define _ASM_X86_ATOMIC_32_H

#include 
#include 
//#include 
#include 

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return v->counter;
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

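/*
 * Usage sketch (editorial illustration, not part of the original
 * header; stat_count, stats_reset and stats_snapshot are hypothetical
 * names):
 *
 *	static atomic_t stat_count = ATOMIC_INIT(0);
 *
 *	static void stats_reset(void)
 *	{
 *		atomic_set(&stat_count, 0);
 *	}
 *
 *	static int stats_snapshot(void)
 *	{
 *		return atomic_read(&stat_count);
 *	}
 *
 * atomic_read()/atomic_set() compile to plain aligned 32-bit loads
 * and stores, which are already atomic on x86 without a LOCK prefix.
 */
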
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

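/*
 * Editorial note: LOCK_PREFIX typically expands to the x86 "lock"
 * instruction prefix on SMP builds (and may be empty on uniprocessor
 * builds), making the read-modify-write above atomic across CPUs.
 * A typical use is a shared counter updated from several contexts
 * (event_bytes is a hypothetical example):
 *
 *	static atomic_t event_bytes = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &event_bytes);
 *	...
 *	atomic_sub(len, &event_bytes);
 */
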
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

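/*
 * Usage sketch (editorial illustration): atomic_dec_and_test() is the
 * classic reference-count release. Exactly one caller observes the
 * counter hitting zero and frees the object; struct obj and
 * free_obj() are hypothetical:
 *
 *	static void put_obj(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcount))
 *			free_obj(o);
 *	}
 */
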
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}

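/*
 * Editorial note: xaddl atomically exchanges and adds, leaving the
 * *old* value of v->counter in the register operand; adding back the
 * saved delta (__i) yields the post-add value that the function
 * returns. A hypothetical quota check built on it:
 *
 *	if (atomic_add_return(len, &pool_used) > POOL_LIMIT) {
 *		atomic_sub(len, &pool_used);
 *		return -1;
 *	}
 */
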
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

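/*
 * Usage sketch (editorial illustration): atomic_cmpxchg() is the
 * building block for lock-free update loops, e.g. an atomic maximum
 * (atomic_max() is a hypothetical helper):
 *
 *	static void atomic_max(atomic_t *v, int x)
 *	{
 *		int old = atomic_read(v);
 *		while (old < x) {
 *			int seen = atomic_cmpxchg(v, old, x);
 *			if (seen == old)
 *				break;		// we installed x
 *			old = seen;		// lost the race; retry
 *		}
 *	}
 */
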
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

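/*
 * Usage sketch (editorial illustration): atomic_inc_not_zero() lets a
 * lookup take a reference only while the object is still live, i.e.
 * it never resurrects a refcount that has already dropped to zero
 * (table_lookup() is hypothetical):
 *
 *	obj = table_lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcount))
 *		obj = NULL;		// raced with the final put
 */
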
#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")

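/*
 * Usage sketch (editorial illustration): the mask helpers operate on
 * a plain 32-bit word of flag bits rather than an atomic_t
 * (irq_flags and IRQ_PENDING are hypothetical):
 *
 *	static unsigned long irq_flags;
 *
 *	atomic_set_mask(IRQ_PENDING, &irq_flags);	// set bit(s)
 *	atomic_clear_mask(IRQ_PENDING, &irq_flags);	// clear bit(s)
 */
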
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

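/*
 * Editorial note: on x86 every LOCK-prefixed read-modify-write already
 * acts as a full memory barrier, so the macros above only need
 * barrier() to stop compiler reordering, e.g.:
 *
 *	req->status = DONE;		// must be visible before...
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&req->pending);	// ...the pending count drops
 */
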
/* A 64-bit atomic type */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

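/*
 * Editorial note: the __aligned(8) keeps the counter naturally
 * aligned, so the locked cmpxchg8b underlying the 64-bit primitives
 * below never straddles a cache line (a split locked access is very
 * expensive). Initialization mirrors the 32-bit form:
 *
 *	static atomic64_t bytes_total = ATOMIC64_INIT(0);
 */
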
extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);

/**
 * atomic64_xchg - xchg atomic64 variable
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
 * Atomically xchgs the value of @ptr to @new_val and returns
 * the old value.
 */
extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);

/**
 * atomic64_set - set atomic64 variable
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
 * Atomically sets the value of @ptr to @new_val.
 */
extern void atomic64_set(atomic64_t *ptr, u64 new_val);

/**
 * atomic64_read - read atomic64 variable
 * @ptr:      pointer to type atomic64_t
 *
 * Atomically reads the value of @ptr and returns it.
 */
static inline u64 atomic64_read(atomic64_t *ptr)
{
	u64 res;

	/*
	 * Note, we inline this atomic64_t primitive because
	 * it only clobbers EAX/EDX and leaves the others
	 * untouched. We also (somewhat subtly) rely on the
	 * fact that cmpxchg8b returns the current 64-bit value
	 * of the memory location we are touching:
	 */
	asm volatile(
		"mov %%ebx, %%eax\n\t"
		"mov %%ecx, %%edx\n\t"
		LOCK_PREFIX "cmpxchg8b %1\n"
			: "=&A" (res)
			: "m" (*ptr)
		);

	return res;
}

/**
 * atomic64_add_return - add and return
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns @delta + *@ptr
 */
extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);

/*
 * Other variants with different arithmetic operators:
 */
extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
extern u64 atomic64_inc_return(atomic64_t *ptr);
extern u64 atomic64_dec_return(atomic64_t *ptr);

/**
 * atomic64_add - add integer to atomic64 variable
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr.
 */
extern void atomic64_add(u64 delta, atomic64_t *ptr);

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr.
 */
extern void atomic64_sub(u64 delta, atomic64_t *ptr);

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr and returns
 * true if the result is zero, or false for all
 * other cases.
 */
extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);

/**
 * atomic64_inc - increment atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1.
 */
extern void atomic64_inc(atomic64_t *ptr);

/**
 * atomic64_dec - decrement atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1.
 */
extern void atomic64_dec(atomic64_t *ptr);

/**
 * atomic64_dec_and_test - decrement and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
extern int atomic64_dec_and_test(atomic64_t *ptr);

/**
 * atomic64_inc_and_test - increment and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
extern int atomic64_inc_and_test(atomic64_t *ptr);

/**
 * atomic64_add_negative - add and test if negative
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);

#include 
#endif /* _ASM_X86_ATOMIC_32_H */