#ifndef _ASM_X86_ATOMIC_32_H
#define _ASM_X86_ATOMIC_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }
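
/*
 * Usage sketch (illustrative only, not part of the original header):
 * a statically initialized counter driven by the operations defined
 * below. The identifiers are hypothetical.
 */
#if 0
static atomic_t nr_events = ATOMIC_INIT(0);

static void record_event(void)
{
	atomic_inc(&nr_events);		/* one LOCK'ed incl, no spinlock */
}

static int snapshot_events(void)
{
	return atomic_read(&nr_events);	/* plain load; atomic on x86 */
}
#endif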

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return v->counter;
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
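
/*
 * Usage sketch (illustrative only): the classic reference-count idiom.
 * atomic_dec_and_test() folds the decrement and the zero test into one
 * LOCK'ed instruction, so exactly one caller observes the count hitting
 * zero. The object type and release function are hypothetical.
 */
#if 0
struct object {
	atomic_t refcount;
	/* ... payload ... */
};

static void object_put(struct object *obj)
{
	if (atomic_dec_and_test(&obj->refcount))
		free_object(obj);	/* only the last holder frees */
}
#endif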

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
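
/*
 * Usage sketch (illustrative only): atomic_add_return() makes a cheap
 * unique-ID allocator, since the xadd guarantees every caller observes
 * a distinct result. The names are hypothetical.
 */
#if 0
static atomic_t next_id = ATOMIC_INIT(0);

static int alloc_id(void)
{
	return atomic_add_return(1, &next_id);	/* first caller gets 1 */
}
#endif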

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
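
/*
 * Usage sketch (illustrative only): the standard atomic_cmpxchg() retry
 * loop for a read-modify-write that the primitives above do not provide
 * directly, here recording a running maximum. The helper is hypothetical.
 */
#if 0
static void atomic_track_max(atomic_t *max, int val)
{
	int old = atomic_read(max);

	while (old < val) {
		int seen = atomic_cmpxchg(max, old, val);
		if (seen == old)
			break;		/* our value was installed */
		old = seen;		/* somebody raced us; retry */
	}
}
#endif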

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
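
/*
 * Usage sketch (illustrative only), continuing the hypothetical
 * struct object from the earlier sketch: atomic_inc_not_zero() is the
 * safe way to take a new reference on an object that a concurrent
 * object_put() may already be tearing down.
 */
#if 0
static struct object *object_get(struct object *obj)
{
	if (!atomic_inc_not_zero(&obj->refcount))
		return NULL;	/* count already hit zero; object is dying */
	return obj;
}
#endif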

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" (mask), "m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

/* A 64-bit atomic type */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);

/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
    long long o;
    unsigned high = (unsigned)(n >> 32);
    unsigned low = (unsigned)n;

    /*
     * Loop until the LOCK'ed cmpxchg8b installs EBX:ECX; on each failed
     * attempt cmpxchg8b reloads the current value into EDX:EAX, which is
     * also how we obtain the old value to return.
     */
    asm volatile(
    "1:                 \n\t"
    LOCK_PREFIX "cmpxchg8b (%%esi)  \n\t"
    "jnz 1b             \n\t"
    :"=&A" (o)
    :"S" (v), "b" (low), "c" (high)
    : "memory", "cc");
    return o;
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
    unsigned high = (unsigned)(i >> 32);
    unsigned low = (unsigned)i;

    /*
     * cmpxchg8b retry loop as in atomic64_xchg(); the old value left in
     * EAX:EDX is simply discarded, hence the register clobbers.
     */
    asm volatile (
    "1:                 \n\t"
    "cmpxchg8b (%%esi)  \n\t"
    "jnz 1b             \n\t"
    :
    :"S" (v), "b" (low), "c" (high)
    : "eax", "edx", "memory", "cc");
}

/**
 * atomic64_read - read atomic64 variable
 * @ptr:      pointer to type atomic64_t
 *
 * Atomically reads the value of @ptr and returns it.
 */
static inline u64 atomic64_read(atomic64_t *ptr)
{
	u64 res;

	/*
	 * Note, we inline this atomic64_t primitive because
	 * it only clobbers EAX/EDX and leaves the others
	 * untouched. We also (somewhat subtly) rely on the
	 * fact that cmpxchg8b returns the current 64-bit value
	 * of the memory location we are touching:
	 */
	asm volatile(
		"mov %%ebx, %%eax\n\t"
		"mov %%ecx, %%edx\n\t"
		LOCK_PREFIX "cmpxchg8b %1\n"
			: "=&A" (res)
			: "m" (*ptr)
		);

	return res;
}
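
/*
 * Usage sketch (illustrative only): on 32-bit x86 a plain 64-bit load or
 * store can tear, so a shared 64-bit statistic should go through the
 * atomic64_* operations (atomic64_add() is declared below). The names
 * here are hypothetical.
 */
#if 0
static atomic64_t total_bytes;

static void account_bytes(u64 len)
{
	atomic64_add(len, &total_bytes);	/* declared below */
}

static u64 snapshot_bytes(void)
{
	return atomic64_read(&total_bytes);	/* one consistent cmpxchg8b read */
}
#endif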

/**
 * atomic64_add_return - add and return
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns @delta + *@ptr
 */
extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);

/*
 * Other variants with different arithmetic operators:
 */
extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
extern u64 atomic64_inc_return(atomic64_t *ptr);
extern u64 atomic64_dec_return(atomic64_t *ptr);

/**
 * atomic64_add - add integer to atomic64 variable
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr.
 */
extern void atomic64_add(u64 delta, atomic64_t *ptr);

/**
 * atomic64_sub - subtract the atomic64 variable
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr.
 */
extern void atomic64_sub(u64 delta, atomic64_t *ptr);

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr and returns
 * true if the result is zero, or false for all
 * other cases.
 */
extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);

/**
 * atomic64_inc - increment atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1.
 */
extern void atomic64_inc(atomic64_t *ptr);

/**
 * atomic64_dec - decrement atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1.
 */
extern void atomic64_dec(atomic64_t *ptr);

/**
 * atomic64_dec_and_test - decrement and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
extern int atomic64_dec_and_test(atomic64_t *ptr);

/**
 * atomic64_inc_and_test - increment and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
extern int atomic64_inc_and_test(atomic64_t *ptr);

/**
 * atomic64_add_negative - add and test if negative
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);

#include <asm-generic/atomic-long.h>
#endif /* _ASM_X86_ATOMIC_32_H */