/* Reconstructed from a websvn diff export (rev 5270 vs rev 6082/6588);
 * the later-revision column was kept. */
1 | #ifndef _ASM_X86_ATOMIC64_32_H |
1 | #ifndef _ASM_X86_ATOMIC64_32_H |
2 | #define _ASM_X86_ATOMIC64_32_H |
2 | #define _ASM_X86_ATOMIC64_32_H |
3 | 3 | ||
4 | #include |
4 | #include |
5 | #include |
5 | #include |
6 | #include |
6 | #include |
7 | //#include |
7 | #include |
8 | 8 | ||
9 | /* An 64bit atomic type */ |
9 | /* An 64bit atomic type */ |
10 | 10 | ||
11 | typedef struct { |
11 | typedef struct { |
12 | u64 __aligned(8) counter; |
12 | u64 __aligned(8) counter; |
13 | } atomic64_t; |
13 | } atomic64_t; |
14 | 14 | ||
15 | #define ATOMIC64_INIT(val) { (val) } |
15 | #define ATOMIC64_INIT(val) { (val) } |
16 | 16 | ||
/* Declare one out-of-line atomic64 helper.  The helpers use a private
 * register-based calling convention, hence the (atomic64_t *, ...) prototype. */
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
/* When ATOMIC64_EXPORT is defined, also emit an export for each helper. */
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif
24 | 24 | ||
25 | #ifdef CONFIG_X86_CMPXCHG64 |
25 | #ifdef CONFIG_X86_CMPXCHG64 |
26 | #define __alternative_atomic64(f, g, out, in...) \ |
26 | #define __alternative_atomic64(f, g, out, in...) \ |
27 | asm volatile("call %P[func]" \ |
27 | asm volatile("call %P[func]" \ |
28 | : out : [func] "i" (atomic64_##g##_cx8), ## in) |
28 | : out : [func] "i" (atomic64_##g##_cx8), ## in) |
29 | 29 | ||
30 | #define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8) |
30 | #define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8) |
31 | #else |
31 | #else |
32 | #define __alternative_atomic64(f, g, out, in...) \ |
32 | #define __alternative_atomic64(f, g, out, in...) \ |
33 | alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \ |
33 | alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \ |
34 | X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in) |
34 | X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in) |
35 | 35 | ||
36 | #define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \ |
36 | #define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \ |
37 | ATOMIC64_DECL_ONE(sym##_386) |
37 | ATOMIC64_DECL_ONE(sym##_386) |
38 | 38 | ||
39 | ATOMIC64_DECL_ONE(add_386); |
39 | ATOMIC64_DECL_ONE(add_386); |
40 | ATOMIC64_DECL_ONE(sub_386); |
40 | ATOMIC64_DECL_ONE(sub_386); |
41 | ATOMIC64_DECL_ONE(inc_386); |
41 | ATOMIC64_DECL_ONE(inc_386); |
42 | ATOMIC64_DECL_ONE(dec_386); |
42 | ATOMIC64_DECL_ONE(dec_386); |
43 | #endif |
43 | #endif |
44 | 44 | ||
45 | #define alternative_atomic64(f, out, in...) \ |
45 | #define alternative_atomic64(f, out, in...) \ |
46 | __alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in) |
46 | __alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in) |
47 | 47 | ||
48 | ATOMIC64_DECL(read); |
48 | ATOMIC64_DECL(read); |
49 | ATOMIC64_DECL(set); |
49 | ATOMIC64_DECL(set); |
50 | ATOMIC64_DECL(xchg); |
50 | ATOMIC64_DECL(xchg); |
51 | ATOMIC64_DECL(add_return); |
51 | ATOMIC64_DECL(add_return); |
52 | ATOMIC64_DECL(sub_return); |
52 | ATOMIC64_DECL(sub_return); |
53 | ATOMIC64_DECL(inc_return); |
53 | ATOMIC64_DECL(inc_return); |
54 | ATOMIC64_DECL(dec_return); |
54 | ATOMIC64_DECL(dec_return); |
55 | ATOMIC64_DECL(dec_if_positive); |
55 | ATOMIC64_DECL(dec_if_positive); |
56 | ATOMIC64_DECL(inc_not_zero); |
56 | ATOMIC64_DECL(inc_not_zero); |
57 | ATOMIC64_DECL(add_unless); |
57 | ATOMIC64_DECL(add_unless); |
/* The declaration helpers are not needed past this point. */
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
63 | 63 | ||
64 | /** |
64 | /** |
65 | * atomic64_cmpxchg - cmpxchg atomic64 variable |
65 | * atomic64_cmpxchg - cmpxchg atomic64 variable |
66 | * @v: pointer to type atomic64_t |
66 | * @v: pointer to type atomic64_t |
67 | * @o: expected value |
67 | * @o: expected value |
68 | * @n: new value |
68 | * @n: new value |
69 | * |
69 | * |
70 | * Atomically sets @v to @n if it was equal to @o and returns |
70 | * Atomically sets @v to @n if it was equal to @o and returns |
71 | * the old value. |
71 | * the old value. |
72 | */ |
72 | */ |
73 | 73 | ||
74 | static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n) |
74 | static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n) |
75 | { |
75 | { |
76 | return cmpxchg64(&v->counter, o, n); |
76 | return cmpxchg64(&v->counter, o, n); |
77 | } |
77 | } |
78 | 78 | ||
79 | /** |
79 | /** |
80 | * atomic64_xchg - xchg atomic64 variable |
80 | * atomic64_xchg - xchg atomic64 variable |
81 | * @v: pointer to type atomic64_t |
81 | * @v: pointer to type atomic64_t |
82 | * @n: value to assign |
82 | * @n: value to assign |
83 | * |
83 | * |
84 | * Atomically xchgs the value of @v to @n and returns |
84 | * Atomically xchgs the value of @v to @n and returns |
85 | * the old value. |
85 | * the old value. |
86 | */ |
86 | */ |
87 | static inline long long atomic64_xchg(atomic64_t *v, long long n) |
87 | static inline long long atomic64_xchg(atomic64_t *v, long long n) |
88 | { |
88 | { |
89 | long long o; |
89 | long long o; |
90 | unsigned high = (unsigned)(n >> 32); |
90 | unsigned high = (unsigned)(n >> 32); |
91 | unsigned low = (unsigned)n; |
91 | unsigned low = (unsigned)n; |
92 | 92 | ||
93 | asm volatile( |
93 | asm volatile( |
94 | "1: \n\t" |
94 | "1: \n\t" |
95 | "cmpxchg8b (%%esi) \n\t" |
95 | "cmpxchg8b (%%esi) \n\t" |
96 | "jnz 1b \n\t" |
96 | "jnz 1b \n\t" |
97 | :"=&A" (o) |
97 | :"=&A" (o) |
98 | :"S" (v), "b" (low), "c" (high) |
98 | :"S" (v), "b" (low), "c" (high) |
99 | : "memory", "cc"); |
99 | : "memory", "cc"); |
100 | return o; |
100 | return o; |
101 | } |
101 | } |
102 | 102 | ||
103 | /** |
103 | /** |
104 | * atomic64_set - set atomic64 variable |
104 | * atomic64_set - set atomic64 variable |
105 | * @v: pointer to type atomic64_t |
105 | * @v: pointer to type atomic64_t |
106 | * @i: value to assign |
106 | * @i: value to assign |
107 | * |
107 | * |
108 | * Atomically sets the value of @v to @n. |
108 | * Atomically sets the value of @v to @n. |
109 | */ |
109 | */ |
110 | static inline void atomic64_set(atomic64_t *v, long long i) |
110 | static inline void atomic64_set(atomic64_t *v, long long i) |
111 | { |
111 | { |
112 | __sync_lock_test_and_set((long long *)&v->counter, i); |
112 | __sync_lock_test_and_set((long long *)&v->counter, i); |
113 | } |
113 | } |
114 | 114 | ||
115 | /** |
115 | /** |
116 | * atomic64_read - read atomic64 variable |
116 | * atomic64_read - read atomic64 variable |
117 | * @v: pointer to type atomic64_t |
117 | * @v: pointer to type atomic64_t |
118 | * |
118 | * |
119 | * Atomically reads the value of @v and returns it. |
119 | * Atomically reads the value of @v and returns it. |
120 | */ |
120 | */ |
121 | static inline long long atomic64_read(const atomic64_t *v) |
121 | static inline long long atomic64_read(const atomic64_t *v) |
122 | { |
122 | { |
123 | return __sync_fetch_and_add( (long long *)&v->counter, 0); |
123 | return __sync_fetch_and_add( (long long *)&v->counter, 0); |
124 | } |
124 | } |
125 | 125 | ||
126 | /** |
126 | /** |
127 | * atomic64_add_return - add and return |
127 | * atomic64_add_return - add and return |
128 | * @i: integer value to add |
128 | * @i: integer value to add |
129 | * @v: pointer to type atomic64_t |
129 | * @v: pointer to type atomic64_t |
130 | * |
130 | * |
131 | * Atomically adds @i to @v and returns @i + *@v |
131 | * Atomically adds @i to @v and returns @i + *@v |
132 | */ |
132 | */ |
133 | static inline long long atomic64_add_return(long long i, atomic64_t *v) |
133 | static inline long long atomic64_add_return(long long i, atomic64_t *v) |
134 | { |
134 | { |
135 | alternative_atomic64(add_return, |
135 | alternative_atomic64(add_return, |
136 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
136 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
137 | ASM_NO_INPUT_CLOBBER("memory")); |
137 | ASM_NO_INPUT_CLOBBER("memory")); |
138 | return i; |
138 | return i; |
139 | } |
139 | } |
140 | 140 | ||
141 | /* |
141 | /* |
142 | * Other variants with different arithmetic operators: |
142 | * Other variants with different arithmetic operators: |
143 | */ |
143 | */ |
144 | static inline long long atomic64_sub_return(long long i, atomic64_t *v) |
144 | static inline long long atomic64_sub_return(long long i, atomic64_t *v) |
145 | { |
145 | { |
146 | alternative_atomic64(sub_return, |
146 | alternative_atomic64(sub_return, |
147 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
147 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
148 | ASM_NO_INPUT_CLOBBER("memory")); |
148 | ASM_NO_INPUT_CLOBBER("memory")); |
149 | return i; |
149 | return i; |
150 | } |
150 | } |
151 | 151 | ||
152 | static inline long long atomic64_inc_return(atomic64_t *v) |
152 | static inline long long atomic64_inc_return(atomic64_t *v) |
153 | { |
153 | { |
154 | long long a; |
154 | long long a; |
155 | alternative_atomic64(inc_return, "=&A" (a), |
155 | alternative_atomic64(inc_return, "=&A" (a), |
156 | "S" (v) : "memory", "ecx"); |
156 | "S" (v) : "memory", "ecx"); |
157 | return a; |
157 | return a; |
158 | } |
158 | } |
159 | 159 | ||
160 | static inline long long atomic64_dec_return(atomic64_t *v) |
160 | static inline long long atomic64_dec_return(atomic64_t *v) |
161 | { |
161 | { |
162 | long long a; |
162 | long long a; |
163 | alternative_atomic64(dec_return, "=&A" (a), |
163 | alternative_atomic64(dec_return, "=&A" (a), |
164 | "S" (v) : "memory", "ecx"); |
164 | "S" (v) : "memory", "ecx"); |
165 | return a; |
165 | return a; |
166 | } |
166 | } |
167 | 167 | ||
168 | /** |
168 | /** |
169 | * atomic64_add - add integer to atomic64 variable |
169 | * atomic64_add - add integer to atomic64 variable |
170 | * @i: integer value to add |
170 | * @i: integer value to add |
171 | * @v: pointer to type atomic64_t |
171 | * @v: pointer to type atomic64_t |
172 | * |
172 | * |
173 | * Atomically adds @i to @v. |
173 | * Atomically adds @i to @v. |
174 | */ |
174 | */ |
175 | static inline long long atomic64_add(long long i, atomic64_t *v) |
175 | static inline long long atomic64_add(long long i, atomic64_t *v) |
176 | { |
176 | { |
177 | __alternative_atomic64(add, add_return, |
177 | __alternative_atomic64(add, add_return, |
178 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
178 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
179 | ASM_NO_INPUT_CLOBBER("memory")); |
179 | ASM_NO_INPUT_CLOBBER("memory")); |
180 | return i; |
180 | return i; |
181 | } |
181 | } |
182 | 182 | ||
183 | /** |
183 | /** |
184 | * atomic64_sub - subtract the atomic64 variable |
184 | * atomic64_sub - subtract the atomic64 variable |
185 | * @i: integer value to subtract |
185 | * @i: integer value to subtract |
186 | * @v: pointer to type atomic64_t |
186 | * @v: pointer to type atomic64_t |
187 | * |
187 | * |
188 | * Atomically subtracts @i from @v. |
188 | * Atomically subtracts @i from @v. |
189 | */ |
189 | */ |
190 | static inline long long atomic64_sub(long long i, atomic64_t *v) |
190 | static inline long long atomic64_sub(long long i, atomic64_t *v) |
191 | { |
191 | { |
192 | __alternative_atomic64(sub, sub_return, |
192 | __alternative_atomic64(sub, sub_return, |
193 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
193 | ASM_OUTPUT2("+A" (i), "+c" (v)), |
194 | ASM_NO_INPUT_CLOBBER("memory")); |
194 | ASM_NO_INPUT_CLOBBER("memory")); |
195 | return i; |
195 | return i; |
196 | } |
196 | } |
197 | 197 | ||
198 | /** |
198 | /** |
199 | * atomic64_sub_and_test - subtract value from variable and test result |
199 | * atomic64_sub_and_test - subtract value from variable and test result |
200 | * @i: integer value to subtract |
200 | * @i: integer value to subtract |
201 | * @v: pointer to type atomic64_t |
201 | * @v: pointer to type atomic64_t |
202 | * |
202 | * |
203 | * Atomically subtracts @i from @v and returns |
203 | * Atomically subtracts @i from @v and returns |
204 | * true if the result is zero, or false for all |
204 | * true if the result is zero, or false for all |
205 | * other cases. |
205 | * other cases. |
206 | */ |
206 | */ |
207 | static inline int atomic64_sub_and_test(long long i, atomic64_t *v) |
207 | static inline int atomic64_sub_and_test(long long i, atomic64_t *v) |
208 | { |
208 | { |
209 | return atomic64_sub_return(i, v) == 0; |
209 | return atomic64_sub_return(i, v) == 0; |
210 | } |
210 | } |
211 | 211 | ||
212 | /** |
212 | /** |
213 | * atomic64_inc - increment atomic64 variable |
213 | * atomic64_inc - increment atomic64 variable |
214 | * @v: pointer to type atomic64_t |
214 | * @v: pointer to type atomic64_t |
215 | * |
215 | * |
216 | * Atomically increments @v by 1. |
216 | * Atomically increments @v by 1. |
217 | */ |
217 | */ |
218 | static inline void atomic64_inc(atomic64_t *v) |
218 | static inline void atomic64_inc(atomic64_t *v) |
219 | { |
219 | { |
220 | __alternative_atomic64(inc, inc_return, /* no output */, |
220 | __alternative_atomic64(inc, inc_return, /* no output */, |
221 | "S" (v) : "memory", "eax", "ecx", "edx"); |
221 | "S" (v) : "memory", "eax", "ecx", "edx"); |
222 | } |
222 | } |
223 | 223 | ||
224 | /** |
224 | /** |
225 | * atomic64_dec - decrement atomic64 variable |
225 | * atomic64_dec - decrement atomic64 variable |
226 | * @v: pointer to type atomic64_t |
226 | * @v: pointer to type atomic64_t |
227 | * |
227 | * |
228 | * Atomically decrements @v by 1. |
228 | * Atomically decrements @v by 1. |
229 | */ |
229 | */ |
230 | static inline void atomic64_dec(atomic64_t *v) |
230 | static inline void atomic64_dec(atomic64_t *v) |
231 | { |
231 | { |
232 | __alternative_atomic64(dec, dec_return, /* no output */, |
232 | __alternative_atomic64(dec, dec_return, /* no output */, |
233 | "S" (v) : "memory", "eax", "ecx", "edx"); |
233 | "S" (v) : "memory", "eax", "ecx", "edx"); |
234 | } |
234 | } |
235 | 235 | ||
236 | /** |
236 | /** |
237 | * atomic64_dec_and_test - decrement and test |
237 | * atomic64_dec_and_test - decrement and test |
238 | * @v: pointer to type atomic64_t |
238 | * @v: pointer to type atomic64_t |
239 | * |
239 | * |
240 | * Atomically decrements @v by 1 and |
240 | * Atomically decrements @v by 1 and |
241 | * returns true if the result is 0, or false for all other |
241 | * returns true if the result is 0, or false for all other |
242 | * cases. |
242 | * cases. |
243 | */ |
243 | */ |
244 | static inline int atomic64_dec_and_test(atomic64_t *v) |
244 | static inline int atomic64_dec_and_test(atomic64_t *v) |
245 | { |
245 | { |
246 | return atomic64_dec_return(v) == 0; |
246 | return atomic64_dec_return(v) == 0; |
247 | } |
247 | } |
248 | 248 | ||
249 | /** |
249 | /** |
250 | * atomic64_inc_and_test - increment and test |
250 | * atomic64_inc_and_test - increment and test |
251 | * @v: pointer to type atomic64_t |
251 | * @v: pointer to type atomic64_t |
252 | * |
252 | * |
253 | * Atomically increments @v by 1 |
253 | * Atomically increments @v by 1 |
254 | * and returns true if the result is zero, or false for all |
254 | * and returns true if the result is zero, or false for all |
255 | * other cases. |
255 | * other cases. |
256 | */ |
256 | */ |
257 | static inline int atomic64_inc_and_test(atomic64_t *v) |
257 | static inline int atomic64_inc_and_test(atomic64_t *v) |
258 | { |
258 | { |
259 | return atomic64_inc_return(v) == 0; |
259 | return atomic64_inc_return(v) == 0; |
260 | } |
260 | } |
261 | 261 | ||
262 | /** |
262 | /** |
263 | * atomic64_add_negative - add and test if negative |
263 | * atomic64_add_negative - add and test if negative |
264 | * @i: integer value to add |
264 | * @i: integer value to add |
265 | * @v: pointer to type atomic64_t |
265 | * @v: pointer to type atomic64_t |
266 | * |
266 | * |
267 | * Atomically adds @i to @v and returns true |
267 | * Atomically adds @i to @v and returns true |
268 | * if the result is negative, or false when |
268 | * if the result is negative, or false when |
269 | * result is greater than or equal to zero. |
269 | * result is greater than or equal to zero. |
270 | */ |
270 | */ |
271 | static inline int atomic64_add_negative(long long i, atomic64_t *v) |
271 | static inline int atomic64_add_negative(long long i, atomic64_t *v) |
272 | { |
272 | { |
273 | return atomic64_add_return(i, v) < 0; |
273 | return atomic64_add_return(i, v) < 0; |
274 | } |
274 | } |
275 | 275 | ||
276 | /** |
276 | /** |
277 | * atomic64_add_unless - add unless the number is a given value |
277 | * atomic64_add_unless - add unless the number is a given value |
278 | * @v: pointer of type atomic64_t |
278 | * @v: pointer of type atomic64_t |
279 | * @a: the amount to add to v... |
279 | * @a: the amount to add to v... |
280 | * @u: ...unless v is equal to u. |
280 | * @u: ...unless v is equal to u. |
281 | * |
281 | * |
282 | * Atomically adds @a to @v, so long as it was not @u. |
282 | * Atomically adds @a to @v, so long as it was not @u. |
283 | * Returns non-zero if the add was done, zero otherwise. |
283 | * Returns non-zero if the add was done, zero otherwise. |
284 | */ |
284 | */ |
285 | static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u) |
285 | static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u) |
286 | { |
286 | { |
287 | unsigned low = (unsigned)u; |
287 | unsigned low = (unsigned)u; |
288 | unsigned high = (unsigned)(u >> 32); |
288 | unsigned high = (unsigned)(u >> 32); |
289 | alternative_atomic64(add_unless, |
289 | alternative_atomic64(add_unless, |
290 | ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)), |
290 | ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)), |
291 | "S" (v) : "memory"); |
291 | "S" (v) : "memory"); |
292 | return (int)a; |
292 | return (int)a; |
293 | } |
293 | } |
294 | 294 | ||
295 | 295 | ||
296 | static inline int atomic64_inc_not_zero(atomic64_t *v) |
296 | static inline int atomic64_inc_not_zero(atomic64_t *v) |
297 | { |
297 | { |
298 | int r; |
298 | int r; |
299 | alternative_atomic64(inc_not_zero, "=&a" (r), |
299 | alternative_atomic64(inc_not_zero, "=&a" (r), |
300 | "S" (v) : "ecx", "edx", "memory"); |
300 | "S" (v) : "ecx", "edx", "memory"); |
301 | return r; |
301 | return r; |
302 | } |
302 | } |
303 | 303 | ||
304 | static inline long long atomic64_dec_if_positive(atomic64_t *v) |
304 | static inline long long atomic64_dec_if_positive(atomic64_t *v) |
305 | { |
305 | { |
306 | long long r; |
306 | long long r; |
307 | alternative_atomic64(dec_if_positive, "=&A" (r), |
307 | alternative_atomic64(dec_if_positive, "=&A" (r), |
308 | "S" (v) : "ecx", "memory"); |
308 | "S" (v) : "ecx", "memory"); |
309 | return r; |
309 | return r; |
310 | } |
310 | } |
311 | 311 | ||
312 | #undef alternative_atomic64 |
312 | #undef alternative_atomic64 |
313 | #undef __alternative_atomic64 |
313 | #undef __alternative_atomic64 |
314 | 314 | ||
315 | #endif /* _ASM_X86_ATOMIC64_32_H */> |
315 | #endif /* _ASM_X86_ATOMIC64_32_H */> |