Diff of Rev 5270 against Rev 6082.
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
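
/*
 * Illustrative sketch, not part of the original header: a one-shot
 * producer/consumer handshake built from the two accessors above. The
 * RELEASE store orders the write of `payload' before the flag store;
 * the ACQUIRE load orders the flag read before the read of `payload'.
 */
static int payload;
static atomic_t ready = ATOMIC_INIT(0);

static void example_publish(void)
{
	payload = 42;			/* written... */
	atomic_set_release(&ready, 1);	/* ...before the flag is set */
}

static int example_consume(void)
{
	while (!atomic_read_acquire(&ready))
		cpu_relax();		/* flag observed... */
	return payload;			/* ...before this read; yields 42 */
}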

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
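
/*
 * For example, on an architecture that provides only
 * atomic_add_return_relaxed(), the fallbacks below make
 * atomic_add_return_acquire(i, v) expand (roughly) to:
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * i.e. the relaxed read-modify-write followed by a barrier that
 * upgrades it to ACQUIRE ordering.
 */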

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
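
/*
 * The same three-way fallback repeats for every operation below: an
 * architecture that defines only the fully ordered op gets all the
 * suffixed forms aliased to it, while one that defines the _relaxed
 * form has any missing _acquire/_release/fully-ordered forms
 * synthesized from it via the __atomic_op_*() helpers above.
 */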

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)		__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
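
/*
 * Sketch (assumed example, not in the original file): a minimal
 * test-and-set lock built from the variants above. ACQUIRE on the
 * winning cmpxchg and RELEASE on the unlocking store bracket the
 * critical section; a *failed* cmpxchg_acquire implies no ordering,
 * which is fine here because the loser only retries.
 */
static inline bool example_trylock(atomic_t *lock)
{
	return atomic_cmpxchg_acquire(lock, 0, 1) == 0;
}

static inline void example_unlock(atomic_t *lock)
{
	atomic_set_release(lock, 0);
}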

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t

[... lines 429-445 unchanged, not shown in this diff ...]

 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
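
/*
 * Sketch (assumed usage, not from the original file): the classic
 * lookup-side refcount pattern. A reference may only be taken while
 * at least one is already held, so a count that has dropped to zero
 * is never resurrected.
 */
struct example_obj {
	atomic_t refcnt;
};

static bool example_obj_tryget(struct example_obj *o)
{
	return atomic_inc_not_zero(&o->refcnt);
}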

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#endif

static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	atomic_andnot(mask, v);
}

static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	atomic_or(mask, v);
}
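
/*
 * Sketch (hypothetical flag, not from the original file): the
 * open-coded replacements for the two deprecated helpers above.
 */
#define EXAMPLE_FLAG_DIRTY	0x1

static inline void example_mark_dirty(atomic_t *flags)
{
	atomic_or(EXAMPLE_FLAG_DIRTY, flags);		/* was atomic_set_mask() */
}

static inline void example_clear_dirty(atomic_t *flags)
{
	atomic_andnot(EXAMPLE_FLAG_DIRTY, flags);	/* was atomic_clear_mask() */
}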

/**
 * atomic_inc_not_zero_hint - increment if not null
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment

[... lines 472-545 unchanged, not shown in this diff ...]

	}
	return dec;
}
#endif

/* Rev 5270 (removed by Rev 6082 in this hunk): */

#ifndef CONFIG_ARCH_HAS_ATOMIC_OR
static inline void atomic_or(int i, atomic_t *v)
{
	int old;
	int new;

	do {
		old = atomic_read(v);
		new = old | i;
	} while (atomic_cmpxchg(v, old, new) != old);
}
#endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */

#include <asm-generic/atomic-long.h>
#ifdef CONFIG_GENERIC_ATOMIC64

/* Rev 6082 (added in place of the block above): */

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#endif
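
/*
 * Sketch (illustration only): the removed atomic_or() above is an
 * instance of the generic cmpxchg() retry loop, which can implement
 * any read-modify-write as a pure function of the old value:
 */
static inline int example_fetch_or(atomic_t *v, int bits)
{
	int old, new;

	do {
		old = atomic_read(v);
		new = old | bits;
	} while (atomic_cmpxchg(v, old, new) != old);

	return old;
}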