@@ -53,24 +53,24 @@
  * model and we should fall back to full barriers.
  */
 
-#define smp_store_release(p, v)						\
+#define __smp_store_release(p, v)					\
 do {										\
 	compiletime_assert_atomic_type(*p);				\
-	smp_mb();								\
+	__smp_mb();								\
 	WRITE_ONCE(*p, v);						\
 } while (0)
 
-#define smp_load_acquire(p)						\
+#define __smp_load_acquire(p)						\
 ({										\
 	typeof(*p) ___p1 = READ_ONCE(*p);				\
 	compiletime_assert_atomic_type(*p);				\
-	smp_mb();								\
+	__smp_mb();								\
 	___p1;									\
 })
 
 #else /* regular x86 TSO memory ordering */
 
-#define smp_store_release(p, v)						\
+#define __smp_store_release(p, v)					\
 do {										\
 	compiletime_assert_atomic_type(*p);				\
 	barrier();								\
@@ -77,7 +77,7 @@
 	WRITE_ONCE(*p, v);						\
 } while (0)
 
-#define smp_load_acquire(p)						\
+#define __smp_load_acquire(p)						\
 ({										\
 	typeof(*p) ___p1 = READ_ONCE(*p);				\
 	compiletime_assert_atomic_type(*p);				\
@@ -88,7 +88,9 @@
 #endif
 
 /* Atomic operations are already serializing on x86 */
-#define smp_mb__before_atomic()	barrier()
-#define smp_mb__after_atomic()	barrier()
+#define __smp_mb__before_atomic()	barrier()
+#define __smp_mb__after_atomic()	barrier()
 
 #include <asm-generic/barrier.h>
 
 #endif /* _ASM_X86_BARRIER_H */
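
For reference, on SMP kernels asm-generic/barrier.h maps the public smp_store_release()/smp_load_acquire() onto the __smp_* definitions above. A minimal sketch of the release/acquire pairing these macros implement, using hypothetical msg/msg_ready variables and producer()/consumer() functions that are not part of this patch:

#include <linux/compiler.h>
#include <asm/barrier.h>

static int msg;		/* payload written by the producer (hypothetical) */
static int msg_ready;	/* flag published with release semantics (hypothetical) */

static void producer(void)
{
	msg = 42;
	/* On TSO x86 this expands to barrier(); WRITE_ONCE(msg_ready, 1); */
	smp_store_release(&msg_ready, 1);
}

static int consumer(void)
{
	/* Pairs with the release above: a reader that observes msg_ready == 1
	 * is guaranteed to also observe msg == 42. */
	if (smp_load_acquire(&msg_ready))
		return msg;
	return -1;
}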