#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_X86_32
/*
 * Some non-Intel clones support out of order store. wmb() ceases to be a
 * nop for these.
 */
#define mb() asm volatile("lock; addl $0,0(%esp)")/*, "mfence", X86_FEATURE_XMM2) */
#define rmb() asm volatile("lock; addl $0,0(%esp)")/*, "lfence", X86_FEATURE_XMM2) */
#define wmb() asm volatile("lock; addl $0,0(%esp)")/*, "sfence", X86_FEATURE_XMM)  */
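/*
 * Note: "lock; addl $0,0(%esp)" is a locked no-op that acts as a full
 * memory barrier on CPUs that lack the SSE/SSE2 fence instructions.
 * The commented-out tails above appear to be left over from the upstream
 * Linux definitions, which use alternative() to patch in
 * mfence/lfence/sfence when X86_FEATURE_XMM2/XMM is available; this port
 * always uses the locked add.
 */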
#else
#define mb()	asm volatile("mfence" ::: "memory")
#define rmb()	asm volatile("lfence" ::: "memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
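/*
 * On SSE2-capable (and all 64-bit) CPUs the fence instructions are used
 * directly: mfence orders all memory accesses, lfence orders loads and
 * sfence orders stores with respect to surrounding accesses.
 */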

#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb()	rmb()
#else
#define dma_rmb()	barrier()
#endif
#define dma_wmb()	barrier()
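/*
 * Illustrative sketch (not part of the original header): dma_wmb() and
 * dma_rmb() order CPU accesses to coherent DMA memory shared with a
 * device.  The descriptor layout, flag bits and doorbell register below
 * are hypothetical.
 *
 *	desc->addr = buf_dma;
 *	desc->len  = buf_len;
 *	dma_wmb();			// publish fields before the own bit
 *	desc->flags = DESC_OWNED_BY_HW;
 *	writel(1, ring->doorbell);	// tell the device to look
 *
 *	if (desc->flags & DESC_DONE) {
 *		dma_rmb();		// read status before the payload
 *		consume(desc);
 *	}
 */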

#define __smp_mb()	mb()
#define __smp_rmb()	dma_rmb()
#define __smp_wmb()	barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
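/*
 * __smp_store_mb(): store the value and act as a full memory barrier;
 * xchg() with a memory operand carries an implicit lock prefix, so no
 * explicit fence is needed.
 */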

#if defined(CONFIG_X86_PPRO_FENCE)

/*
 * For this option x86 doesn't have a strong TSO memory
 * model and we should fall back to full barriers.
 */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	___p1;								\
})

#else /* regular x86 TSO memory ordering */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#endif
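/*
 * Illustrative sketch (not part of the original header): on TSO x86 a
 * compiler barrier is all that release/acquire needs.  A typical
 * producer/consumer hand-off looks like this (data and ready are
 * hypothetical shared variables; callers would normally go through the
 * smp_store_release()/smp_load_acquire() wrappers):
 *
 *	// producer
 *	data = 42;
 *	__smp_store_release(&ready, 1);	// data is visible before ready
 *
 *	// consumer
 *	if (__smp_load_acquire(&ready))	// ready is read before data
 *		use(data);
 */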

/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
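/*
 * Locked read-modify-write instructions already imply full ordering on
 * x86, so only a compiler barrier is needed on either side of an atomic
 * operation.
 */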

#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */