#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_X86_32
/*
 * Some non-Intel clones support out of order store. wmb() ceases to be a
 * nop for these.
 */
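/*
 * A lock-prefixed read-modify-write to the top of the stack is a full
 * memory barrier on every x86 CPU, which is why it can stand in for
 * mfence/lfence/sfence on pre-SSE2 parts. The comment fragments left at
 * the end of each define below appear to be remnants of upstream's
 * alternative() patching, which swaps in the real fence instruction when
 * X86_FEATURE_XMM/XMM2 is available.
 */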
#define mb()	asm volatile("lock; addl $0,0(%%esp)" ::: "memory")	/*, "mfence", X86_FEATURE_XMM2) */
#define rmb()	asm volatile("lock; addl $0,0(%%esp)" ::: "memory")	/*, "lfence", X86_FEATURE_XMM2) */
#define wmb()	asm volatile("lock; addl $0,0(%%esp)" ::: "memory")	/*, "sfence", X86_FEATURE_XMM)  */
#else
#define mb() 	asm volatile("mfence":::"memory")
#define rmb()	asm volatile("lfence":::"memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
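
/*
 * Illustrative sketch only; the desc, dev and DOORBELL names below are
 * hypothetical. wmb() is the portable way to keep descriptor stores
 * visible before the MMIO write that tells the device to fetch them:
 *
 *	desc->addr = dma_addr;
 *	desc->len  = len;
 *	wmb();
 *	writel(1, dev->mmio + DOORBELL);
 */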

#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb()	rmb()
#else
#define dma_rmb()	barrier()
#endif
#define dma_wmb()	barrier()
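
/*
 * For coherent DMA memory, a compiler barrier is all the CPU needs here:
 * x86's TSO model already keeps loads ordered with respect to other loads
 * and stores with respect to other stores. CONFIG_X86_PPRO_FENCE covers
 * old PentiumPro-era ordering errata, where dma_rmb() must be a real
 * rmb(). Typical pattern (desc, DESC_DONE and process() are hypothetical
 * names):
 *
 *	if (desc->status & DESC_DONE) {
 *		dma_rmb();
 *		process(desc->data);
 *	}
 */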

#ifdef CONFIG_SMP
#define smp_mb()	mb()
#define smp_rmb()	dma_rmb()
#define smp_wmb()	barrier()
#define smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
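/*
 * xchg() with a memory operand carries an implicit lock prefix, so the
 * SMP smp_store_mb() above performs the store and a full barrier in a
 * single instruction.
 */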
#else /* !SMP */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_store_mb(var, value) do { WRITE_ONCE(var, value); barrier(); } while (0)
#endif /* SMP */

#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)
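
/*
 * Address-dependent loads are always ordered on x86; only Alpha needs a
 * real barrier between loading a pointer and dereferencing it, so both
 * read_barrier_depends() variants compile away here.
 */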

#if defined(CONFIG_X86_PPRO_FENCE)

/*
 * For this option x86 doesn't have a strong TSO memory
 * model and we should fall back to full barriers.
 */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	___p1;								\
})

#else /* regular x86 TSO memory ordering */

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#endif
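
/*
 * Callers normally reach these through the smp_store_release() /
 * smp_load_acquire() wrappers built on the __smp_* forms in the generic
 * barrier header. A minimal message-passing sketch, with hypothetical
 * ready/data names:
 *
 *	// producer
 *	data = 42;
 *	smp_store_release(&ready, 1);
 *
 *	// consumer
 *	if (smp_load_acquire(&ready))
 *		use(data);		(guaranteed to see data == 42)
 *
 * On regular TSO x86 both sides compile to plain MOVs plus compiler
 * barriers; only CONFIG_X86_PPRO_FENCE needs the heavier __smp_mb().
 */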

/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
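
/*
 * Typical pairing (the atomic_t name is hypothetical): the lock prefix on
 * the atomic RMW already orders memory, so only the compiler has to be
 * kept from moving accesses across it.
 *
 *	smp_mb__before_atomic();
 *	atomic_inc(&pending);
 *	smp_mb__after_atomic();
 */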

#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */