#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

static inline void native_clts(void)
{
	asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

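/*
 * How this plays out in the accessors below: each read lists
 * "=m" (__force_order) as an output and each write lists "m" (__force_order)
 * as an input, so the compiler sees an artificial dependency between
 * successive control-register accesses and keeps them in program order
 * without needing a full "memory" clobber.
 */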
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/* This could fault if %cr4 does not exist. On x86_64, CR4 always
	 * exists, so it will never fail. */
#ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}
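/*
 * Note on the fixup above: if the mov from %cr4 faults (386/486-class CPUs
 * without CR4), the _ASM_EXTABLE entry resumes execution at label 2 and the
 * "0" (0) input constraint guarantees val still holds the preloaded zero,
 * so the function simply returns 0.
 */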

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif
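/*
 * CR8 mirrors the local APIC task-priority register (TPR) in 64-bit mode,
 * which is why these accessors are only defined for CONFIG_X86_64.
 */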

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction. Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction. Loads the contents of EAX into PKRU
	 * and requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c" (ecx), "d" (edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
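/*
 * Illustrative sketch (not part of this header): PKRU holds two bits per
 * protection key -- access-disable at bit 2*pkey and write-disable at bit
 * 2*pkey + 1. A hypothetical helper built on the accessors above could be:
 *
 *	static inline bool pkey_access_disabled(int pkey)
 *	{
 *		return __read_pkru() & (1u << (2 * pkey));
 *	}
 */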

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline unsigned long __read_cr4_safe(void)
{
	return native_read_cr4_safe();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

/* Clear the 'TS' bit */
static inline void clts(void)
{
	native_clts();
}

#endif /* CONFIG_PARAVIRT */

#define stts() write_cr0(read_cr0() | X86_CR0_TS)
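/*
 * stts() and clts() pair up: setting CR0.TS makes the next FPU/SSE
 * instruction raise a device-not-available fault (#NM), which the kernel
 * uses for lazy FPU context switching; clts() clears the bit again.
 */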

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}
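/*
 * Illustrative usage (a sketch, not part of this header): flushing a buffer
 * is done one cache line at a time, e.g.
 *
 *	char *p;
 *	for (p = buf; p < buf + len; p += boot_cpu_data.x86_clflush_size)
 *		clflushopt(p);
 *
 * where buf/len stand for a hypothetical caller's buffer and length.
 */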

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */