#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
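/*
 * CR8 is the 64-bit task-priority register (TPR); writing it raises or
 * lowers the priority threshold below which external interrupts are blocked.
 */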
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
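/*
 * PKRU holds an access-disable and a write-disable bit for each of the
 * 16 memory protection keys.
 */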
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU and
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

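/* Write all modified cache lines back to memory and invalidate the caches. */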
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */

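/* Flush the cache line containing the given address from every cache level. */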
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

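/*
 * clflushopt is a weakly-ordered clflush: flushes of different cache lines
 * may proceed in parallel.  Falls back to plain clflush on CPUs without
 * CLFLUSHOPT.
 */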
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

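/*
 * clwb writes the cache line back to memory but, unlike clflush and
 * clflushopt, may leave the line valid in the cache.  Falls back to
 * clflushopt or clflush on CPUs without CLWB.
 */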
static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])",	/* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",	/* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */