/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
        unsigned long val;
        asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr0(unsigned long val)
{
        asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}
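
/*
 * Usage sketch (added for illustration, not part of the original header):
 * control registers are typically updated read-modify-write. For example,
 * briefly clearing the write-protect bit (X86_CR0_WP, from
 * <asm/processor-flags.h>) around a kernel text patch might look like:
 *
 *	unsigned long cr0 = native_read_cr0();
 *	native_write_cr0(cr0 & ~X86_CR0_WP);
 *	... patch read-only kernel text ...
 *	native_write_cr0(cr0);
 */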

static inline unsigned long native_read_cr2(void)
{
        unsigned long val;
        asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr2(unsigned long val)
{
        asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}
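
/*
 * Note (added for clarity): CR2 holds the linear address that caused the
 * most recent page fault, which is why the page-fault handler reads it
 * via read_cr2() before the value can be clobbered by another fault.
 */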

static inline unsigned long __native_read_cr3(void)
{
        unsigned long val;
        asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr3(unsigned long val)
{
        asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
        unsigned long val;
#ifdef CONFIG_X86_32
        /*
         * This could fault if CR4 does not exist.  Non-existent CR4
         * is functionally equivalent to CR4 == 0, so keep it simple and
         * pretend that CR4 == 0 on CPUs that don't have it: the "0" (0)
         * input constraint pre-loads the output register with zero, and
         * the exception table entry makes a faulting mov resume at
         * label 2, leaving that zero in val.
         */
        asm volatile("1: mov %%cr4, %0\n"
                     "2:\n"
                     _ASM_EXTABLE(1b, 2b)
                     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
        /* CR4 always exists on x86_64. */
        asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
        return val;
}

static inline void native_write_cr4(unsigned long val)
{
        asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
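/*
 * Note (added for clarity): CR8 exists only in 64-bit mode; its low four
 * bits give the task-priority class, mirrored in the local APIC TPR,
 * hence the CONFIG_X86_64 guard around these accessors.
 */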
static inline unsigned long native_read_cr8(void)
{
        unsigned long cr8;
        asm volatile("movq %%cr8,%0" : "=r" (cr8));
        return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
        asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
        u32 ecx = 0;
        u32 edx, pkru;

        /*
         * "rdpkru" instruction.  Places PKRU contents into EAX,
         * clears EDX, and requires that ecx = 0.
         */
        asm volatile(".byte 0x0f,0x01,0xee\n\t"
                     : "=a" (pkru), "=d" (edx)
                     : "c" (ecx));
        return pkru;
}
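
/*
 * Layout sketch (added for reference): PKRU packs two bits per protection
 * key, Access-Disable in bit 2*k and Write-Disable in bit 2*k + 1 for
 * key k. Checking whether key 'pkey' permits writes might look like:
 *
 *	bool writable = !(__read_pkru() & (1u << (2 * pkey + 1)));
 */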

static inline void __write_pkru(u32 pkru)
{
        u32 ecx = 0, edx = 0;

        /*
         * "wrpkru" instruction.  Loads the contents of EAX into PKRU;
         * requires that ecx = edx = 0.
         */
        asm volatile(".byte 0x0f,0x01,0xef\n\t"
                     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
        return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

static inline void native_wbinvd(void)
{
        asm volatile("wbinvd": : :"memory");
}
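
/*
 * Note (added for clarity): wbinvd writes back and invalidates every cache
 * line on the executing CPU. It is serializing and can take a very long
 * time, so it is reserved for heavyweight paths such as cache-attribute
 * changes; ordinary flushing should use clflush and friends below.
 */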

extern asmlinkage void native_load_gs_index(unsigned);

static inline unsigned long __read_cr4(void)
{
        return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
        return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
        native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
        return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
        native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
        return __native_read_cr3();
}
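
/*
 * Sketch (added for illustration): read_cr3_pa(), defined elsewhere,
 * masks off the PCID and flag bits to recover the physical address of
 * the top-level page table, roughly:
 *
 *	#define read_cr3_pa()	(__read_cr3() & CR3_ADDR_MASK)
 */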

static inline void write_cr3(unsigned long x)
{
        native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
        native_write_cr4(x);
}

static inline void wbinvd(void)
{
        native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
        return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
        native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
        native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */

static inline void clflush(volatile void *__p)
{
        asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}
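
/*
 * Usage sketch (added for illustration): flushing a whole buffer means
 * iterating cache line by cache line; boot_cpu_data.x86_clflush_size
 * holds the line size. Plain clflush is only ordered by mfence, so
 * ordering-sensitive callers fence after (and often before) the loop:
 *
 *	for (p = start; p < end; p += boot_cpu_data.x86_clflush_size)
 *		clflush(p);
 *	mb();
 */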

static inline void clflushopt(volatile void *__p)
{
        alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
                       ".byte 0x66; clflush %P0",
                       X86_FEATURE_CLFLUSHOPT,
                       "+m" (*(volatile char __force *)__p));
}
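
/*
 * Note (added for clarity): the two alternatives above must be the same
 * length so they can be patched in place at boot. clflushopt is just
 * clflush with a 0x66 prefix, so the fallback pads plain clflush with a
 * one-byte DS segment-override prefix (NOP_DS_PREFIX, 0x3e) to match.
 */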

static inline void clwb(volatile void *__p)
{
        volatile struct { char x[64]; } *p = __p;

        asm volatile(ALTERNATIVE_2(
                ".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
                ".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
                X86_FEATURE_CLFLUSHOPT,
                ".byte 0x66, 0x0f, 0xae, 0x30", /* clwb (%%rax) */
                X86_FEATURE_CLWB)
                : [p] "+m" (*p)
                : [pax] "a" (p));
}
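
/*
 * Note (added for clarity): the dummy 64-byte struct makes the "+m"
 * constraint cover the whole cache line, so the compiler cannot move
 * stores to that line past the flush. The "a" constraint pins the
 * pointer in %rax because the hard-coded .byte encodings above address
 * memory through (%rax).
 */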

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */