// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
e683014c | 9 | #include <linux/export.h> |
7c0f6ba6 | 10 | #include <linux/uaccess.h> |
1da177e4 | 11 | #include <asm/mmx.h> |
9c675128 | 12 | #include <asm/asm.h> |
1da177e4 | 13 | |
#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 * Consulted (as movsl_mask.mask) by __movsl_is_ok() below; presumably
 * initialized by CPU setup code elsewhere — not in this file.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif
20 | ||
/*
 * Decide whether a plain "rep; movsl" copy is acceptable for a transfer
 * of @n bytes between addresses @a1 and @a2.  On Intel parts where
 * movsl_mask is configured, large copies (>= 64 bytes) whose endpoints
 * are misaligned relative to each other are rejected so the unrolled
 * copy routine can be used instead.  Returns 1 if movsl is OK, else 0.
 */
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
	int ok = 1;

#ifdef CONFIG_X86_INTEL_USERCOPY
	/* Big copy with mutually misaligned endpoints: movsl is slow. */
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		ok = 0;
#endif
	return ok;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
1da177e4 | 31 | |
/*
 * Zero Userspace
 */

/*
 * Clear @size bytes at user address @addr.
 *
 * Operands (from the constraints below):
 *   %0 = ecx: dword count in, bytes NOT cleared out
 *   %1 = edi: destination
 *   %2 = size & 3 (tail byte count), held in a register
 * The bulk is cleared with "rep; stosl" (eax preloaded with 0), then
 * the 0-3 remaining bytes with "rep; stosb".  On a fault during the
 * stosl, the fixup at 3: rebuilds the remaining byte count as
 * (size & 3) + 4 * dwords-left.  ASM_STAC/ASM_CLAC open and close the
 * user-access window.  Note @size is a macro argument updated in place
 * via the "=&c" output; callers read it back as the uncleared count.
 */
#define __do_clear_user(addr,size)				\
do {								\
	int __d0;						\
	might_fault();						\
	__asm__ __volatile__(					\
		ASM_STAC "\n"					\
		"0: rep; stosl\n"				\
		" movl %2,%0\n"					\
		"1: rep; stosb\n"				\
		"2: " ASM_CLAC "\n"				\
		".section .fixup,\"ax\"\n"			\
		"3: lea 0(%2,%0,4),%0\n"			\
		" jmp 2b\n"					\
		".previous\n"					\
		_ASM_EXTABLE(0b,3b)				\
		_ASM_EXTABLE(1b,2b)				\
		: "=&c"(size), "=&D" (__d0)			\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
} while (0)
55 | ||
/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	/*
	 * __do_clear_user() updates n in place to the uncleared count.
	 * If the range fails access_ok(), nothing is cleared and the
	 * full n is returned.
	 */
	if (access_ok(VERIFY_WRITE, to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
1da177e4 LT |
75 | |
/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	/* No access_ok() here — the caller has already validated @to. */
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);
1da177e4 | 94 | |
1da177e4 LT |
#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Hand-unrolled copy to user space (@to is a user pointer, @from is
 * kernel memory).  Caller is expected to have opened the user-access
 * window (see stac() in __copy_user_ll()).
 *
 * Operands (from the constraints below):
 *   %0 = ecx: byte count in, bytes NOT copied out
 *   %3 = edi: destination, %4 = esi: source
 * The main loop (1: .. "ja 1b") moves 64 bytes per iteration through
 * eax/edx.  The loads at 1: and 2: read 32 and 64 bytes ahead and are
 * immediately overwritten at 3:, so they serve only as read-ahead
 * touches of upcoming source cachelines.  The sub-64-byte tail is
 * finished with rep movsl (99:) and rep movsb (37:).
 * Every faultable instruction has an exception-table entry; a fault in
 * the rep movsl lands in 101:, which rebuilds the remaining byte count
 * as eax (tail bytes) + 4 * ecx (dwords left).
 */
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"1: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 3f\n"
		"2: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"3: movl 0(%4), %%eax\n"
		"4: movl 4(%4), %%edx\n"
		"5: movl %%eax, 0(%3)\n"
		"6: movl %%edx, 4(%3)\n"
		"7: movl 8(%4), %%eax\n"
		"8: movl 12(%4),%%edx\n"
		"9: movl %%eax, 8(%3)\n"
		"10: movl %%edx, 12(%3)\n"
		"11: movl 16(%4), %%eax\n"
		"12: movl 20(%4), %%edx\n"
		"13: movl %%eax, 16(%3)\n"
		"14: movl %%edx, 20(%3)\n"
		"15: movl 24(%4), %%eax\n"
		"16: movl 28(%4), %%edx\n"
		"17: movl %%eax, 24(%3)\n"
		"18: movl %%edx, 28(%3)\n"
		"19: movl 32(%4), %%eax\n"
		"20: movl 36(%4), %%edx\n"
		"21: movl %%eax, 32(%3)\n"
		"22: movl %%edx, 36(%3)\n"
		"23: movl 40(%4), %%eax\n"
		"24: movl 44(%4), %%edx\n"
		"25: movl %%eax, 40(%3)\n"
		"26: movl %%edx, 44(%3)\n"
		"27: movl 48(%4), %%eax\n"
		"28: movl 52(%4), %%edx\n"
		"29: movl %%eax, 48(%3)\n"
		"30: movl %%edx, 52(%3)\n"
		"31: movl 56(%4), %%eax\n"
		"32: movl 60(%4), %%edx\n"
		"33: movl %%eax, 56(%3)\n"
		"34: movl %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 1b\n"
		"35: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"99: rep; movsl\n"
		"36: movl %%eax, %0\n"
		"37: rep; movsb\n"
		"100:\n"
		".section .fixup,\"ax\"\n"
		"101: lea 0(%%eax,%0,4),%0\n"
		" jmp 100b\n"
		".previous\n"
		_ASM_EXTABLE(1b,100b)
		_ASM_EXTABLE(2b,100b)
		_ASM_EXTABLE(3b,100b)
		_ASM_EXTABLE(4b,100b)
		_ASM_EXTABLE(5b,100b)
		_ASM_EXTABLE(6b,100b)
		_ASM_EXTABLE(7b,100b)
		_ASM_EXTABLE(8b,100b)
		_ASM_EXTABLE(9b,100b)
		_ASM_EXTABLE(10b,100b)
		_ASM_EXTABLE(11b,100b)
		_ASM_EXTABLE(12b,100b)
		_ASM_EXTABLE(13b,100b)
		_ASM_EXTABLE(14b,100b)
		_ASM_EXTABLE(15b,100b)
		_ASM_EXTABLE(16b,100b)
		_ASM_EXTABLE(17b,100b)
		_ASM_EXTABLE(18b,100b)
		_ASM_EXTABLE(19b,100b)
		_ASM_EXTABLE(20b,100b)
		_ASM_EXTABLE(21b,100b)
		_ASM_EXTABLE(22b,100b)
		_ASM_EXTABLE(23b,100b)
		_ASM_EXTABLE(24b,100b)
		_ASM_EXTABLE(25b,100b)
		_ASM_EXTABLE(26b,100b)
		_ASM_EXTABLE(27b,100b)
		_ASM_EXTABLE(28b,100b)
		_ASM_EXTABLE(29b,100b)
		_ASM_EXTABLE(30b,100b)
		_ASM_EXTABLE(31b,100b)
		_ASM_EXTABLE(32b,100b)
		_ASM_EXTABLE(33b,100b)
		_ASM_EXTABLE(34b,100b)
		_ASM_EXTABLE(35b,100b)
		_ASM_EXTABLE(36b,100b)
		_ASM_EXTABLE(37b,100b)
		_ASM_EXTABLE(99b,101b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
199 | ||
/*
 * Copy from user space with non-temporal (movnti) stores, so the
 * kernel-side destination does not pollute the cache.  Same 64-byte
 * unrolled structure as __copy_user_intel() above, with the same
 * read-ahead loads at 0: and 1:.
 *
 * Operands: %0 = ecx byte count (bytes NOT copied on return),
 * %3 = edi destination (kernel), %4 = esi source (user).
 * Only the loads from @from carry exception-table entries — the movnti
 * stores target kernel memory and so cannot take a user-access fault.
 * The sfence after the main loop orders the non-temporal stores before
 * the normal-store tail copy (6:/7:).  Fixup at 9: rebuilds the
 * remaining byte count as eax + 4 * ecx, as in the cached variant.
 */
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		" .align 2,0x90\n"
		"0: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 2f\n"
		"1: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"2: movl 0(%4), %%eax\n"
		"21: movl 4(%4), %%edx\n"
		" movnti %%eax, 0(%3)\n"
		" movnti %%edx, 4(%3)\n"
		"3: movl 8(%4), %%eax\n"
		"31: movl 12(%4),%%edx\n"
		" movnti %%eax, 8(%3)\n"
		" movnti %%edx, 12(%3)\n"
		"4: movl 16(%4), %%eax\n"
		"41: movl 20(%4), %%edx\n"
		" movnti %%eax, 16(%3)\n"
		" movnti %%edx, 20(%3)\n"
		"10: movl 24(%4), %%eax\n"
		"51: movl 28(%4), %%edx\n"
		" movnti %%eax, 24(%3)\n"
		" movnti %%edx, 28(%3)\n"
		"11: movl 32(%4), %%eax\n"
		"61: movl 36(%4), %%edx\n"
		" movnti %%eax, 32(%3)\n"
		" movnti %%edx, 36(%3)\n"
		"12: movl 40(%4), %%eax\n"
		"71: movl 44(%4), %%edx\n"
		" movnti %%eax, 40(%3)\n"
		" movnti %%edx, 44(%3)\n"
		"13: movl 48(%4), %%eax\n"
		"81: movl 52(%4), %%edx\n"
		" movnti %%eax, 48(%3)\n"
		" movnti %%edx, 52(%3)\n"
		"14: movl 56(%4), %%eax\n"
		"91: movl 60(%4), %%edx\n"
		" movnti %%eax, 56(%3)\n"
		" movnti %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 0b\n"
		" sfence \n"
		"5: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"6: rep; movsl\n"
		" movl %%eax,%0\n"
		"7: rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9: lea 0(%%eax,%0,4),%0\n"
		"16: jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE(0b,16b)
		_ASM_EXTABLE(1b,16b)
		_ASM_EXTABLE(2b,16b)
		_ASM_EXTABLE(21b,16b)
		_ASM_EXTABLE(3b,16b)
		_ASM_EXTABLE(31b,16b)
		_ASM_EXTABLE(4b,16b)
		_ASM_EXTABLE(41b,16b)
		_ASM_EXTABLE(10b,16b)
		_ASM_EXTABLE(51b,16b)
		_ASM_EXTABLE(11b,16b)
		_ASM_EXTABLE(61b,16b)
		_ASM_EXTABLE(12b,16b)
		_ASM_EXTABLE(71b,16b)
		_ASM_EXTABLE(13b,16b)
		_ASM_EXTABLE(81b,16b)
		_ASM_EXTABLE(14b,16b)
		_ASM_EXTABLE(91b,16b)
		_ASM_EXTABLE(6b,9b)
		_ASM_EXTABLE(7b,16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
287 | ||
#else

/*
 * Leave this declared but undefined; there should not be any references
 * to it when CONFIG_X86_INTEL_USERCOPY is off, and an accidental one
 * will fail at link time.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */
297 | ||
/* Generic arbitrary sized copy. */
/*
 * Copy @size bytes from @from to @to with rep-string instructions.
 *
 * Operands (from the constraints below):
 *   %0 = ecx: working count; bytes NOT copied on exit
 *   %1 = edi: destination, %2 = esi: source
 *   %3 = general register tracking the running byte count
 * Copies of at most 7 bytes jump straight to the "rep; movsb" at 1:.
 * Larger copies first advance the destination to an 8-byte boundary
 * with byte moves (4:), then copy whole dwords (0:) and finish the
 * 0-3 byte tail (1:).  Fixups: 5: (fault while aligning) remaining =
 * %0 + %3; 3: (fault in the dword copy) remaining = %3 + 4 * %0.
 * Like __do_clear_user(), @size is updated in place via "=&c".
 */
#define __copy_user(to, from, size)			\
do {							\
	int __d0, __d1, __d2;				\
	__asm__ __volatile__(				\
		" cmp $7,%0\n"				\
		" jbe 1f\n"				\
		" movl %1,%0\n"				\
		" negl %0\n"				\
		" andl $7,%0\n"				\
		" subl %0,%3\n"				\
		"4: rep; movsb\n"			\
		" movl %3,%0\n"				\
		" shrl $2,%0\n"				\
		" andl $3,%3\n"				\
		" .align 2,0x90\n"			\
		"0: rep; movsl\n"			\
		" movl %3,%0\n"				\
		"1: rep; movsb\n"			\
		"2:\n"					\
		".section .fixup,\"ax\"\n"		\
		"5: addl %3,%0\n"			\
		" jmp 2b\n"				\
		"3: lea 0(%3,%0,4),%0\n"		\
		" jmp 2b\n"				\
		".previous\n"				\
		_ASM_EXTABLE(4b,5b)			\
		_ASM_EXTABLE(0b,3b)			\
		_ASM_EXTABLE(1b,2b)			\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
		: "3"(size), "0"(size), "1"(to), "2"(from) \
		: "memory");				\
} while (0)
331 | ||
/*
 * Low-level bulk copy.  Both parameters are plain pointers; presumably
 * one side is a user address depending on the caller (see uaccess.h) —
 * hence the stac()/clac() pair opening and closing the user-access
 * window around the copy.  Picks the plain rep-movsl copy when the
 * endpoints are suitably aligned relative to each other, otherwise the
 * unrolled Intel variant.  Returns the number of bytes NOT copied;
 * 0 on success.
 */
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	stac();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);	/* updates n in place */
	else
		n = __copy_user_intel(to, from, n);
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);
7c12d811 | 343 | |
/*
 * Copy from user space without polluting the cache with the
 * destination.  "nozero": on a fault the uncopied tail of @to is left
 * untouched — no zeroing is done here.  Returns bytes NOT copied.
 */
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	stac();
#ifdef CONFIG_X86_INTEL_USERCOPY
	/*
	 * The non-temporal path needs SSE2 (movnti) and only pays off
	 * for copies larger than 64 bytes.
	 */
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);