/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * vineetg: June 2010
 *    -__clear_user(), called multiple times during elf load, had its byte
 *     loop converted to do as much word clearing as possible.
 *
 * vineetg: Dec 2009
 *    -Hand crafted constant propagation for "constant" copy sizes
 *    -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *    -Added option to (UN)inline copy_(to|from)_user to reduce code sz
 *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *    -Enabled when doing -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/string.h>	/* for generic string functions */


#define __kernel_ok		(uaccess_kernel())

/*
 * Algorithmically, for __user_ok() we want to do:
 *	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * because it essentially checks that the buffer end is within the limit and
 * that @len is non-negative, which together imply that the buffer start is
 * within the limit too.
 *
 * The reason for rewriting is that, for the majority of cases, @len is a
 * compile time constant, causing the first sub-expression to be subsumed at
 * compile time.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check against TASK_SIZE using get_fs(), since the addr_limit load
 * from memory will already have been done at this call site for __kernel_ok().
 *
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 ((addr) <= (get_fs() - (sz))))
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))

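/*
 * Illustrative sketch (not part of the original header): a plain C model of
 * the check above, with a made-up 0x60000000 limit standing in for the
 * get_fs()/TASK_SIZE value. Once "sz <= limit" holds, "limit - sz" cannot
 * underflow, so "addr <= limit - sz" is an overflow-safe way of asking
 * whether the whole buffer ends at or below the limit.
 */
#if 0
static int example_user_ok(unsigned long addr, unsigned long sz)
{
	unsigned long limit = 0x60000000UL;	/* stand-in for TASK_SIZE */

	/* buffer of @sz bytes starting at @addr must end at or below @limit */
	return (sz <= limit) && (addr <= limit - sz);
}
#endif
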
/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)						\
({									\
	long __ret = 0;	/* success by default */			\
	switch (sz) {							\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret);  break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret);     break;	\
	}								\
	__ret;								\
})

/*
 * Returns 0 on success, -EFAULT if not.
 * @ret already contains 0 - given that errors will be less likely
 * (hence +r asm constraint below).
 * In case of error, fixup code will make it -EFAULT
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op"	%1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(			\
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	mov %R1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)						\
({									\
	long __ret = 0;	/* success by default */			\
	switch (sz) {							\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret);  break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret);     break;	\
	}								\
	__ret;								\
})

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op"	%1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(			\
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))


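/*
 * Illustrative sketch (not part of the original header): how the size
 * dispatch above is typically reached. The wrapper name example_read_u32()
 * is hypothetical; in practice the generic get_user()/put_user() helpers
 * pulled in from asm-generic/uaccess.h at the bottom of this file expand
 * into __get_user_fn()/__put_user_fn().
 */
#if 0
static inline int example_read_u32(unsigned int *val,
				   const unsigned int __user *uptr)
{
	/* sizeof() == 4 selects the "ld" case above; returns 0 or -EFAULT */
	return __get_user_fn(sizeof(*uptr), uptr, val);
}
#endif
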
static inline unsigned long
__arc_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note as an '&' earlyclobber operand to make sure the
		 * temporary register inside the loop is not the same as
		 * FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code sz of the
	 * laddered copy 16x,8,4,2,1
	 */
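	/*
	 * Worked example (illustrative): for a compile-time constant n = 23
	 * only the needed rungs of the ladder below survive: one pass of the
	 * 16-byte loop, then the 4-, 2- and 1-byte tails (23 = 16 + 4 + 2 + 1);
	 * the 8-byte rung is optimized away since (23 % 16) / 8 == 0.
	 */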
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab   %3, [%2,2]		\n"
			"	stw.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab   %3, [%2,2]		\n"
			"	stb.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,1		\n"
			"34:	; nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3			\n"
		"	lsr.f  lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz   3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

extern unsigned long slowpath_copy_to_user(void __user *to, const void *from,
					    unsigned long n);

static inline unsigned long
__arc_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note as an '&' earlyclobber operand to make sure the
		 * temporary register inside the loop is not the same as
		 * FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:;nop					\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"	ld.ab   %4, [%2,4]		\n"
			"14:	st.ab   %3, [%1,4]		\n"
			"15:	st.ab   %4, [%1,4]		\n"
			"	sub     %0, %0, 8		\n"
			"31:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"16:	st.ab   %3, [%1,4]		\n"
			"	sub     %0, %0, 4		\n"
			"32:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab   %3, [%2,2]		\n"
			"17:	stw.ab   %3, [%1,2]		\n"
			"	sub      %0, %0, 2		\n"
			"33:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab  %3, [%2,1]		\n"
			"18:	stb.ab  %3, [%1,1]		\n"
			"	sub     %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n"  /* stragglers */
		"	bz 34f				\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"	ld.ab   %6, [%2,4]		\n"
		"14:	st.ab   %5, [%1,4]		\n"
		"15:	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0, %0, 8		\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"16:	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0, %0, 4		\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"	ldw.ab  %5, [%2,2]		\n"
		"17:	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0, %0, 2		\n"
		"33:	bbit0   %3,0,34f		\n"
		"	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0, %0, 1		\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "lp_start", "lp_end", "memory");

	return res;
}
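
/*
 * Illustrative sketch (not part of the original header): the plain C shape of
 * __arc_clear_user() above, with the fault-fixup machinery left out. Head
 * alignment is driven by the destination address (labels 75/76), the word
 * loop by the residual count (label 77), and the tail by its low bits
 * (labels 78/79). The function name is hypothetical.
 */
#if 0
static inline unsigned long example_clear_shape(void __user *to, unsigned long n)
{
	char *p = (char *)to;
	long res = n, words;

	if ((unsigned long)p & 1) {		/* 75: byte to halfword-align */
		*p++ = 0;
		res -= 1;
	}
	if ((unsigned long)p & 2) {		/* 76: halfword to word-align */
		*(short *)p = 0;
		p += 2;
		res -= 2;
	}
	for (words = res >> 2; words > 0; words--) {	/* 77: word loop */
		*(int *)p = 0;
		p += 4;
		res -= 4;
	}
	if (res & 2) {				/* 78: halfword tail */
		*(short *)p = 0;
		p += 2;
		res -= 2;
	}
	if (res & 1) {				/* 79: byte tail */
		*p = 0;
		res -= 1;
	}
	return res;				/* bytes not cleared */
}
#endif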

static inline long
__arc_strncpy_from_user(char *dst, const char __user *src, long count)
{
	long res = 0;
	char val;

	if (count == 0)
		return 0;

	__asm__ __volatile__(
	"	lp	3f			\n"
	"1:	ldb.ab  %3, [%2, 1]		\n"
	"	breq.d	%3, 0, 3f		\n"
	"	stb.ab  %3, [%1, 1]		\n"
	"	add	%0, %0, 1	# Num of NON NULL bytes copied	\n"
	"3:								\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, %4		# sets @res as -EFAULT	\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "+r"(res), "+r"(dst), "+r"(src), "=r"(val)
	: "g"(-EFAULT), "l"(count)
	: "memory");

	return res;
}
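
/*
 * Worked example for __arc_strncpy_from_user() above (illustrative): copying
 * the user string "hi" with count = 8 stores 'h', 'i' and the terminating
 * NUL and returns 2, i.e. the length excluding the NUL; if no NUL occurs in
 * the first count bytes the loop runs out and count is returned; a faulting
 * load returns -EFAULT via the fixup at label 4.
 */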

static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab  %3, [%0, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	sub.f   %2, %2, 1		\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word 1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}
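
/*
 * Worked example for __arc_strnlen_user() above (illustrative): for a user
 * string "ab" (three bytes including the NUL) and n = 10, the loop reads
 * 'a', 'b', '\0' and the final subtraction yields 3, i.e. the length
 * including the terminator; if no NUL is found in the first n bytes the
 * result is n + 1, and a faulting load returns 0 via the fixup at label 4.
 */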

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE
#define __copy_from_user(t, f, n)	__arc_copy_from_user(t, f, n)
#define __copy_to_user(t, f, n)	__arc_copy_to_user(t, f, n)
#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)
#else
extern long arc_copy_from_user_noinline(void *to, const void __user *from,
		unsigned long n);
extern long arc_copy_to_user_noinline(void __user *to, const void *from,
		unsigned long n);
extern unsigned long arc_clear_user_noinline(void __user *to,
		unsigned long n);
extern long arc_strncpy_from_user_noinline(char *dst, const char __user *src,
		long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __copy_from_user(t, f, n)	arc_copy_from_user_noinline(t, f, n)
#define __copy_to_user(t, f, n)		arc_copy_to_user_noinline(t, f, n)
#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif

#include <asm-generic/uaccess.h>

#endif