]> git.proxmox.com Git - mirror_ubuntu-hirsute-kernel.git/blame - arch/m68k/include/asm/uaccess_mm.h
License cleanup: add SPDX GPL-2.0 license identifier to files with no license
[mirror_ubuntu-hirsute-kernel.git] / arch / m68k / include / asm / uaccess_mm.h
CommitLineData
b2441318 1/* SPDX-License-Identifier: GPL-2.0 */
1da177e4
LT
2#ifndef __M68K_UACCESS_H
3#define __M68K_UACCESS_H
4
5/*
6 * User space memory access functions
7 */
d94af931 8#include <linux/compiler.h>
d94af931 9#include <linux/types.h>
1da177e4
LT
10#include <asm/segment.h>
11
1da177e4 12/* We let the MMU do all checking */
70f9cac5
GU
13static inline int access_ok(int type, const void __user *addr,
14 unsigned long size)
15{
16 return 1;
17}
1da177e4 18
e08d703c
GU
/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space.  Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define in what we need to use.
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define	MOVES	"moves"
#else
#define	MOVES	"move"
#endif
34
/* Out-of-line handlers for unsupported transfer sizes (link-time error). */
extern int __put_user_bad(void);
extern int __get_user_bad(void);

/*
 * Store a single value of size 'bwl' (b/w/l) to user space.
 * 'res' is left at 0 on success; if the store at label 1 faults, the
 * __ex_table entry sends control to the fixup at label 10, which loads
 * 'err' into 'res'.  'reg' is the constraint letter for the source value.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err)			\
asm volatile ("\n"							\
	"1:	"MOVES"."#bwl"	%2,%1\n"				\
	"2:\n"								\
	"	.section .fixup,\"ax\"\n"				\
	"	.even\n"						\
	"10:	moveq.l	%3,%0\n"					\
	"	jra 2b\n"						\
	"	.previous\n"						\
	"\n"								\
	"	.section __ex_table,\"a\"\n"				\
	"	.align	4\n"						\
	"	.long	1b,10b\n"					\
	"	.long	2b,10b\n"					\
	"	.previous"						\
	: "+d" (res), "=m" (*(ptr))					\
	: #reg (x), "i" (err))
1da177e4
LT
55
/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

/*
 * Write 'x' to user pointer 'ptr'; evaluates to 0 or -EFAULT.
 * Sizes 1/2/4 go through __put_user_asm; size 8 is done inline as two
 * long moves (%R2 is the low half of the 64-bit register pair), with
 * all three fault points routed to the same fixup.
 */
#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1:	"MOVES".l	%2,(%1)+\n"		\
			"2:	"MOVES".l	%R2,(%1)\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	movel	%3,%0\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.long	3b,10b\n"			\
			"	.previous"				\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)
1da177e4 107
1da177e4 108
09a2f7cf
MT
/*
 * Load a single value of size 'bwl' (b/w/l) from user space into 'x'.
 * 'res' is left at 0 on success; on a fault at label 1, the fixup at
 * label 10 loads 'err' into 'res' and zeroes the destination register
 * (sub.l %1,%1) so the caller never sees uninitialized data.
 * 'type' is the unsigned intermediate type the value is read into.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;						\
	asm volatile ("\n"					\
		"1:	"MOVES"."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub.l	%1,%1\n"			\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.previous"				\
		: "+d" (res), "=&" #reg (__gu_val)		\
		: "m" (*(ptr)), "i" (err));			\
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
})
1da177e4 129
d94af931
RZ
/*
 * Read *ptr from user space into 'x'; evaluates to 0 or -EFAULT.
 * Only sizes 1/2/4 are supported inline; the 8-byte variant below was
 * disabled upstream (broken typeof in gcc-4.1) and anything else is a
 * link-time error via __get_user_bad().
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
/*	case 8:	disabled because gcc-4.1 has a broken typeof		\
	    {								\
		const void *__gu_ptr = (ptr);				\
		u64 __gu_val;						\
		asm volatile ("\n"					\
			"1:	"MOVES".l	(%2)+,%1\n"		\
			"2:	"MOVES".l	(%2),%R1\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	move.l	%3,%0\n"			\
			"	sub.l	%1,%1\n"			\
			"	sub.l	%R1,%R1\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.previous"				\
			: "+d" (__gu_err), "=&r" (__gu_val),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = (__force typeof(*(ptr)))__gu_val;			\
		break;							\
	    }	*/							\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
1da177e4 179
d94af931
RZ
180unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
181unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
1da177e4 182
7cefa5a0
AV
183#define __suffix0
184#define __suffix1 b
185#define __suffix2 w
186#define __suffix4 l
187
188#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
53617825 189 asm volatile ("\n" \
e08d703c 190 "1: "MOVES"."#s1" (%2)+,%3\n" \
53617825 191 " move."#s1" %3,(%1)+\n" \
7cefa5a0 192 " .ifnc \""#s2"\",\"\"\n" \
e08d703c 193 "2: "MOVES"."#s2" (%2)+,%3\n" \
53617825
RZ
194 " move."#s2" %3,(%1)+\n" \
195 " .ifnc \""#s3"\",\"\"\n" \
e08d703c 196 "3: "MOVES"."#s3" (%2)+,%3\n" \
53617825
RZ
197 " move."#s3" %3,(%1)+\n" \
198 " .endif\n" \
7cefa5a0 199 " .endif\n" \
53617825
RZ
200 "4:\n" \
201 " .section __ex_table,\"a\"\n" \
202 " .align 4\n" \
203 " .long 1b,10f\n" \
7cefa5a0 204 " .ifnc \""#s2"\",\"\"\n" \
53617825
RZ
205 " .long 2b,20f\n" \
206 " .ifnc \""#s3"\",\"\"\n" \
207 " .long 3b,30f\n" \
208 " .endif\n" \
7cefa5a0 209 " .endif\n" \
53617825
RZ
210 " .previous\n" \
211 "\n" \
212 " .section .fixup,\"ax\"\n" \
213 " .even\n" \
7cefa5a0
AV
214 "10: addq.l #"#n1",%0\n" \
215 " .ifnc \""#s2"\",\"\"\n" \
216 "20: addq.l #"#n2",%0\n" \
53617825 217 " .ifnc \""#s3"\",\"\"\n" \
7cefa5a0
AV
218 "30: addq.l #"#n3",%0\n" \
219 " .endif\n" \
53617825 220 " .endif\n" \
53617825
RZ
221 " jra 4b\n" \
222 " .previous\n" \
223 : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
224 : : "memory")
225
7cefa5a0
AV
226#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
227 ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
228#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3) \
229 ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, \
230 __suffix##n1, __suffix##n2, __suffix##n3)
231
d94af931
RZ
232static __always_inline unsigned long
233__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
1da177e4 234{
d94af931 235 unsigned long res = 0, tmp;
1da177e4 236
d94af931 237 switch (n) {
1da177e4 238 case 1:
7cefa5a0 239 __constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
53617825 240 break;
1da177e4 241 case 2:
7cefa5a0 242 __constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
53617825
RZ
243 break;
244 case 3:
7cefa5a0 245 __constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
53617825 246 break;
d94af931 247 case 4:
7cefa5a0 248 __constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
53617825
RZ
249 break;
250 case 5:
7cefa5a0 251 __constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
53617825
RZ
252 break;
253 case 6:
7cefa5a0 254 __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
53617825
RZ
255 break;
256 case 7:
7cefa5a0 257 __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
53617825
RZ
258 break;
259 case 8:
7cefa5a0 260 __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
53617825
RZ
261 break;
262 case 9:
7cefa5a0 263 __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
53617825
RZ
264 break;
265 case 10:
7cefa5a0 266 __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
53617825
RZ
267 break;
268 case 12:
7cefa5a0 269 __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
53617825
RZ
270 break;
271 default:
272 /* we limit the inlined version to 3 moves */
273 return __generic_copy_from_user(to, from, n);
1da177e4 274 }
1da177e4 275
d94af931
RZ
276 return res;
277}
1da177e4 278
53617825
RZ
/*
 * Copy up to three chunks of suffix sizes s1/s2/s3 to user space; 'n' is
 * the total byte count.  An empty s3 drops the third chunk via .ifnc.
 * Unlike the from-user variant, any fault simply reports the whole count
 * 'n' as uncopied (fixup at label 5 loads #n into 'res').
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
312
d94af931 313static __always_inline unsigned long
11c40f8a 314__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
1da177e4 315{
d94af931
RZ
316 unsigned long res = 0, tmp;
317
d94af931 318 switch (n) {
1da177e4 319 case 1:
b971018b 320 __put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
53617825 321 break;
1da177e4 322 case 2:
631d8b67 323 __put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
53617825
RZ
324 break;
325 case 3:
326 __constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
327 break;
d94af931 328 case 4:
b971018b 329 __put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
53617825
RZ
330 break;
331 case 5:
332 __constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
333 break;
334 case 6:
335 __constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
336 break;
337 case 7:
338 __constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
339 break;
340 case 8:
341 __constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
342 break;
343 case 9:
344 __constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
345 break;
346 case 10:
347 __constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
348 break;
349 case 12:
350 __constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
351 break;
352 default:
353 /* limit the inlined version to 3 moves */
354 return __generic_copy_to_user(to, from, n);
1da177e4 355 }
d94af931 356
d94af931 357 return res;
1da177e4
LT
358}
359
29be02eb
AV
360static inline unsigned long
361raw_copy_from_user(void *to, const void __user *from, unsigned long n)
362{
363 if (__builtin_constant_p(n))
364 return __constant_copy_from_user(to, from, n);
365 return __generic_copy_from_user(to, from, n);
366}
1da177e4 367
7cefa5a0 368static inline unsigned long
29be02eb 369raw_copy_to_user(void __user *to, const void *from, unsigned long n)
7cefa5a0 370{
29be02eb
AV
371 if (__builtin_constant_p(n))
372 return __constant_copy_to_user(to, from, n);
373 return __generic_copy_to_user(to, from, n);
7cefa5a0 374}
29be02eb
AV
375#define INLINE_COPY_FROM_USER
376#define INLINE_COPY_TO_USER
1da177e4 377
d8ce7263 378#define user_addr_max() \
db68ce10 379 (uaccess_kernel() ? ~0UL : TASK_SIZE)
d8ce7263
GU
380
381extern long strncpy_from_user(char *dst, const char __user *src, long count);
d8ce7263
GU
382extern __must_check long strnlen_user(const char __user *str, long n);
383
3c46bdca
GU
384unsigned long __clear_user(void __user *to, unsigned long n);
385
386#define clear_user __clear_user
1da177e4 387
#endif /* __M68K_UACCESS_H */