1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef __M68K_UACCESS_H
3#define __M68K_UACCESS_H
4
5#ifdef CONFIG_MMU
6
7/*
8 * User space memory access functions
9 */
10#include <linux/compiler.h>
11#include <linux/types.h>
12#include <asm/extable.h>
13#include <asm-generic/access_ok.h>
14
/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define what we need to use.
 */
/*
 * MOVES expands to the mnemonic used for user-space accesses in the
 * inline asm below: "moves" (driven by the sfc/dfc alternate address
 * space registers) where the CPU supports address spaces, otherwise the
 * plain "move" instruction (e.g. ColdFire).
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define	MOVES	"moves"
#else
#define	MOVES	"move"
#endif
30
/*
 * __put_user_asm(): store a 1/2/4-byte value with exception handling.
 *
 * @inst: instruction mnemonic (MOVES for user space, "move" for kernel)
 * @res:  error variable; the fixup at 10: loads @err into it on a fault
 * @x:    value to store
 * @ptr:  destination pointer
 * @bwl:  operand size suffix (b, w or l)
 * @reg:  register constraint for @x ("d" for bytes, "r" otherwise)
 * @err:  error code reported in @res on a fault
 *
 * Both the store at 1: and the continuation label 2: carry __ex_table
 * entries pointing at the fixup, which sets @res and resumes after the
 * store.
 */
#define __put_user_asm(inst, res, x, ptr, bwl, reg, err) \
asm volatile ("\n" \
	"1:	"inst"."#bwl"	%2,%1\n" \
	"2:\n" \
	"	.section .fixup,\"ax\"\n" \
	"	.even\n" \
	"10:	moveq.l	%3,%0\n" \
	"	jra 2b\n" \
	"	.previous\n" \
	"\n" \
	"	.section __ex_table,\"a\"\n" \
	"	.align	4\n" \
	"	.long	1b,10b\n" \
	"	.long	2b,10b\n" \
	"	.previous" \
	: "+d" (res), "=m" (*(ptr)) \
	: #reg (x), "i" (err))
48
/*
 * __put_user_asm8(): store a 64-bit value as two longword moves with
 * exception handling.  %R2 names the other half of the register pair
 * holding @x.  @ptr is copied to a local because the asm
 * post-increments the address register.  A fault at 1:, 2: or 3:
 * enters the fixup at 10:, which sets @res to -EFAULT.
 */
#define __put_user_asm8(inst, res, x, ptr)			\
do {								\
	const void *__pu_ptr = (const void __force *)(ptr);	\
								\
	asm volatile ("\n"					\
		"1:	"inst".l %2,(%1)+\n"			\
		"2:	"inst".l %R2,(%1)\n"			\
		"3:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	movel %3,%0\n"				\
		"	jra 3b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align 4\n"				\
		"	.long 1b,10b\n"				\
		"	.long 2b,10b\n"				\
		"	.long 3b,10b\n"				\
		"	.previous"				\
		: "+d" (res), "+a" (__pu_ptr)			\
		: "r" (x), "i" (-EFAULT)			\
		: "memory");					\
} while (0)
73
74/*
75 * These are the main single-value transfer routines. They automatically
76 * use the right size if we just have the right pointer type.
77 */
78
/*
 * __put_user(x, ptr): store @x at user address @ptr.  Evaluates to 0 on
 * success or -EFAULT if the store faulted.  The size of *(ptr) selects
 * the 1/2/4-byte __put_user_asm() or the 8-byte __put_user_asm8();
 * any other size fails the build via BUILD_BUG().  Byte stores use the
 * "d" (data register) constraint, larger sizes "r".
 * put_user() is simply an alias for __put_user().
 */
#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, b, d, -EFAULT); \
		break;							\
	case 2:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, w, r, -EFAULT); \
		break;							\
	case 4:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, l, r, -EFAULT); \
		break;							\
	case 8:								\
		__put_user_asm8(MOVES, __pu_err, __pu_val, ptr);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)
103
104
/*
 * __get_user_asm(): load a 1/2/4-byte value with exception handling.
 *
 * The value is read into a local of @type and then cast to the
 * pointer's type, so @x itself may be wider.  On a fault the fixup
 * stores @err in @res and zeroes the destination register
 * ("sub.l %1,%1"), so @x reads as 0 after a failed access.
 */
#define __get_user_asm(inst, res, x, ptr, type, bwl, reg, err) ({ \
	type __gu_val; \
	asm volatile ("\n" \
		"1:	"inst"."#bwl"	%2,%1\n" \
		"2:\n" \
		"	.section .fixup,\"ax\"\n" \
		"	.even\n" \
		"10:	move.l	%3,%0\n" \
		"	sub.l	%1,%1\n" \
		"	jra	2b\n" \
		"	.previous\n" \
		"\n" \
		"	.section __ex_table,\"a\"\n" \
		"	.align	4\n" \
		"	.long	1b,10b\n" \
		"	.previous" \
		: "+d" (res), "=&" #reg (__gu_val) \
		: "m" (*(ptr)), "i" (err)); \
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
})
125
/*
 * __get_user_asm8(): load a 64-bit value as two longword moves with
 * exception handling.  The u64/pointer-type union lets the register
 * pair result be assigned back through the pointer's own type without
 * a cast.  %R1 is the other half of the register pair; the address
 * register is post-incremented, hence the local pointer copy.  On a
 * fault, both halves are zeroed and @res is set to -EFAULT.
 */
#define __get_user_asm8(inst, res, x, ptr) 			\
do {								\
	const void *__gu_ptr = (const void __force *)(ptr);	\
	union {							\
		u64 l;						\
		__typeof__(*(ptr)) t;				\
	} __gu_val;						\
								\
	asm volatile ("\n"					\
		"1:	"inst".l (%2)+,%1\n"			\
		"2:	"inst".l (%2),%R1\n"			\
		"3:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub.l	%1,%1\n"			\
		"	sub.l	%R1,%R1\n"			\
		"	jra	3b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.long	2b,10b\n"			\
		"	.previous"				\
		: "+d" (res), "=&r" (__gu_val.l),		\
		  "+a" (__gu_ptr)				\
		: "i" (-EFAULT)					\
		: "memory");					\
	(x) = __gu_val.t;					\
} while (0)
157
/*
 * __get_user(x, ptr): load *(ptr) from user space into @x.  Evaluates
 * to 0 on success or -EFAULT on a fault (with @x zeroed).  Dispatches
 * on sizeof(*(ptr)) to the 1/2/4-byte __get_user_asm() or the 8-byte
 * __get_user_asm8(); other sizes fail the build.  get_user() is an
 * alias for __get_user().
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u8, b, d, -EFAULT); \
		break;							\
	case 2:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u16, w, r, -EFAULT); \
		break;							\
	case 4:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u32, l, r, -EFAULT); \
		break;							\
	case 8:								\
		__get_user_asm8(MOVES, __gu_err, x, ptr);		\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
181
182unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
183unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
184
/*
 * Map a constant chunk size in bytes (0/1/2/4) to the matching m68k
 * operand size suffix ("", b, w, l).  Token-pasted and expanded in
 * __constant_copy_from_user_asm() below.
 */
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l
189
/*
 * ____constant_copy_from_user_asm(): copy up to three chunks of
 * @n1/@n2/@n3 bytes (operand suffixes @s1/@s2/@s3) from user space.
 * Chunks whose suffix is empty are assembled away by the .ifnc
 * conditionals.  Only the user-space loads (1:/2:/3:) get __ex_table
 * entries.  The fixup blocks deliberately fall through each other, so
 * a fault in chunk k adds the sizes of chunk k and all later chunks
 * to @res, i.e. @res accumulates the number of bytes not copied.
 *
 * The two wrapper layers below exist to force expansion of the
 * __suffix##n token pastes before the inner macro stringifies them.
 */
#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	"MOVES"."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"2:	"MOVES"."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	"MOVES"."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	addq.l	#"#n1",%0\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"20:	addq.l	#"#n2",%0\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	addq.l	#"#n3",%0\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,  \
					__suffix##n1, __suffix##n2, __suffix##n3)
233
/*
 * __constant_copy_from_user() - inlined copy_from_user() for a
 * compile-time-constant size.  Handles @n in {1..10, 12} with at most
 * three move instructions; every other size falls back to the
 * out-of-line __generic_copy_from_user().  Returns the number of
 * bytes that could not be copied (0 on success).
 */
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}
280
/*
 * __constant_copy_to_user_asm(): copy two or three chunks (operand
 * suffixes @s1/@s2/@s3; @s3 may be empty) totalling @n bytes to user
 * space.  Only the user-space stores (and the labels following them)
 * carry __ex_table entries.  Any fault jumps to 5:, which reports all
 * @n bytes as uncopied (conservative, unlike the from-user variant).
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
314
/*
 * __constant_copy_to_user() - inlined copy_to_user() for a
 * compile-time-constant size (@n in {1..10, 12}); other sizes use the
 * out-of-line __generic_copy_to_user().  Returns the number of bytes
 * not copied (0 on success).  The single-move cases reuse
 * __put_user_asm() with @err set to the positive byte count, so a
 * fault there also yields "bytes not copied" like the multi-move
 * cases.
 */
static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(MOVES, res, *(u8 *)from, (u8 __user *)to,
				b, d, 1);
		break;
	case 2:
		__put_user_asm(MOVES, res, *(u16 *)from, (u16 __user *)to,
				w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(MOVES, res, *(u32 *)from, (u32 __user *)to,
				l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}
364
365static inline unsigned long
366raw_copy_from_user(void *to, const void __user *from, unsigned long n)
367{
368 if (__builtin_constant_p(n))
369 return __constant_copy_from_user(to, from, n);
370 return __generic_copy_from_user(to, from, n);
371}
372
373static inline unsigned long
374raw_copy_to_user(void __user *to, const void *from, unsigned long n)
375{
376 if (__builtin_constant_p(n))
377 return __constant_copy_to_user(to, from, n);
378 return __generic_copy_to_user(to, from, n);
379}
380#define INLINE_COPY_FROM_USER
381#define INLINE_COPY_TO_USER
382
/*
 * __get_kernel_nofault() - read a value of @type from kernel address
 * @src into @dst without oopsing on a fault; jumps to @err_label if
 * the access faults.  Uses the plain "move" instruction rather than
 * MOVES because both source and destination are kernel addresses.
 */
#define __get_kernel_nofault(dst, src, type, err_label)			\
do {									\
	type *__gk_dst = (type *)(dst);					\
	type *__gk_src = (type *)(src);					\
	int __gk_err = 0;						\
									\
	switch (sizeof(type)) {						\
	case 1:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u8, b, d, -EFAULT);			\
		break;							\
	case 2:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u16, w, r, -EFAULT);			\
		break;							\
	case 4:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u32, l, r, -EFAULT);			\
		break;							\
	case 8:								\
		__get_user_asm8("move", __gk_err, *__gk_dst, __gk_src);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	if (unlikely(__gk_err))						\
		goto err_label;						\
} while (0)
411
/*
 * __put_kernel_nofault() - write a value of @type to kernel address
 * @dst without oopsing on a fault; jumps to @err_label if the access
 * faults.  Like __get_kernel_nofault(), uses plain "move" since both
 * sides are kernel addresses.
 */
#define __put_kernel_nofault(dst, src, type, err_label)			\
do {									\
	type __pk_src = *(type *)(src);					\
	type *__pk_dst = (type *)(dst);					\
	int __pk_err = 0;						\
									\
	switch (sizeof(type)) {						\
	case 1:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				b, d, -EFAULT);				\
		break;							\
	case 2:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				w, r, -EFAULT);				\
		break;							\
	case 4:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				l, r, -EFAULT);				\
		break;							\
	case 8:								\
		__put_user_asm8("move", __pk_err, __pk_src, __pk_dst);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	if (unlikely(__pk_err))						\
		goto err_label;						\
} while (0)
440
441extern long strncpy_from_user(char *dst, const char __user *src, long count);
442extern __must_check long strnlen_user(const char __user *str, long n);
443
444unsigned long __clear_user(void __user *to, unsigned long n);
445
446#define clear_user __clear_user
447
448#else /* !CONFIG_MMU */
449#include <asm-generic/uaccess.h>
450#endif
451
#endif /* __M68K_UACCESS_H */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * NOTE(review): everything below is a second, older copy of this header
 * concatenated onto the file.  Its include guard makes it a no-op at
 * compile time, so it is dead text — likely a merge/extraction
 * artifact; confirm and remove the duplicate.
 */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H
4
5#ifdef CONFIG_MMU
6
7/*
8 * User space memory access functions
9 */
10#include <linux/compiler.h>
11#include <linux/types.h>
12#include <asm/segment.h>
13#include <asm/extable.h>
14
/* We let the MMU do all checking */
static inline int access_ok(const void __user *addr,
			    unsigned long size)
{
	/*
	 * Always succeed: a bad user pointer faults in the MMU and is
	 * recovered through the __ex_table fixups in the accessors.
	 */
	return 1;
}
21
/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define what we need to use.
 */
/*
 * MOVES expands to the mnemonic used for user-space accesses: "moves"
 * (sfc/dfc-driven) on CPUs with address spaces, plain "move" otherwise.
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define	MOVES	"moves"
#else
#define	MOVES	"move"
#endif
37
38extern int __put_user_bad(void);
39extern int __get_user_bad(void);
40
/*
 * __put_user_asm(): store a 1/2/4-byte value to user space with
 * exception handling.  @res is set to @err by the fixup at 10: on a
 * fault; @bwl is the operand size suffix and @reg the register
 * constraint for @x ("d" for bytes, "r" otherwise).  Both the store at
 * 1: and the label 2: have __ex_table entries pointing at the fixup.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err) \
asm volatile ("\n" \
	"1:	"MOVES"."#bwl"	%2,%1\n" \
	"2:\n" \
	"	.section .fixup,\"ax\"\n" \
	"	.even\n" \
	"10:	moveq.l	%3,%0\n" \
	"	jra 2b\n" \
	"	.previous\n" \
	"\n" \
	"	.section __ex_table,\"a\"\n" \
	"	.align	4\n" \
	"	.long	1b,10b\n" \
	"	.long	2b,10b\n" \
	"	.previous" \
	: "+d" (res), "=m" (*(ptr)) \
	: #reg (x), "i" (err))
58
59/*
60 * These are the main single-value transfer routines. They automatically
61 * use the right size if we just have the right pointer type.
62 */
63
/*
 * __put_user(x, ptr): store @x at user address @ptr; evaluates to 0 on
 * success or -EFAULT on a fault.  1/2/4-byte stores go through
 * __put_user_asm(); the 8-byte case is open-coded as two longword
 * moves (%R2 is the other half of the register pair).  Unsupported
 * sizes resolve to the undefined __put_user_bad(), failing the link.
 * put_user() is an alias for __put_user().
 */
#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1:	"MOVES".l	%2,(%1)+\n"		\
			"2:	"MOVES".l	%R2,(%1)\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	movel %3,%0\n"				\
			"	jra 3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align 4\n"				\
			"	.long 1b,10b\n"				\
			"	.long 2b,10b\n"				\
			"	.long 3b,10b\n"				\
			"	.previous"				\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)
110
111
/*
 * __get_user_asm(): load a 1/2/4-byte value from user space with
 * exception handling.  The value is read into a local of @type and
 * cast back to the pointer's type.  On a fault, @res is set to @err
 * and the destination register is zeroed, so @x reads as 0.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;						\
	asm volatile ("\n"					\
		"1:	"MOVES"."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub.l	%1,%1\n"			\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.previous"				\
		: "+d" (res), "=&" #reg (__gu_val)		\
		: "m" (*(ptr)), "i" (err));			\
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
})
132
/*
 * __get_user(x, ptr): load *(ptr) from user space into @x; evaluates
 * to 0 on success or -EFAULT on a fault (with @x zeroed).  The 8-byte
 * case is open-coded as two longword moves into a u64/pointer-type
 * union so the result can be assigned without a cast; %R1 is the
 * other half of the register pair.  Unsupported sizes resolve to the
 * undefined __get_user_bad(), failing the link.  get_user() is an
 * alias for __get_user().
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
	case 8: {							\
		const void __user *__gu_ptr = (ptr);			\
		union {							\
			u64 l;						\
			__typeof__(*(ptr)) t;				\
		} __gu_val;						\
		asm volatile ("\n"					\
			"1:	"MOVES".l	(%2)+,%1\n"		\
			"2:	"MOVES".l	(%2),%R1\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	move.l	%3,%0\n"			\
			"	sub.l	%1,%1\n"			\
			"	sub.l	%R1,%R1\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.previous"				\
			: "+d" (__gu_err), "=&r" (__gu_val.l),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = __gu_val.t;					\
		break;							\
	}								\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
184
185unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
186unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
187
/*
 * Map a constant chunk size in bytes (0/1/2/4) to the matching m68k
 * operand size suffix ("", b, w, l); token-pasted below.
 */
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l
192
/*
 * ____constant_copy_from_user_asm(): copy up to three chunks of
 * @n1/@n2/@n3 bytes (suffixes @s1/@s2/@s3) from user space.  Empty
 * suffixes are assembled away by .ifnc.  The fixup blocks fall through
 * one another, so a fault in chunk k adds the sizes of that chunk and
 * every later chunk to @res — i.e. the number of bytes not copied.
 * The wrapper layers force expansion of the __suffix##n token pastes
 * before stringification.
 */
#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	"MOVES"."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"2:	"MOVES"."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	"MOVES"."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	addq.l	#"#n1",%0\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"20:	addq.l	#"#n2",%0\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	addq.l	#"#n3",%0\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,  \
					__suffix##n1, __suffix##n2, __suffix##n3)
236
/*
 * __constant_copy_from_user() - inlined copy_from_user() for a
 * compile-time-constant @n in {1..10, 12}; at most three moves.
 * Other sizes fall back to the out-of-line generic routine.  Returns
 * the number of bytes not copied (0 on success).
 */
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}
283
/*
 * __constant_copy_to_user_asm(): copy two or three chunks (suffixes
 * @s1/@s2/@s3; @s3 may be empty) totalling @n bytes to user space.
 * Only the user-space stores and their following labels carry
 * __ex_table entries; any fault jumps to 5:, which reports all @n
 * bytes as uncopied.
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
317
/*
 * __constant_copy_to_user() - inlined copy_to_user() for a
 * compile-time-constant @n in {1..10, 12}; other sizes use the
 * out-of-line generic copy.  Returns the number of bytes not copied
 * (0 on success).  Single-move sizes reuse __put_user_asm() with the
 * positive byte count as the "error" value so a fault reports the
 * full size as uncopied, matching the multi-move cases.
 */
static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
		break;
	case 2:
		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}
364
365static inline unsigned long
366raw_copy_from_user(void *to, const void __user *from, unsigned long n)
367{
368 if (__builtin_constant_p(n))
369 return __constant_copy_from_user(to, from, n);
370 return __generic_copy_from_user(to, from, n);
371}
372
373static inline unsigned long
374raw_copy_to_user(void __user *to, const void *from, unsigned long n)
375{
376 if (__builtin_constant_p(n))
377 return __constant_copy_to_user(to, from, n);
378 return __generic_copy_to_user(to, from, n);
379}
380#define INLINE_COPY_FROM_USER
381#define INLINE_COPY_TO_USER
382
/*
 * Highest address the user string helpers may touch: unlimited (~0UL)
 * when uaccess_kernel() says we are addressing kernel space, otherwise
 * the task's TASK_SIZE limit.
 */
#define user_addr_max() \
	(uaccess_kernel() ? ~0UL : TASK_SIZE)
385
386extern long strncpy_from_user(char *dst, const char __user *src, long count);
387extern __must_check long strnlen_user(const char __user *str, long n);
388
389unsigned long __clear_user(void __user *to, unsigned long n);
390
391#define clear_user __clear_user
392
393#else /* !CONFIG_MMU */
394#include <asm-generic/uaccess.h>
395#endif
396
#endif /* __M68K_UACCESS_H */