/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002 Niibe Yutaka
 * Copyright (C) 2003 - 2008 Paul Mundt
 *
 * Based on:
 *     MIPS implementation version 1.15 by
 *             Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

#define __get_user_size(x,ptr,size,retval) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: \
		__get_user_asm(x, ptr, retval, "b"); \
		break; \
	case 2: \
		__get_user_asm(x, ptr, retval, "w"); \
		break; \
	case 4: \
		__get_user_asm(x, ptr, retval, "l"); \
		break; \
	default: \
		__get_user_unknown(); \
		break; \
	} \
} while (0)
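/*
 * __get_user_size() above dispatches a user-space load on the access size
 * and is the backend for the get_user() family of accessors. The expansion
 * below is only an illustrative sketch of how a wrapper in <asm/uaccess.h>
 * is expected to use it (the wrapper names and any access_ok() checking
 * live outside this file and are assumptions here):
 *
 *	int err;
 *	u16 val;
 *	__get_user_size(val, ptr, sizeof(val), err);
 *	if (err)
 *		return -EFAULT;
 *
 * On a fault, the MMU variant of __get_user_asm() below zeroes the
 * destination and sets 'err' to -EFAULT via its .fixup/__ex_table entry;
 * the no-MMU variant is a plain load that cannot fault.
 */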

#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn " %2, %1\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov #0, %1\n\t" \
	"mov.l 4f, %0\n\t" \
	"jmp @%0\n\t" \
	" mov %3, %0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"mov." insn " %1, %0\n\t" \
		: "=&r" (x) \
		: "m" (__m(addr)) \
	); \
} while (0)
#endif /* CONFIG_MMU */

extern void __get_user_unknown(void);
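/*
 * __get_user_unknown() is deliberately left undefined: a get_user() on an
 * unsupported access size ends up referencing it and fails at link time.
 */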

#define __put_user_size(x,ptr,size,retval) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: \
		__put_user_asm(x, ptr, retval, "b"); \
		break; \
	case 2: \
		__put_user_asm(x, ptr, retval, "w"); \
		break; \
	case 4: \
		__put_user_asm(x, ptr, retval, "l"); \
		break; \
	case 8: \
		__put_user_u64(x, ptr, retval); \
		break; \
	default: \
		__put_user_unknown(); \
	} \
} while (0)

#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"1:\n\t" \
		"mov." insn " %1, %2\n\t" \
		"2:\n" \
		".section .fixup,\"ax\"\n" \
		"3:\n\t" \
		"mov.l 4f, %0\n\t" \
		"jmp @%0\n\t" \
		" mov %3, %0\n\t" \
		".balign 4\n" \
		"4: .long 2b\n\t" \
		".previous\n" \
		".section __ex_table,\"a\"\n\t" \
		".long 1b, 3b\n\t" \
		".previous" \
		: "=&r" (err) \
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT), \
		  "0" (err) \
		: "memory" \
	); \
} while (0)
#else
#define __put_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"mov." insn " %0, %1\n\t" \
		: /* no outputs */ \
		: "r" (x), "m" (__m(addr)) \
		: "memory" \
	); \
} while (0)
#endif /* CONFIG_MMU */

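/*
 * 64-bit user store, done as two 32-bit mov.l instructions. The %R and %S
 * operand modifiers select the low and high register halves of the 64-bit
 * value and %T addresses the following word of the memory operand, so the
 * two halves are stored in the order required by the configured endianness.
 */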
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %R1,%2\n\t" \
	"mov.l %S1,%T2\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l 4f,%0\n\t" \
	"jmp @%0\n\t" \
	" mov %3,%0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#else
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %S1,%2\n\t" \
	"mov.l %R1,%T2\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l 4f,%0\n\t" \
	"jmp @%0\n\t" \
	" mov %3,%0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#endif

extern void __put_user_unknown(void);

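/*
 * Copy a string from user space into a kernel buffer, stopping after the
 * terminating NUL or after __count bytes, whichever comes first. Returns
 * the length of the string copied (the NUL, when hit, is stored but not
 * counted), or -EFAULT if a fault occurs while reading the user source.
 */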
static inline int
__strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __count)
{
	__kernel_size_t res;
	unsigned long __dummy, _d, _s, _c;

	__asm__ __volatile__(
		"9:\n"
		"mov.b @%2+, %1\n\t"
		"cmp/eq #0, %1\n\t"
		"bt/s 2f\n"
		"1:\n"
		"mov.b %1, @%3\n\t"
		"dt %4\n\t"
		"bf/s 9b\n\t"
		" add #1, %3\n\t"
		"2:\n\t"
		"sub %4, %0\n"
		"3:\n"
		".section .fixup,\"ax\"\n"
		"4:\n\t"
		"mov.l 5f, %1\n\t"
		"jmp @%1\n\t"
		" mov %9, %0\n\t"
		".balign 4\n"
		"5: .long 3b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		" .balign 4\n"
		" .long 9b,4b\n"
		".previous"
		: "=r" (res), "=&z" (__dummy), "=r" (_s), "=r" (_d), "=r"(_c)
		: "0" (__count), "2" (__src), "3" (__dest), "4" (__count),
		  "i" (-EFAULT)
		: "memory", "t");

	return res;
}
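/*
 * Illustrative sketch only (an assumption, not part of this header): a
 * strncpy_from_user() wrapper defined elsewhere would be expected to verify
 * the user range first and then call the helper above, e.g.:
 *
 *	long strncpy_from_user(char *dst, const char __user *src, long count)
 *	{
 *		if (!access_ok(src, 1))	// access_ok() arguments vary by kernel version
 *			return -EFAULT;
 *		return __strncpy_from_user((unsigned long)dst,
 *					   (unsigned long)src, count);
 *	}
 */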

/*
 * Return the size of a string (including the ending 0 even when we have
 * exceeded the maximum string length).
 */
static inline long __strnlen_user(const char __user *__s, long __n)
{
	unsigned long res;
	unsigned long __dummy;

	__asm__ __volatile__(
		"1:\t"
		"mov.b @(%0,%3), %1\n\t"
		"cmp/eq %4, %0\n\t"
		"bt/s 2f\n\t"
		" add #1, %0\n\t"
		"tst %1, %1\n\t"
		"bf 1b\n\t"
		"2:\n"
		".section .fixup,\"ax\"\n"
		"3:\n\t"
		"mov.l 4f, %1\n\t"
		"jmp @%1\n\t"
		" mov #0, %0\n"
		".balign 4\n"
		"4: .long 2b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		" .balign 4\n"
		" .long 1b,3b\n"
		".previous"
		: "=z" (res), "=&r" (__dummy)
		: "0" (0), "r" (__s), "r" (__n)
		: "t");
	return res;
}

#endif /* __ASM_SH_UACCESS_32_H */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002 Niibe Yutaka
 * Copyright (C) 2003 - 2008 Paul Mundt
 *
 * Based on:
 *     MIPS implementation version 1.15 by
 *             Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

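/*
 * Dispatch a user-space load on the access size: 1-, 2- and 4-byte accesses
 * use a single mov instruction via __get_user_asm(), 8-byte accesses are
 * split into two 32-bit loads by __get_user_u64(), and any other size
 * resolves to the undefined __get_user_unknown(), failing at link time.
 */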
#define __get_user_size(x,ptr,size,retval) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: \
		__get_user_asm(x, ptr, retval, "b"); \
		break; \
	case 2: \
		__get_user_asm(x, ptr, retval, "w"); \
		break; \
	case 4: \
		__get_user_asm(x, ptr, retval, "l"); \
		break; \
	case 8: \
		__get_user_u64(x, ptr, retval); \
		break; \
	default: \
		__get_user_unknown(); \
		break; \
	} \
} while (0)

#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn " %2, %1\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov #0, %1\n\t" \
	"mov.l 4f, %0\n\t" \
	"jmp @%0\n\t" \
	" mov %3, %0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"mov." insn " %1, %0\n\t" \
		: "=&r" (x) \
		: "m" (__m(addr)) \
	); \
} while (0)
#endif /* CONFIG_MMU */

extern void __get_user_unknown(void);

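/*
 * 64-bit user load, implemented as two 32-bit mov.l instructions. %R1/%S1
 * name the low/high register halves of the 64-bit destination and %T2 the
 * following word of the memory operand, so the halves are filled in the
 * order required by the configured endianness. Each mov.l is 2 bytes long
 * and gets its own __ex_table entry (1b and 1b + 2); on a fault the fixup
 * zeroes the destination and returns -EFAULT in 'err'.
 */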
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %2,%R1\n\t" \
	"mov.l %T2,%S1\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov #0,%R1\n\t" \
	"mov #0,%S1\n\t" \
	"mov.l 4f, %0\n\t" \
	"jmp @%0\n\t" \
	" mov %3, %0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".long 1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %2,%S1\n\t" \
	"mov.l %T2,%R1\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov #0,%S1\n\t" \
	"mov #0,%R1\n\t" \
	"mov.l 4f, %0\n\t" \
	"jmp @%0\n\t" \
	" mov %3, %0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".long 1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#endif
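/*
 * Illustrative sketch only (an assumption, not part of this header): with
 * the 8-byte case wired up, a __get_user()/get_user() wrapper defined in
 * <asm/uaccess.h> is expected to support 64-bit reads such as:
 *
 *	u64 val;
 *	if (__get_user(val, (u64 __user *)uptr))
 *		return -EFAULT;
 *
 * since sizeof(val) == 8 now selects __get_user_u64() in __get_user_size().
 */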

#define __put_user_size(x,ptr,size,retval) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: \
		__put_user_asm(x, ptr, retval, "b"); \
		break; \
	case 2: \
		__put_user_asm(x, ptr, retval, "w"); \
		break; \
	case 4: \
		__put_user_asm(x, ptr, retval, "l"); \
		break; \
	case 8: \
		__put_user_u64(x, ptr, retval); \
		break; \
	default: \
		__put_user_unknown(); \
	} \
} while (0)

#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"1:\n\t" \
		"mov." insn " %1, %2\n\t" \
		"2:\n" \
		".section .fixup,\"ax\"\n" \
		"3:\n\t" \
		"mov.l 4f, %0\n\t" \
		"jmp @%0\n\t" \
		" mov %3, %0\n\t" \
		".balign 4\n" \
		"4: .long 2b\n\t" \
		".previous\n" \
		".section __ex_table,\"a\"\n\t" \
		".long 1b, 3b\n\t" \
		".previous" \
		: "=&r" (err) \
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT), \
		  "0" (err) \
		: "memory" \
	); \
} while (0)
#else
#define __put_user_asm(x, addr, err, insn) \
do { \
	__asm__ __volatile__ ( \
		"mov." insn " %0, %1\n\t" \
		: /* no outputs */ \
		: "r" (x), "m" (__m(addr)) \
		: "memory" \
	); \
} while (0)
#endif /* CONFIG_MMU */

#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %R1,%2\n\t" \
	"mov.l %S1,%T2\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l 4f,%0\n\t" \
	"jmp @%0\n\t" \
	" mov %3,%0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#else
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l %S1,%2\n\t" \
	"mov.l %R1,%T2\n\t" \
	"2:\n" \
	".section .fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l 4f,%0\n\t" \
	"jmp @%0\n\t" \
	" mov %3,%0\n\t" \
	".balign 4\n" \
	"4: .long 2b\n\t" \
	".previous\n" \
	".section __ex_table,\"a\"\n\t" \
	".long 1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#endif

extern void __put_user_unknown(void);

#endif /* __ASM_SH_UACCESS_32_H */