#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/csr.h>

	.altmacro
	.macro fixup op reg addr lbl
	LOCAL _epc
_epc:
	\op \reg, \addr
	.section __ex_table,"a"
	.balign RISCV_SZPTR
	RISCV_PTR _epc, \lbl
	.previous
	.endm
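	/*
	 * A sketch of what "fixup REG_L, t2, (a1), 10f" expands to on RV64
	 * (REG_L is ld, RISCV_PTR is .dword; LOCAL gives _epc a unique
	 * generated name per expansion):
	 *
	 *	_epc:	ld t2, (a1)
	 *		.section __ex_table,"a"
	 *		.balign RISCV_SZPTR
	 *		.dword _epc, 10f
	 *		.previous
	 *
	 * On a fault at _epc the trap handler searches __ex_table for the
	 * faulting pc and, on a match, continues at the paired label (10f).
	 */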

ENTRY(__asm_copy_to_user)
ENTRY(__asm_copy_from_user)

	/* Enable access to user memory */
	li t6, SR_SUM
	csrs CSR_SSTATUS, t6
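	/*
	 * Note: sstatus.SUM ("permit Supervisor User Memory access") lets
	 * S-mode loads/stores touch pages mapped with the U bit set. It must
	 * be cleared again on every exit path, including the fixup code
	 * below, or the kernel would keep running with user memory open.
	 */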

	add a3, a1, a2
	/* Use word-oriented copy only if low-order bits match */
	andi t0, a0, SZREG-1
	andi t1, a1, SZREG-1
	bne t0, t1, 2f

	addi t0, a1, SZREG-1
	andi t1, a3, ~(SZREG-1)
	andi t0, t0, ~(SZREG-1)
	/*
	 * a3: terminal address of source region
	 * t0: lowest XLEN-aligned address in source
	 * t1: highest XLEN-aligned address in source
	 */
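	/*
	 * Worked example (RV64, SZREG = 8): copying 0x20 bytes from
	 * a1 = 0x1003 gives a3 = 0x1023, t0 = 0x1008 (round up) and
	 * t1 = 0x1020 (round down), so bytes 0x1003-0x1007 go through the
	 * byte loop at 4, words 0x1008-0x101f through the word loop at 1,
	 * and the tail 0x1020-0x1022 through the byte loop at 5.
	 */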
	bgeu t0, t1, 2f
	bltu a1, t0, 4f
1:
	fixup REG_L, t2, (a1), 10f
	fixup REG_S, t2, (a0), 10f
	addi a1, a1, SZREG
	addi a0, a0, SZREG
	bltu a1, t1, 1b
2:
	bltu a1, a3, 5f

3:
	/* Disable access to user memory */
	csrc CSR_SSTATUS, t6
	li a0, 0
	ret
4: /* Edge case: misaligned head */
	fixup lbu, t2, (a1), 10f
	fixup sb, t2, (a0), 10f
	addi a1, a1, 1
	addi a0, a0, 1
	bltu a1, t0, 4b
	j 1b
5: /* Edge case: remainder */
	fixup lbu, t2, (a1), 10f
	fixup sb, t2, (a0), 10f
	addi a1, a1, 1
	addi a0, a0, 1
	bltu a1, a3, 5b
	j 3b
ENDPROC(__asm_copy_to_user)
ENDPROC(__asm_copy_from_user)


ENTRY(__clear_user)

	/* Enable access to user memory */
	li t6, SR_SUM
	csrs CSR_SSTATUS, t6

	add a3, a0, a1
	addi t0, a0, SZREG-1
	andi t1, a3, ~(SZREG-1)
	andi t0, t0, ~(SZREG-1)
	/*
	 * a3: terminal address of target region
	 * t0: lowest XLEN-aligned address in target region
	 * t1: highest XLEN-aligned address in target region
	 */
	bgeu t0, t1, 2f
	bltu a0, t0, 4f
1:
	fixup REG_S, zero, (a0), 11f
	addi a0, a0, SZREG
	bltu a0, t1, 1b
2:
	bltu a0, a3, 5f

3:
	/* Disable access to user memory */
	csrc CSR_SSTATUS, t6
	li a0, 0
	ret
4: /* Edge case: misaligned head */
	fixup sb, zero, (a0), 11f
	addi a0, a0, 1
	bltu a0, t0, 4b
	j 1b
5: /* Edge case: remainder */
	fixup sb, zero, (a0), 11f
	addi a0, a0, 1
	bltu a0, a3, 5b
	j 3b
ENDPROC(__clear_user)

	.section .fixup,"ax"
	.balign 4
	/* Fixup code for __copy_user(10) and __clear_user(11) */
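	/*
	 * On a fault these handlers return the full requested size (a2 for
	 * the copy routines, a1 for __clear_user). The uaccess contract only
	 * requires returning the number of bytes left uncopied, so reporting
	 * the whole size is the conservative choice here.
	 */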
10:
	/* Disable access to user memory */
	csrc CSR_SSTATUS, t6
	mv a0, a2
	ret
11:
	/* Disable access to user memory */
	csrc CSR_SSTATUS, t6
	mv a0, a1
	ret
	.previous
#include <linux/linkage.h>
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-extable.h>
#include <asm/csr.h>
#include <asm/hwcap.h>
#include <asm/alternative-macros.h>

	.macro fixup op reg addr lbl
100:
	\op \reg, \addr
	_asm_extable 100b, \lbl
	.endm
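	/*
	 * _asm_extable (from <asm/asm-extable.h>) replaces the hand-rolled
	 * __ex_table fragment used in the older macro above: it emits one
	 * exception-table entry pairing the instruction at local label 100
	 * with the fixup label, so a fault at that instruction resumes at
	 * the fixup. The exact entry layout (relative offsets, type word)
	 * is an implementation detail of that header.
	 */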

SYM_FUNC_START(__asm_copy_to_user)
#ifdef CONFIG_RISCV_ISA_V
	ALTERNATIVE("j fallback_scalar_usercopy", "nop", 0, RISCV_ISA_EXT_v, CONFIG_RISCV_ISA_V)
	REG_L t0, riscv_v_usercopy_threshold
	bltu a2, t0, fallback_scalar_usercopy
	tail enter_vector_usercopy
#endif
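	/*
	 * The ALTERNATIVE above is patched at boot: when the V extension is
	 * detected, the "j fallback_scalar_usercopy" becomes a nop and the
	 * vector dispatch below it becomes reachable. Copies smaller than
	 * riscv_v_usercopy_threshold still take the scalar path, the usual
	 * rationale being that vector state setup has a fixed cost that
	 * only pays off for larger sizes.
	 */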
SYM_FUNC_START(fallback_scalar_usercopy)

	/* Enable access to user memory */
	li t6, SR_SUM
	csrs CSR_STATUS, t6

	/*
	 * Save the terminal address which will be used to compute the number
	 * of bytes copied in case of a fixup exception.
	 */
	add t5, a0, a2

	/*
	 * Register allocation for code below:
	 * a0 - start of uncopied dst
	 * a1 - start of uncopied src
	 * a2 - size
	 * t0 - end of uncopied dst
	 */
	add t0, a0, a2

	/*
	 * Use byte copy only if too small.
	 * SZREG holds 4 for RV32 and 8 for RV64
	 */
	li a3, 9*SZREG /* size must be larger than size in word_copy */
	bltu a2, a3, .Lbyte_copy_tail
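	/*
	 * Why 9*SZREG: aligning dst below can consume up to SZREG-1 head
	 * bytes, and one pass of the unrolled word_copy loop needs 8*SZREG
	 * bytes, so anything smaller than 9*SZREG (72 bytes on RV64) is not
	 * guaranteed to reach word_copy with a full iteration's worth of
	 * data left.
	 */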

	/*
	 * Copy first bytes until dst is aligned to word boundary.
	 * a0 - start of dst
	 * t1 - start of aligned dst
	 */
	addi t1, a0, SZREG-1
	andi t1, t1, ~(SZREG-1)
	/* dst is already aligned, skip */
	beq a0, t1, .Lskip_align_dst
1:
	/* a5 - one byte for copying data */
	fixup lb a5, 0(a1), 10f
	addi a1, a1, 1 /* src */
	fixup sb a5, 0(a0), 10f
	addi a0, a0, 1 /* dst */
	bltu a0, t1, 1b /* t1 - start of aligned dst */

.Lskip_align_dst:
	/*
	 * Now dst is aligned.
	 * Use shift-copy if src is misaligned; use plain word-copy when
	 * both src and dst are aligned, since no shifting is needed in
	 * that case.
	 */
	/* a1 - start of src */
	andi a3, a1, SZREG-1
	bnez a3, .Lshift_copy

.Lword_copy:
	/*
	 * Both src and dst are aligned, unrolled word copy
	 *
	 * a0 - start of aligned dst
	 * a1 - start of aligned src
	 * t0 - end of aligned dst
	 */
	addi t0, t0, -(8*SZREG) /* bias the limit down so the loop cannot overrun */
2:
	fixup REG_L a4, 0(a1), 10f
	fixup REG_L a5, SZREG(a1), 10f
	fixup REG_L a6, 2*SZREG(a1), 10f
	fixup REG_L a7, 3*SZREG(a1), 10f
	fixup REG_L t1, 4*SZREG(a1), 10f
	fixup REG_L t2, 5*SZREG(a1), 10f
	fixup REG_L t3, 6*SZREG(a1), 10f
	fixup REG_L t4, 7*SZREG(a1), 10f
	fixup REG_S a4, 0(a0), 10f
	fixup REG_S a5, SZREG(a0), 10f
	fixup REG_S a6, 2*SZREG(a0), 10f
	fixup REG_S a7, 3*SZREG(a0), 10f
	fixup REG_S t1, 4*SZREG(a0), 10f
	fixup REG_S t2, 5*SZREG(a0), 10f
	fixup REG_S t3, 6*SZREG(a0), 10f
	fixup REG_S t4, 7*SZREG(a0), 10f
	addi a0, a0, 8*SZREG
	addi a1, a1, 8*SZREG
	bltu a0, t0, 2b

	addi t0, t0, 8*SZREG /* revert to original value */
	j .Lbyte_copy_tail
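	/*
	 * Worked example of the biased limit (RV64): for a 100-byte aligned
	 * region, t0 is biased down by 64, so the 64-byte iteration runs
	 * once (a0 advances by 64, leaving 36 bytes, which is below the
	 * biased limit) and the remaining 36 bytes fall through to byte
	 * copy. Without the bias, a second iteration would read and write
	 * past the end of the buffers.
	 */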

.Lshift_copy:

	/*
	 * Word copy with shifting.
	 * For misaligned copy we still perform aligned word copy, but
	 * we need to use the value fetched from the previous iteration and
	 * do some shifts.
	 * This is safe because every load stays within an aligned word that
	 * overlaps the source region, so it cannot stray onto an unmapped
	 * page.
	 *
	 * a0 - start of aligned dst
	 * a1 - start of src
	 * a3 - a1 & mask:(SZREG-1)
	 * t0 - end of uncopied dst
	 * t1 - end of aligned dst
	 */
	/* calculating aligned word boundary for dst */
	andi t1, t0, ~(SZREG-1)
	/* Converting unaligned src to aligned src */
	andi a1, a1, ~(SZREG-1)

	/*
	 * Calculate shifts
	 * t3 - prev shift
	 * t4 - current shift
	 */
	slli t3, a3, 3 /* converting bytes in a3 to bits */
	li a5, SZREG*8
	sub t4, a5, t3
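	/*
	 * Worked shift example (RV64, little-endian, src offset a3 = 3):
	 * t3 = 24 and t4 = 40, so each stored word is
	 * (prev >> 24) | (next << 40): the upper 5 bytes of the previous
	 * aligned load supply the low bytes of the result and the low 3
	 * bytes of the next load supply its high bytes, reassembling 8
	 * consecutive source bytes.
	 */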
139
140 /* Load the first word to combine with second word */
141 fixup REG_L a5, 0(a1), 10f
142
1433:
144 /* Main shifting copy
145 *
146 * a0 - start of aligned dst
147 * a1 - start of aligned src
148 * t1 - end of aligned dst
149 */
150
151 /* At least one iteration will be executed */
152 srl a4, a5, t3
153 fixup REG_L a5, SZREG(a1), 10f
154 addi a1, a1, SZREG
155 sll a2, a5, t4
156 or a2, a2, a4
157 fixup REG_S a2, 0(a0), 10f
158 addi a0, a0, SZREG
159 bltu a0, t1, 3b
160
161 /* Revert src to original unaligned value */
162 add a1, a1, a3
163
164.Lbyte_copy_tail:
165 /*
166 * Byte copy anything left.
167 *
168 * a0 - start of remaining dst
169 * a1 - start of remaining src
170 * t0 - end of remaining dst
171 */
172 bgeu a0, t0, .Lout_copy_user /* check if end of copy */
1734:
174 fixup lb a5, 0(a1), 10f
175 addi a1, a1, 1 /* src */
176 fixup sb a5, 0(a0), 10f
177 addi a0, a0, 1 /* dst */
178 bltu a0, t0, 4b /* t0 - end of dst */
179
180.Lout_copy_user:
181 /* Disable access to user memory */
182 csrc CSR_STATUS, t6
183 li a0, 0
184 ret

	/* Exception fixup code */
10:
	/* Disable access to user memory */
	csrc CSR_STATUS, t6
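	/*
	 * a0 still points at the first dst byte that was not written, and
	 * t5 holds the terminal dst address saved on entry, so t5 - a0 is
	 * the number of bytes left uncopied, the value the uaccess API
	 * expects on failure.
	 */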
	sub a0, t5, a0
	ret
SYM_FUNC_END(__asm_copy_to_user)
SYM_FUNC_END(fallback_scalar_usercopy)
EXPORT_SYMBOL(__asm_copy_to_user)
SYM_FUNC_ALIAS(__asm_copy_from_user, __asm_copy_to_user)
EXPORT_SYMBOL(__asm_copy_from_user)


SYM_FUNC_START(__clear_user)

	/* Enable access to user memory */
	li t6, SR_SUM
	csrs CSR_STATUS, t6

	add a3, a0, a1
	addi t0, a0, SZREG-1
	andi t1, a3, ~(SZREG-1)
	andi t0, t0, ~(SZREG-1)
	/*
	 * a3: terminal address of target region
	 * t0: lowest XLEN-aligned address in target region
	 * t1: highest XLEN-aligned address in target region
	 */
	bgeu t0, t1, 2f
	bltu a0, t0, 4f
1:
	fixup REG_S, zero, (a0), 11f
	addi a0, a0, SZREG
	bltu a0, t1, 1b
2:
	bltu a0, a3, 5f

3:
	/* Disable access to user memory */
	csrc CSR_STATUS, t6
	li a0, 0
	ret
4: /* Edge case: misaligned head */
	fixup sb, zero, (a0), 11f
	addi a0, a0, 1
	bltu a0, t0, 4b
	j 1b
5: /* Edge case: remainder */
	fixup sb, zero, (a0), 11f
	addi a0, a0, 1
	bltu a0, a3, 5b
	j 3b

	/* Exception fixup code */
11:
	/* Disable access to user memory */
	csrc CSR_STATUS, t6
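	/*
	 * As in the copy fixup above: a3 is the terminal address of the
	 * target region, so a3 - a0 is the number of bytes left uncleared.
	 */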
	sub a0, a3, a0
	ret
SYM_FUNC_END(__clear_user)
EXPORT_SYMBOL(__clear_user)