/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

/*
 * Pick the partial (left/right) store instructions matching the native
 * long size: swl/swr for 32-bit longs, sdl/sdr for 64-bit longs.
 */
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

/*
 * On microMIPS the fill loop uses the paired-store LONG_SP form, which
 * writes two longs per instruction, so the store unit and the alignment
 * mask double in size.  FILL64RG/FILLPTRG name the registers the fill
 * code uses for the fill pattern and the partial-block offset.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

/* Values taken by the \mode argument of __BUILD_BZERO below. */
#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

/*
 * EX: emit a store together with an __ex_table entry so that a fault
 * while storing branches to the given fixup handler.  In EVA mode the
 * EVA variant of the instruction (insn##e) is emitted instead, so that
 * user-mode addresses are accessed with user privileges.
 */
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:	insn	reg, addr;				\
	.else;						\
9:	___BUILD_EVA_INSN(insn, reg, addr);		\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous
57
58 .macro f_fill64 dst, offset, val, fixup, mode
59 EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
60 EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
61 EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
62 EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
63#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
64 EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
65 EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
66 EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
67 EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
68#endif
69#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
70 EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
71 EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
72 EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
73 EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
74 EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
75 EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
76 EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
77 EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
78#endif
79 .endm
80
81 .align 5
82
83 /*
84 * Macro to generate the __bzero{,_user} symbol
85 * Arguments:
86 * mode: LEGACY_MODE or EVA_MODE
87 */
88 .macro __BUILD_BZERO mode
89 /* Initialize __memset if this is the first time we call this macro */
90 .ifnotdef __memset
91 .set __memset, 1
92 .hidden __memset /* Make sure it does not leak */
93 .endif
94
95 sltiu t0, a2, STORSIZE /* very small region? */
96 .set noreorder
97 bnez t0, .Lsmall_memset\@
98 andi t0, a0, STORMASK /* aligned? */
99 .set reorder
100
101#ifdef CONFIG_CPU_MICROMIPS
102 move t8, a1 /* used by 'swp' instruction */
103 move t9, a1
104#endif
105 .set noreorder
106#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
107 beqz t0, 1f
108 PTR_SUBU t0, STORSIZE /* alignment in bytes */
109#else
110 .set noat
111 li AT, STORSIZE
112 beqz t0, 1f
113 PTR_SUBU t0, AT /* alignment in bytes */
114 .set at
115#endif
116 .set reorder
117
118#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
119 R10KCBARRIER(0(ra))
120#ifdef __MIPSEB__
121 EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
122#else
123 EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
124#endif
125 PTR_SUBU a0, t0 /* long align ptr */
126 PTR_ADDU a2, t0 /* correct size */
127
128#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
129#define STORE_BYTE(N) \
130 EX(sb, a1, N(a0), .Lbyte_fixup\@); \
131 .set noreorder; \
132 beqz t0, 0f; \
133 PTR_ADDU t0, 1; \
134 .set reorder;
135
136 PTR_ADDU a2, t0 /* correct size */
137 PTR_ADDU t0, 1
138 STORE_BYTE(0)
139 STORE_BYTE(1)
140#if LONGSIZE == 4
141 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
142#else
143 STORE_BYTE(2)
144 STORE_BYTE(3)
145 STORE_BYTE(4)
146 STORE_BYTE(5)
147 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
148#endif
1490:
150 ori a0, STORMASK
151 xori a0, STORMASK
152 PTR_ADDIU a0, STORSIZE
153#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1541: ori t1, a2, 0x3f /* # of full blocks */
155 xori t1, 0x3f
156 andi t0, a2, 0x40-STORSIZE
157 beqz t1, .Lmemset_partial\@ /* no block to fill */
158
159 PTR_ADDU t1, a0 /* end address */
1601: PTR_ADDIU a0, 64
161 R10KCBARRIER(0(ra))
162 f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
163 bne t1, a0, 1b
164
165.Lmemset_partial\@:
166 R10KCBARRIER(0(ra))
167 PTR_LA t1, 2f /* where to start */
168#ifdef CONFIG_CPU_MICROMIPS
169 LONG_SRL t7, t0, 1
170#endif
171#if LONGSIZE == 4
172 PTR_SUBU t1, FILLPTRG
173#else
174 .set noat
175 LONG_SRL AT, FILLPTRG, 1
176 PTR_SUBU t1, AT
177 .set at
178#endif
179 PTR_ADDU a0, t0 /* dest ptr */
180 jr t1
181
182 /* ... but first do longs ... */
183 f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1842: andi a2, STORMASK /* At most one long to go */
185
186 .set noreorder
187 beqz a2, 1f
188#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
189 PTR_ADDU a0, a2 /* What's left */
190 .set reorder
191 R10KCBARRIER(0(ra))
192#ifdef __MIPSEB__
193 EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
194#else
195 EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
196#endif
197#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
198 PTR_SUBU t0, $0, a2
199 .set reorder
200 move a2, zero /* No remaining longs */
201 PTR_ADDIU t0, 1
202 STORE_BYTE(0)
203 STORE_BYTE(1)
204#if LONGSIZE == 4
205 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
206#else
207 STORE_BYTE(2)
208 STORE_BYTE(3)
209 STORE_BYTE(4)
210 STORE_BYTE(5)
211 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
212#endif
2130:
214#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
2151: move a2, zero
216 jr ra
217
218.Lsmall_memset\@:
219 PTR_ADDU t1, a0, a2
220 beqz a2, 2f
221
2221: PTR_ADDIU a0, 1 /* fill bytewise */
223 R10KCBARRIER(0(ra))
224 .set noreorder
225 bne t1, a0, 1b
226 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
227 .set reorder
228
2292: move a2, zero
230 jr ra /* done */
231 .if __memset == 1
232 END(memset)
233 .set __memset, 0
234 .hidden __memset
235 .endif
236
237#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
238.Lbyte_fixup\@:
239 /*
240 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
241 * a2 = a2 - t0 + 1
242 */
243 PTR_SUBU a2, t0
244 PTR_ADDIU a2, 1
245 jr ra
246#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
247
248.Lfirst_fixup\@:
249 /* unset_bytes already in a2 */
250 jr ra
251
252.Lfwd_fixup\@:
253 /*
254 * unset_bytes = partial_start_addr + #bytes - fault_addr
255 * a2 = t1 + (a2 & 3f) - $28->task->BUADDR
256 */
257 PTR_L t0, TI_TASK($28)
258 andi a2, 0x3f
259 LONG_L t0, THREAD_BUADDR(t0)
260 LONG_ADDU a2, t1
261 LONG_SUBU a2, t0
262 jr ra
263
264.Lpartial_fixup\@:
265 /*
266 * unset_bytes = partial_end_addr + #bytes - fault_addr
267 * a2 = a0 + (a2 & STORMASK) - $28->task->BUADDR
268 */
269 PTR_L t0, TI_TASK($28)
270 andi a2, STORMASK
271 LONG_L t0, THREAD_BUADDR(t0)
272 LONG_ADDU a2, a0
273 LONG_SUBU a2, t0
274 jr ra
275
276.Llast_fixup\@:
277 /* unset_bytes already in a2 */
278 jr ra
279
280.Lsmall_fixup\@:
281 /*
282 * unset_bytes = end_addr - current_addr + 1
283 * a2 = t1 - a0 + 1
284 */
285 PTR_SUBU a2, t1, a0
286 PTR_ADDIU a2, 1
287 jr ra
288
289 .endm
290
/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 *
 * Returns the original dst pointer in v0.  The fill byte is splatted
 * across the whole register before falling into __BUILD_BZERO, which
 * does the actual store loops.
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move	v0, a0			/* result */
	beqz	a1, 1f			/* fill byte 0: no splat needed */

	andi	a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or	a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or	a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/*
 * Pick the partial (left/right) store instructions matching the native
 * long size: swl/swr for 32-bit longs, sdl/sdr for 64-bit longs.
 */
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

/*
 * On microMIPS the fill loop uses the paired-store LONG_SP form, which
 * writes two longs per instruction, so the store unit and the alignment
 * mask double in size.  FILL64RG/FILLPTRG name the registers the fill
 * code uses for the fill pattern and the partial-block offset.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

/* Values taken by the \mode argument of __BUILD_BZERO below. */
#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

/*
 * EX: emit a store together with an __ex_table entry so that a fault
 * while storing branches to the given fixup handler.  In EVA mode the
 * EVA variant of the instruction (insn##e) is emitted instead.
 */
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:	insn	reg, addr;				\
	.else;						\
9:	___BUILD_EVA_INSN(insn, reg, addr);		\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous
56
57 .macro f_fill64 dst, offset, val, fixup, mode
58 EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
59 EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
60 EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
61 EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
62#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
63 EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
64 EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
65 EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
66 EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
67#endif
68#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
69 EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
70 EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
71 EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
72 EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
73 EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
74 EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
75 EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
76 EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
77#endif
78 .endm
79
80 .set noreorder
81 .align 5
82
83 /*
84 * Macro to generate the __bzero{,_user} symbol
85 * Arguments:
86 * mode: LEGACY_MODE or EVA_MODE
87 */
88 .macro __BUILD_BZERO mode
89 /* Initialize __memset if this is the first time we call this macro */
90 .ifnotdef __memset
91 .set __memset, 1
92 .hidden __memset /* Make sure it does not leak */
93 .endif
94
95 sltiu t0, a2, STORSIZE /* very small region? */
96 bnez t0, .Lsmall_memset\@
97 andi t0, a0, STORMASK /* aligned? */
98
99#ifdef CONFIG_CPU_MICROMIPS
100 move t8, a1 /* used by 'swp' instruction */
101 move t9, a1
102#endif
103#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
104 beqz t0, 1f
105 PTR_SUBU t0, STORSIZE /* alignment in bytes */
106#else
107 .set noat
108 li AT, STORSIZE
109 beqz t0, 1f
110 PTR_SUBU t0, AT /* alignment in bytes */
111 .set at
112#endif
113
114 R10KCBARRIER(0(ra))
115#ifdef __MIPSEB__
116 EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
117#endif
118#ifdef __MIPSEL__
119 EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
120#endif
121 PTR_SUBU a0, t0 /* long align ptr */
122 PTR_ADDU a2, t0 /* correct size */
123
1241: ori t1, a2, 0x3f /* # of full blocks */
125 xori t1, 0x3f
126 beqz t1, .Lmemset_partial\@ /* no block to fill */
127 andi t0, a2, 0x40-STORSIZE
128
129 PTR_ADDU t1, a0 /* end address */
130 .set reorder
1311: PTR_ADDIU a0, 64
132 R10KCBARRIER(0(ra))
133 f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
134 bne t1, a0, 1b
135 .set noreorder
136
137.Lmemset_partial\@:
138 R10KCBARRIER(0(ra))
139 PTR_LA t1, 2f /* where to start */
140#ifdef CONFIG_CPU_MICROMIPS
141 LONG_SRL t7, t0, 1
142#endif
143#if LONGSIZE == 4
144 PTR_SUBU t1, FILLPTRG
145#else
146 .set noat
147 LONG_SRL AT, FILLPTRG, 1
148 PTR_SUBU t1, AT
149 .set at
150#endif
151 jr t1
152 PTR_ADDU a0, t0 /* dest ptr */
153
154 .set push
155 .set noreorder
156 .set nomacro
157 /* ... but first do longs ... */
158 f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1592: .set pop
160 andi a2, STORMASK /* At most one long to go */
161
162 beqz a2, 1f
163 PTR_ADDU a0, a2 /* What's left */
164 R10KCBARRIER(0(ra))
165#ifdef __MIPSEB__
166 EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
167#endif
168#ifdef __MIPSEL__
169 EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
170#endif
1711: jr ra
172 move a2, zero
173
174.Lsmall_memset\@:
175 beqz a2, 2f
176 PTR_ADDU t1, a0, a2
177
1781: PTR_ADDIU a0, 1 /* fill bytewise */
179 R10KCBARRIER(0(ra))
180 bne t1, a0, 1b
181 sb a1, -1(a0)
182
1832: jr ra /* done */
184 move a2, zero
185 .if __memset == 1
186 END(memset)
187 .set __memset, 0
188 .hidden __memset
189 .endif
190
191.Lfirst_fixup\@:
192 jr ra
193 nop
194
195.Lfwd_fixup\@:
196 PTR_L t0, TI_TASK($28)
197 andi a2, 0x3f
198 LONG_L t0, THREAD_BUADDR(t0)
199 LONG_ADDU a2, t1
200 jr ra
201 LONG_SUBU a2, t0
202
203.Lpartial_fixup\@:
204 PTR_L t0, TI_TASK($28)
205 andi a2, STORMASK
206 LONG_L t0, THREAD_BUADDR(t0)
207 LONG_ADDU a2, t1
208 jr ra
209 LONG_SUBU a2, t0
210
211.Llast_fixup\@:
212 jr ra
213 andi v1, a2, STORMASK
214
215 .endm
216
/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 *
 * Returns the original dst pointer in v0.  The fill byte is splatted
 * across the whole register before falling into __BUILD_BZERO, which
 * does the actual store loops.
 */

LEAF(memset)
	beqz	a1, 1f			/* fill byte 0: no splat needed */
	move	v0, a0			/* result */

	andi	a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or	a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or	a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif