/* SPDX-License-Identifier: GPL-2.0 */
/* copy_page.S: UltraSparc optimized copy page.
 *
 * Copyright (C) 1996, 1998, 1999, 2000, 2004 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997 Jakub Jelinek (jakub@redhat.com)
 */

#include <asm/visasm.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <linux/pgtable.h>
#include <asm/spitfire.h>
#include <asm/head.h>
#include <asm/export.h>

16 /* What we used to do was lock a TLB entry into a specific
17 * TLB slot, clear the page with interrupts disabled, then
18 * restore the original TLB entry. This was great for
19 * disturbing the TLB as little as possible, but it meant
20 * we had to keep interrupts disabled for a long time.
21 *
22 * Now, we simply use the normal TLB loading mechanism,
23 * and this makes the cpu choose a slot all by itself.
24 * Then we do a normal TLB flush on exit. We need only
25 * disable preemption during the clear.
26 */
27
28#define DCACHE_SIZE (PAGE_SIZE * 2)
29
30#if (PAGE_SHIFT == 13)
31#define PAGE_SIZE_REM 0x80
32#elif (PAGE_SHIFT == 16)
33#define PAGE_SIZE_REM 0x100
34#else
35#error Wrong PAGE_SHIFT specified
36#endif
37
38#define TOUCH(reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7) \
39 fsrc2 %reg0, %f48; fsrc2 %reg1, %f50; \
40 fsrc2 %reg2, %f52; fsrc2 %reg3, %f54; \
41 fsrc2 %reg4, %f56; fsrc2 %reg5, %f58; \
42 fsrc2 %reg6, %f60; fsrc2 %reg7, %f62;
43
44 .text
45
46 .align 32
47 .globl copy_user_page
48 .type copy_user_page,#function
49 EXPORT_SYMBOL(copy_user_page)
50copy_user_page: /* %o0=dest, %o1=src, %o2=vaddr */
51 lduw [%g6 + TI_PRE_COUNT], %o4
52 sethi %hi(PAGE_OFFSET), %g2
53 sethi %hi(PAGE_SIZE), %o3
54
55 ldx [%g2 + %lo(PAGE_OFFSET)], %g2
56 sethi %hi(PAGE_KERNEL_LOCKED), %g3
57
58 ldx [%g3 + %lo(PAGE_KERNEL_LOCKED)], %g3
59 sub %o0, %g2, %g1 ! dest paddr
60
61 sub %o1, %g2, %g2 ! src paddr
62
63 and %o2, %o3, %o0 ! vaddr D-cache alias bit
64 or %g1, %g3, %g1 ! dest TTE data
65
66 or %g2, %g3, %g2 ! src TTE data
67 sethi %hi(TLBTEMP_BASE), %o3
68
69 sethi %hi(DCACHE_SIZE), %o1
70 add %o0, %o3, %o0 ! dest TTE vaddr
71
72 add %o4, 1, %o2
73 add %o0, %o1, %o1 ! src TTE vaddr
74
75 /* Disable preemption. */
76 mov TLB_TAG_ACCESS, %g3
77 stw %o2, [%g6 + TI_PRE_COUNT]
78
79 /* Load TLB entries. */
80 rdpr %pstate, %o2
81 wrpr %o2, PSTATE_IE, %pstate
82 stxa %o0, [%g3] ASI_DMMU
83 stxa %g1, [%g0] ASI_DTLB_DATA_IN
84 membar #Sync
85 stxa %o1, [%g3] ASI_DMMU
86 stxa %g2, [%g0] ASI_DTLB_DATA_IN
87 membar #Sync
88 wrpr %o2, 0x0, %pstate
89
90cheetah_copy_page_insn:
91 ba,pt %xcc, 9f
92 nop
93
941:
95 VISEntryHalf
96 membar #StoreLoad | #StoreStore | #LoadStore
97 sethi %hi((PAGE_SIZE/64)-2), %o2
98 mov %o0, %g1
99 prefetch [%o1 + 0x000], #one_read
100 or %o2, %lo((PAGE_SIZE/64)-2), %o2
101 prefetch [%o1 + 0x040], #one_read
102 prefetch [%o1 + 0x080], #one_read
103 prefetch [%o1 + 0x0c0], #one_read
104 ldd [%o1 + 0x000], %f0
105 prefetch [%o1 + 0x100], #one_read
106 ldd [%o1 + 0x008], %f2
107 prefetch [%o1 + 0x140], #one_read
108 ldd [%o1 + 0x010], %f4
109 prefetch [%o1 + 0x180], #one_read
110 fsrc2 %f0, %f16
111 ldd [%o1 + 0x018], %f6
112 fsrc2 %f2, %f18
113 ldd [%o1 + 0x020], %f8
114 fsrc2 %f4, %f20
115 ldd [%o1 + 0x028], %f10
116 fsrc2 %f6, %f22
117 ldd [%o1 + 0x030], %f12
118 fsrc2 %f8, %f24
119 ldd [%o1 + 0x038], %f14
120 fsrc2 %f10, %f26
121 ldd [%o1 + 0x040], %f0
1221: ldd [%o1 + 0x048], %f2
123 fsrc2 %f12, %f28
124 ldd [%o1 + 0x050], %f4
125 fsrc2 %f14, %f30
126 stda %f16, [%o0] ASI_BLK_P
127 ldd [%o1 + 0x058], %f6
128 fsrc2 %f0, %f16
129 ldd [%o1 + 0x060], %f8
130 fsrc2 %f2, %f18
131 ldd [%o1 + 0x068], %f10
132 fsrc2 %f4, %f20
133 ldd [%o1 + 0x070], %f12
134 fsrc2 %f6, %f22
135 ldd [%o1 + 0x078], %f14
136 fsrc2 %f8, %f24
137 ldd [%o1 + 0x080], %f0
138 prefetch [%o1 + 0x180], #one_read
139 fsrc2 %f10, %f26
140 subcc %o2, 1, %o2
141 add %o0, 0x40, %o0
142 bne,pt %xcc, 1b
143 add %o1, 0x40, %o1
144
145 ldd [%o1 + 0x048], %f2
146 fsrc2 %f12, %f28
147 ldd [%o1 + 0x050], %f4
148 fsrc2 %f14, %f30
149 stda %f16, [%o0] ASI_BLK_P
150 ldd [%o1 + 0x058], %f6
151 fsrc2 %f0, %f16
152 ldd [%o1 + 0x060], %f8
153 fsrc2 %f2, %f18
154 ldd [%o1 + 0x068], %f10
155 fsrc2 %f4, %f20
156 ldd [%o1 + 0x070], %f12
157 fsrc2 %f6, %f22
158 add %o0, 0x40, %o0
159 ldd [%o1 + 0x078], %f14
160 fsrc2 %f8, %f24
161 fsrc2 %f10, %f26
162 fsrc2 %f12, %f28
163 fsrc2 %f14, %f30
164 stda %f16, [%o0] ASI_BLK_P
165 membar #Sync
166 VISExitHalf
167 ba,pt %xcc, 5f
168 nop
169
1709:
171 VISEntry
172 ldub [%g6 + TI_FAULT_CODE], %g3
173 mov %o0, %g1
174 cmp %g3, 0
175 rd %asi, %g3
176 be,a,pt %icc, 1f
177 wr %g0, ASI_BLK_P, %asi
178 wr %g0, ASI_BLK_COMMIT_P, %asi
1791: ldda [%o1] ASI_BLK_P, %f0
180 add %o1, 0x40, %o1
181 ldda [%o1] ASI_BLK_P, %f16
182 add %o1, 0x40, %o1
183 sethi %hi(PAGE_SIZE), %o2
1841: TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
185 ldda [%o1] ASI_BLK_P, %f32
186 stda %f48, [%o0] %asi
187 add %o1, 0x40, %o1
188 sub %o2, 0x40, %o2
189 add %o0, 0x40, %o0
190 TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
191 ldda [%o1] ASI_BLK_P, %f0
192 stda %f48, [%o0] %asi
193 add %o1, 0x40, %o1
194 sub %o2, 0x40, %o2
195 add %o0, 0x40, %o0
196 TOUCH(f32, f34, f36, f38, f40, f42, f44, f46)
197 ldda [%o1] ASI_BLK_P, %f16
198 stda %f48, [%o0] %asi
199 sub %o2, 0x40, %o2
200 add %o1, 0x40, %o1
201 cmp %o2, PAGE_SIZE_REM
202 bne,pt %xcc, 1b
203 add %o0, 0x40, %o0
204#if (PAGE_SHIFT == 16)
205 TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
206 ldda [%o1] ASI_BLK_P, %f32
207 stda %f48, [%o0] %asi
208 add %o1, 0x40, %o1
209 sub %o2, 0x40, %o2
210 add %o0, 0x40, %o0
211 TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
212 ldda [%o1] ASI_BLK_P, %f0
213 stda %f48, [%o0] %asi
214 add %o1, 0x40, %o1
215 sub %o2, 0x40, %o2
216 add %o0, 0x40, %o0
217 membar #Sync
218 stda %f32, [%o0] %asi
219 add %o0, 0x40, %o0
220 stda %f0, [%o0] %asi
221#else
222 membar #Sync
223 stda %f0, [%o0] %asi
224 add %o0, 0x40, %o0
225 stda %f16, [%o0] %asi
226#endif
227 membar #Sync
228 wr %g3, 0x0, %asi
229 VISExit
230
2315:
232 stxa %g0, [%g1] ASI_DMMU_DEMAP
233 membar #Sync
234
235 sethi %hi(DCACHE_SIZE), %g2
236 stxa %g0, [%g1 + %g2] ASI_DMMU_DEMAP
237 membar #Sync
238
239 retl
240 stw %o4, [%g6 + TI_PRE_COUNT]
241
242 .size copy_user_page, .-copy_user_page
243
244 .globl cheetah_patch_copy_page
245cheetah_patch_copy_page:
246 sethi %hi(0x01000000), %o1 ! NOP
247 sethi %hi(cheetah_copy_page_insn), %o0
248 or %o0, %lo(cheetah_copy_page_insn), %o0
249 stw %o1, [%o0]
250 membar #StoreStore
251 flush %o0
252 retl
253 nop
/* copy_page.S: UltraSparc optimized copy page.
 *
 * NOTE(review): everything from here to end-of-file is a second, OLDER
 * revision of this same file (fmovd-based, %uhi PAGE_OFFSET constant,
 * no EXPORT_SYMBOL) apparently duplicated by the page extraction.  The
 * repeated copy_user_page / cheetah_patch_copy_page labels cannot
 * assemble alongside the first copy -- confirm and deduplicate.
 *
 * Copyright (C) 1996, 1998, 1999, 2000, 2004 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997 Jakub Jelinek (jakub@redhat.com)
 */

#include <asm/visasm.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/spitfire.h>
#include <asm/head.h>

14 /* What we used to do was lock a TLB entry into a specific
15 * TLB slot, clear the page with interrupts disabled, then
16 * restore the original TLB entry. This was great for
17 * disturbing the TLB as little as possible, but it meant
18 * we had to keep interrupts disabled for a long time.
19 *
20 * Now, we simply use the normal TLB loading mechanism,
21 * and this makes the cpu choose a slot all by itself.
22 * Then we do a normal TLB flush on exit. We need only
23 * disable preemption during the clear.
24 */
25
26#define DCACHE_SIZE (PAGE_SIZE * 2)
27
28#if (PAGE_SHIFT == 13)
29#define PAGE_SIZE_REM 0x80
30#elif (PAGE_SHIFT == 16)
31#define PAGE_SIZE_REM 0x100
32#else
33#error Wrong PAGE_SHIFT specified
34#endif
35
36#define TOUCH(reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7) \
37 fmovd %reg0, %f48; fmovd %reg1, %f50; \
38 fmovd %reg2, %f52; fmovd %reg3, %f54; \
39 fmovd %reg4, %f56; fmovd %reg5, %f58; \
40 fmovd %reg6, %f60; fmovd %reg7, %f62;
41
42 .text
43
44 .align 32
45 .globl copy_user_page
46 .type copy_user_page,#function
47copy_user_page: /* %o0=dest, %o1=src, %o2=vaddr */
48 lduw [%g6 + TI_PRE_COUNT], %o4
49 sethi %uhi(PAGE_OFFSET), %g2
50 sethi %hi(PAGE_SIZE), %o3
51
52 sllx %g2, 32, %g2
53 sethi %hi(PAGE_KERNEL_LOCKED), %g3
54
55 ldx [%g3 + %lo(PAGE_KERNEL_LOCKED)], %g3
56 sub %o0, %g2, %g1 ! dest paddr
57
58 sub %o1, %g2, %g2 ! src paddr
59
60 and %o2, %o3, %o0 ! vaddr D-cache alias bit
61 or %g1, %g3, %g1 ! dest TTE data
62
63 or %g2, %g3, %g2 ! src TTE data
64 sethi %hi(TLBTEMP_BASE), %o3
65
66 sethi %hi(DCACHE_SIZE), %o1
67 add %o0, %o3, %o0 ! dest TTE vaddr
68
69 add %o4, 1, %o2
70 add %o0, %o1, %o1 ! src TTE vaddr
71
72 /* Disable preemption. */
73 mov TLB_TAG_ACCESS, %g3
74 stw %o2, [%g6 + TI_PRE_COUNT]
75
76 /* Load TLB entries. */
77 rdpr %pstate, %o2
78 wrpr %o2, PSTATE_IE, %pstate
79 stxa %o0, [%g3] ASI_DMMU
80 stxa %g1, [%g0] ASI_DTLB_DATA_IN
81 membar #Sync
82 stxa %o1, [%g3] ASI_DMMU
83 stxa %g2, [%g0] ASI_DTLB_DATA_IN
84 membar #Sync
85 wrpr %o2, 0x0, %pstate
86
87cheetah_copy_page_insn:
88 ba,pt %xcc, 9f
89 nop
90
911:
92 VISEntryHalf
93 membar #StoreLoad | #StoreStore | #LoadStore
94 sethi %hi((PAGE_SIZE/64)-2), %o2
95 mov %o0, %g1
96 prefetch [%o1 + 0x000], #one_read
97 or %o2, %lo((PAGE_SIZE/64)-2), %o2
98 prefetch [%o1 + 0x040], #one_read
99 prefetch [%o1 + 0x080], #one_read
100 prefetch [%o1 + 0x0c0], #one_read
101 ldd [%o1 + 0x000], %f0
102 prefetch [%o1 + 0x100], #one_read
103 ldd [%o1 + 0x008], %f2
104 prefetch [%o1 + 0x140], #one_read
105 ldd [%o1 + 0x010], %f4
106 prefetch [%o1 + 0x180], #one_read
107 fmovd %f0, %f16
108 ldd [%o1 + 0x018], %f6
109 fmovd %f2, %f18
110 ldd [%o1 + 0x020], %f8
111 fmovd %f4, %f20
112 ldd [%o1 + 0x028], %f10
113 fmovd %f6, %f22
114 ldd [%o1 + 0x030], %f12
115 fmovd %f8, %f24
116 ldd [%o1 + 0x038], %f14
117 fmovd %f10, %f26
118 ldd [%o1 + 0x040], %f0
1191: ldd [%o1 + 0x048], %f2
120 fmovd %f12, %f28
121 ldd [%o1 + 0x050], %f4
122 fmovd %f14, %f30
123 stda %f16, [%o0] ASI_BLK_P
124 ldd [%o1 + 0x058], %f6
125 fmovd %f0, %f16
126 ldd [%o1 + 0x060], %f8
127 fmovd %f2, %f18
128 ldd [%o1 + 0x068], %f10
129 fmovd %f4, %f20
130 ldd [%o1 + 0x070], %f12
131 fmovd %f6, %f22
132 ldd [%o1 + 0x078], %f14
133 fmovd %f8, %f24
134 ldd [%o1 + 0x080], %f0
135 prefetch [%o1 + 0x180], #one_read
136 fmovd %f10, %f26
137 subcc %o2, 1, %o2
138 add %o0, 0x40, %o0
139 bne,pt %xcc, 1b
140 add %o1, 0x40, %o1
141
142 ldd [%o1 + 0x048], %f2
143 fmovd %f12, %f28
144 ldd [%o1 + 0x050], %f4
145 fmovd %f14, %f30
146 stda %f16, [%o0] ASI_BLK_P
147 ldd [%o1 + 0x058], %f6
148 fmovd %f0, %f16
149 ldd [%o1 + 0x060], %f8
150 fmovd %f2, %f18
151 ldd [%o1 + 0x068], %f10
152 fmovd %f4, %f20
153 ldd [%o1 + 0x070], %f12
154 fmovd %f6, %f22
155 add %o0, 0x40, %o0
156 ldd [%o1 + 0x078], %f14
157 fmovd %f8, %f24
158 fmovd %f10, %f26
159 fmovd %f12, %f28
160 fmovd %f14, %f30
161 stda %f16, [%o0] ASI_BLK_P
162 membar #Sync
163 VISExitHalf
164 ba,pt %xcc, 5f
165 nop
166
1679:
168 VISEntry
169 ldub [%g6 + TI_FAULT_CODE], %g3
170 mov %o0, %g1
171 cmp %g3, 0
172 rd %asi, %g3
173 be,a,pt %icc, 1f
174 wr %g0, ASI_BLK_P, %asi
175 wr %g0, ASI_BLK_COMMIT_P, %asi
1761: ldda [%o1] ASI_BLK_P, %f0
177 add %o1, 0x40, %o1
178 ldda [%o1] ASI_BLK_P, %f16
179 add %o1, 0x40, %o1
180 sethi %hi(PAGE_SIZE), %o2
1811: TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
182 ldda [%o1] ASI_BLK_P, %f32
183 stda %f48, [%o0] %asi
184 add %o1, 0x40, %o1
185 sub %o2, 0x40, %o2
186 add %o0, 0x40, %o0
187 TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
188 ldda [%o1] ASI_BLK_P, %f0
189 stda %f48, [%o0] %asi
190 add %o1, 0x40, %o1
191 sub %o2, 0x40, %o2
192 add %o0, 0x40, %o0
193 TOUCH(f32, f34, f36, f38, f40, f42, f44, f46)
194 ldda [%o1] ASI_BLK_P, %f16
195 stda %f48, [%o0] %asi
196 sub %o2, 0x40, %o2
197 add %o1, 0x40, %o1
198 cmp %o2, PAGE_SIZE_REM
199 bne,pt %xcc, 1b
200 add %o0, 0x40, %o0
201#if (PAGE_SHIFT == 16)
202 TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
203 ldda [%o1] ASI_BLK_P, %f32
204 stda %f48, [%o0] %asi
205 add %o1, 0x40, %o1
206 sub %o2, 0x40, %o2
207 add %o0, 0x40, %o0
208 TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
209 ldda [%o1] ASI_BLK_P, %f0
210 stda %f48, [%o0] %asi
211 add %o1, 0x40, %o1
212 sub %o2, 0x40, %o2
213 add %o0, 0x40, %o0
214 membar #Sync
215 stda %f32, [%o0] %asi
216 add %o0, 0x40, %o0
217 stda %f0, [%o0] %asi
218#else
219 membar #Sync
220 stda %f0, [%o0] %asi
221 add %o0, 0x40, %o0
222 stda %f16, [%o0] %asi
223#endif
224 membar #Sync
225 wr %g3, 0x0, %asi
226 VISExit
227
2285:
229 stxa %g0, [%g1] ASI_DMMU_DEMAP
230 membar #Sync
231
232 sethi %hi(DCACHE_SIZE), %g2
233 stxa %g0, [%g1 + %g2] ASI_DMMU_DEMAP
234 membar #Sync
235
236 retl
237 stw %o4, [%g6 + TI_PRE_COUNT]
238
239 .size copy_user_page, .-copy_user_page
240
241 .globl cheetah_patch_copy_page
242cheetah_patch_copy_page:
243 sethi %hi(0x01000000), %o1 ! NOP
244 sethi %hi(cheetah_copy_page_insn), %o0
245 or %o0, %lo(cheetah_copy_page_insn), %o0
246 stw %o1, [%o0]
247 membar #StoreStore
248 flush %o0
249 retl
250 nop