/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align	32

kvmap_itlb:
	/* g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* The kernel executes in context zero, therefore we do not
	 * need to clear the context ID bits out of %g4 here.
	 */

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

	/* Catch kernel NULL pointer calls. */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_longpath
	 nop

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
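
	/* The lookup macro above (asm/tsb.h) probes the kernel TSB:
	 * on a tag hit it branches to kvmap_itlb_load with the PTE in
	 * %g5.  On a miss it falls through, and per the conventions
	 * used by the refill paths below, %g1 is left pointing at the
	 * TSB entry for the later TSB_LOCK_TAG/TSB_WRITE.
	 */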

kvmap_itlb_tsb_miss:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop
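
	/* The two checks above classify the miss address: below
	 * LOW_OBP_ADDRESS it is vmalloc/module space; inside
	 * [LOW_OBP_ADDRESS, 1<<32) it is an OpenBoot PROM translation;
	 * at or above 4GB we fall through to the vmalloc path below.
	 * The mov in the first delay slot always executes but is
	 * harmless, %g5 is clobbered at either target.
	 */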

kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
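
	/* KERN_PGTABLE_WALK (asm/tsb.h) walks the kernel page tables
	 * for the vaddr in %g4, leaving the PTE in %g5 (%g2 is a
	 * temporary) and branching to kvmap_itlb_longpath when no
	 * valid mapping exists.
	 */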

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
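
	/* Refill the TSB entry found by the earlier lookup: lock the
	 * tag word first so a concurrent miss handler cannot observe
	 * a half-written entry, then store the PTE and the real tag.
	 */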

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3

kvmap_itlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	nop
	.previous

	rdpr		%tpc, %g5
	ba,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_ITLB, %g4

kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
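
	/* OBP_TRANS_LOOKUP scans the translation entries recorded from
	 * the OpenBoot PROM for the vaddr in %g4, producing the PTE in
	 * %g5 (%g2/%g3 are temporaries) and branching to the longpath
	 * when no entry covers the address.
	 */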

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_linear_early:
	sethi		%hi(kern_linear_pte_xor), %g7
	ldx		[%g7 + %lo(kern_linear_pte_xor)], %g2
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5
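
	/* Linear addresses need no page table walk: kern_linear_pte_xor
	 * (set up in arch/sparc/mm/init_64.c) is chosen so that
	 * vaddr ^ value yields a valid PTE for the backing physical
	 * page, folding the PAGE_OFFSET strip and the PTE protection
	 * bits into the single xor in the delay slot above.
	 */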

	.align		32
kvmap_dtlb_tsb4m_load:
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* The kernel executes in context zero, therefore we do not
	 * need to clear the context ID bits out of %g4 here.
	 */

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop
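
	/* The kernel's linear mapping lives in the top half of the
	 * address space (PAGE_OFFSET has bit 63 set), so any vaddr
	 * with a clear sign bit cannot be a linear address; the
	 * branch-on-register-non-negative above routes it to the
	 * nonlinear path.
	 */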

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB. */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
	 * page table based lookup.
	 */
	.globl		kvmap_linear_patch
kvmap_linear_patch:
	ba,a,pt		%xcc, kvmap_linear_early
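
	/* kvmap_linear_patch is exported (.globl) so that boot code in
	 * arch/sparc/mm/init_64.c can rewrite this branch site once
	 * the final linear-mapping setup is known; until then every
	 * linear TSB miss detours through kvmap_linear_early above.
	 */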

kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
	ba,a,pt		%xcc, kvmap_dtlb_load
#endif
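
	/* vmemmap (the virtually mapped struct page array) is resolved
	 * by a plain kernel page table walk and jumps straight to the
	 * TLB load, deliberately bypassing the TSB refill; see the
	 * range check under kvmap_dtlb_nonlinear below.
	 */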

kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs. */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap. */
	sethi		%hi(VMEMMAP_BASE), %g5
	ldx		[%g5 + %lo(VMEMMAP_BASE)], %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 sethi		%hi(VMALLOC_END), %g5
	ldx		[%g5 + %lo(VMALLOC_END)], %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop
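
	/* Sanity-check the miss address: anything below MODULES_VADDR
	 * or at/above VMALLOC_END is not a mappable kernel address and
	 * takes the longpath fault.  The sethi in the first delay slot
	 * runs on both paths and is harmless on the way to the
	 * longpath, which reloads %g5 immediately.
	 */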

kvmap_check_obp:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop
	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop

kvmap_dtlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	ldxa		[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr		%tl, %g3
	cmp		%g3, 1
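
	/* The TL comparison is consumed by the be,pt at the bottom;
	 * none of the intervening instructions touch the condition
	 * codes.  A miss taken at TL=1 came straight from TL=0 code
	 * and can use the common fault path; TL>1 means we faulted
	 * inside another trap handler and must go through the
	 * window-fixup trampoline instead.
	 */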

661:	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	ldx		[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous
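
	/* On sun4u the fault address is read back from TLB_TAG_ACCESS;
	 * on sun4v the two instructions above are patched to pull it
	 * from the hypervisor fault status area, whose pointer the
	 * patched scratchpad load at the top of the longpath left
	 * in %g5.
	 */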

	/* The kernel executes in context zero, therefore we do not
	 * need to clear the context ID bits out of %g5 here.
	 */

	be,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_DTLB, %g4
	ba,pt		%xcc, winfix_trampoline
	 nop