author	Sam Ravnborg <sam@ravnborg.org>	2008-12-03 06:11:52 -0500
committer	David S. Miller <davem@davemloft.net>	2008-12-04 12:17:21 -0500
commit	a88b5ba8bd8ac18aad65ee6c6a254e2e74876db3 (patch)
tree	eb3d0ffaf53c3f7ec6083752c2097cecd1cb892a /arch/sparc/kernel/ktlb.S
parent	d670bd4f803c8b646acd20f3ba21e65458293faf (diff)
sparc,sparc64: unify kernel/
o Move all files from sparc64/kernel/ to sparc/kernel/ - rename as appropriate
o Update sparc/Makefile to reflect the changes
o Update sparc/kernel/Makefile to include the sparc64 files

NOTE: This commit changes link order on sparc64! Link order had to change
for either sparc32 or sparc64, and since sparc64 sees more testing than
sparc32, the link order was changed on sparc64, where issues will be
caught faster.

Signed-off-by: Sam Ravnborg <sam@ravnborg.org>
Signed-off-by: David S. Miller <davem@davemloft.net>
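As a rough illustration of the Makefile part of this change, the unified
kernel/Makefile can select objects per ISA with the usual kbuild
conditionals. This is a hypothetical sketch of the pattern, not the
literal Makefile from this commit:

# arch/sparc/kernel/Makefile (hypothetical sketch only)
obj-$(CONFIG_SPARC64) += ktlb.o        # sparc64-only objects such as this file
obj-$(CONFIG_SPARC32) += wof.o wuf.o   # sparc32-only register window handlers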
Diffstat (limited to 'arch/sparc/kernel/ktlb.S')
-rw-r--r--	arch/sparc/kernel/ktlb.S	304
1 file changed, 304 insertions(+), 0 deletions(-)
diff --git a/arch/sparc/kernel/ktlb.S b/arch/sparc/kernel/ktlb.S
new file mode 100644
index 000000000000..cef8defcd7a9
--- /dev/null
+++ b/arch/sparc/kernel/ktlb.S
@@ -0,0 +1,304 @@
/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align	32

kvmap_itlb:
	/* g6: TAG TARGET */
	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

kvmap_itlb_nonlinear:
	/* Catch kernel NULL pointer calls. */
	sethi	%hi(PAGE_SIZE), %g5
	cmp	%g4, %g5
	bleu,pn	%xcc, kvmap_dtlb_longpath
	 nop

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
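	/* A TSB hit branches straight to kvmap_itlb_load with the PTE
	 * in %g5; a miss falls through with the TSB entry address left
	 * in %g1.
	 */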

kvmap_itlb_tsb_miss:
	sethi	%hi(LOW_OBP_ADDRESS), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_itlb_vmalloc_addr
	 mov	0x1, %g5
	sllx	%g5, 32, %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_itlb_obp
	 nop
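	/* Only [LOW_OBP_ADDRESS, 4GB) belongs to the OpenBoot PROM;
	 * anything below it, or at 4GB and above, falls through to the
	 * kernel page table walk.
	 */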

kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	KTSB_LOCK_TAG(%g1, %g2, %g7)
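	/* The tag is now locked, so concurrent TSB probes see the entry
	 * as busy while the tag/PTE pair is rewritten below.
	 */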

	/* Load and check PTE. */
	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
	mov	1, %g7
	sllx	%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn %g5, kvmap_itlb_longpath
	 KTSB_STORE(%g1, %g7)
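	/* brgez,a: a PTE with the sign (valid) bit clear is not valid.
	 * In that case the annulled delay slot executes, storing the
	 * invalid-tag value and releasing the lock before we take the
	 * long path.
	 */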

	KTSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa	%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	nop
	nop
	.previous
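	/* Boot-time patching: each .sun4v_2insn_patch entry records an
	 * address (661b) plus two replacement instructions that
	 * overwrite the two instructions at that address when running
	 * on sun4v.
	 */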

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt	%xcc, sun4v_itlb_load
	 mov	%g5, %g3

kvmap_itlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	nop
	.previous
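	/* wrpr XORs AG|MG into %pstate, switching from the MMU globals
	 * to the alternate globals for the common fault path; on sun4v
	 * the patched SET_GL(1) selects global register level 1 instead.
	 */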

	rdpr	%tpc, %g5
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_ITLB, %g4

kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	KTSB_LOCK_TAG(%g1, %g2, %g7)

	KTSB_WRITE(%g1, %g5, %g6)

	ba,pt	%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	KTSB_LOCK_TAG(%g1, %g2, %g7)

	KTSB_WRITE(%g1, %g5, %g6)

	ba,pt	%xcc, kvmap_dtlb_load
	 nop

	.align	32
kvmap_dtlb_tsb4m_load:
	KTSB_LOCK_TAG(%g1, %g2, %g7)
	KTSB_WRITE(%g1, %g5, %g6)
	ba,pt	%xcc, kvmap_dtlb_load
	 nop

kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	brgez,pn %g4, kvmap_dtlb_nonlinear
	 nop
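	/* Linear kernel addresses live in the negative half of the
	 * address space (PAGE_OFFSET has bit 63 set), so a non-negative
	 * vaddr cannot be a linear mapping.
	 */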

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB. */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* TSB entry address left in %g1, lookup linear PTE.
	 * Must preserve %g1 and %g6 (TAG).
	 */
kvmap_dtlb_tsb4m_miss:
	sethi	%hi(kpte_linear_bitmap), %g2
	or	%g2, %lo(kpte_linear_bitmap), %g2

	/* Clear the PAGE_OFFSET top virtual bits, then shift
	 * down to get a 256MB physical address index.
	 */
	sllx	%g4, 21, %g5
	mov	1, %g7
	srlx	%g5, 21 + 28, %g5

	/* Don't try this at home kids... this depends upon sllx
	 * only taking the low 6 bits of the shift count in %g5.
	 */
	sllx	%g7, %g5, %g7

	/* Divide by 64 to get the offset into the bitmask. */
	srlx	%g5, 6, %g5
	sllx	%g5, 3, %g5

	/* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
	ldx	[%g2 + %g5], %g2
	andcc	%g2, %g7, %g0
	sethi	%hi(kern_linear_pte_xor), %g5
	or	%g5, %lo(kern_linear_pte_xor), %g5
	bne,a,pt %xcc, 1f
	 add	%g5, 8, %g5

1:	ldx	[%g5], %g2

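	/* The computation so far, as a C sketch (illustrative only):
	 *
	 *   idx = (vaddr << 21) >> (21 + 28);  // bits 28..42: 256MB slot
	 *   bit = 1UL << (idx & 63);           // shift count masked to 6 bits
	 *   sel = (kpte_linear_bitmap[idx >> 6] & bit) ? 1 : 0;
	 *   pte = kern_linear_pte_xor[sel] ^ vaddr;  // xor happens below
	 */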
	.globl	kvmap_linear_patch
kvmap_linear_patch:
	ba,pt	%xcc, kvmap_dtlb_tsb4m_load
	 xor	%g2, %g4, %g5

kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	KTSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE. */
	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
	mov	1, %g7
	sllx	%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn %g5, kvmap_dtlb_longpath
	 KTSB_STORE(%g1, %g7)

	KTSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa	%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt	%xcc, sun4v_dtlb_load
	 mov	%g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
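	/* On entry %g5 holds VMEMMAP_BASE (set up at the branch site
	 * below).  As a C sketch (illustrative only):
	 *
	 *   pte = vmemmap_table[(vaddr - VMEMMAP_BASE) >> 22];
	 *
	 * i.e. one 8-byte table entry per 4MB slice of the vmemmap.
	 */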
	sub	%g4, %g5, %g5
	srlx	%g5, 22, %g5
	sethi	%hi(vmemmap_table), %g1
	sllx	%g5, 3, %g5
	or	%g1, %lo(vmemmap_table), %g1
	ba,pt	%xcc, kvmap_dtlb_load
	 ldx	[%g1 + %g5], %g5
#endif

kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs. */
	sethi	%hi(PAGE_SIZE), %g5
	cmp	%g4, %g5
	bleu,pn	%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap. */
	mov	(VMEMMAP_BASE >> 24), %g5
	sllx	%g5, 24, %g5
	cmp	%g4, %g5
	bgeu,pn	%xcc, kvmap_vmemmap
	 nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
	sethi	%hi(MODULES_VADDR), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_longpath
	 mov	(VMALLOC_END >> 24), %g5
	sllx	%g5, 24, %g5
	cmp	%g4, %g5
	bgeu,pn	%xcc, kvmap_dtlb_longpath
	 nop
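	/* Anything outside [MODULES_VADDR, VMALLOC_END) was rejected
	 * above; what remains is module/vmalloc space or OBP.
	 */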

kvmap_check_obp:
	sethi	%hi(LOW_OBP_ADDRESS), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_vmalloc_addr
	 mov	0x1, %g5
	sllx	%g5, 32, %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_obp
	 nop
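	/* Not an OBP translation (the address is at or above 4GB):
	 * handle it through the normal kernel page table walk.
	 */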
	ba,pt	%xcc, kvmap_dtlb_vmalloc_addr
	 nop

kvmap_dtlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	ldxa	[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr	%tl, %g3
	cmp	%g3, 1

661:	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

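	/* %g5 now holds the faulting address, read from TLB_TAG_ACCESS
	 * (pre-sun4v) or from the hypervisor fault area (sun4v).  A miss
	 * taken at TL1 goes to the common fault path; deeper trap levels
	 * must first unwind through the window fixup trampoline.
	 */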
	be,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB, %g4
	ba,pt	%xcc, winfix_trampoline
	 nop