Diffstat (limited to 'arch/sparc64/kernel/head.S')
-rw-r--r--	arch/sparc64/kernel/head.S	556
1 file changed, 160 insertions(+), 396 deletions(-)
diff --git a/arch/sparc64/kernel/head.S b/arch/sparc64/kernel/head.S
index 1fa06c4e3bdb..ecc748fb9ad7 100644
--- a/arch/sparc64/kernel/head.S
+++ b/arch/sparc64/kernel/head.S
@@ -80,15 +80,165 @@ sparc_ramdisk_image64:
 	.xword	0
 	.word	_end
 
-	/* We must be careful, 32-bit OpenBOOT will get confused if it
-	 * tries to save away a register window to a 64-bit kernel
-	 * stack address.  Flush all windows, disable interrupts,
-	 * remap if necessary, jump onto kernel trap table, then kernel
-	 * stack, or else we die.
+	/* PROM cif handler code address is in %o4.  */
+sparc64_boot:
+1:	rd	%pc, %g7
+	set	1b, %g1
+	cmp	%g1, %g7
+	be,pn	%xcc, sparc64_boot_after_remap
+	mov	%o4, %l7
+
+	/* We need to remap the kernel.  Use position independant
+	 * code to remap us to KERNBASE.
 	 *
-	 * PROM entry point is on %o4
+	 * SILO can invoke us with 32-bit address masking enabled,
+	 * so make sure that's clear.
 	 */
-sparc64_boot:
+	rdpr	%pstate, %g1
+	andn	%g1, PSTATE_AM, %g1
+	wrpr	%g1, 0x0, %pstate
+	ba,a,pt	%xcc, 1f
+
+	.globl	prom_finddev_name, prom_chosen_path
+	.globl	prom_getprop_name, prom_mmu_name
+	.globl	prom_callmethod_name, prom_translate_name
+	.globl	prom_map_name, prom_unmap_name, prom_mmu_ihandle_cache
+	.globl	prom_boot_mapped_pc, prom_boot_mapping_mode
+	.globl	prom_boot_mapping_phys_high, prom_boot_mapping_phys_low
+prom_finddev_name:
+	.asciz	"finddevice"
+prom_chosen_path:
+	.asciz	"/chosen"
+prom_getprop_name:
+	.asciz	"getprop"
+prom_mmu_name:
+	.asciz	"mmu"
+prom_callmethod_name:
+	.asciz	"call-method"
+prom_translate_name:
+	.asciz	"translate"
+prom_map_name:
+	.asciz	"map"
+prom_unmap_name:
+	.asciz	"unmap"
+	.align	4
+prom_mmu_ihandle_cache:
+	.word	0
+prom_boot_mapped_pc:
+	.word	0
+prom_boot_mapping_mode:
+	.word	0
+	.align	8
+prom_boot_mapping_phys_high:
+	.xword	0
+prom_boot_mapping_phys_low:
+	.xword	0
+1:
+	rd	%pc, %l0
+	mov	(1b - prom_finddev_name), %l1
+	mov	(1b - prom_chosen_path), %l2
+	mov	(1b - prom_boot_mapped_pc), %l3
+	sub	%l0, %l1, %l1
+	sub	%l0, %l2, %l2
+	sub	%l0, %l3, %l3
+	stw	%l0, [%l3]
+	sub	%sp, (192 + 128), %sp
+
+	/* chosen_node = prom_finddevice("/chosen") */
+	stx	%l1, [%sp + 2047 + 128 + 0x00]	! service, "finddevice"
+	mov	1, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x08]	! num_args, 1
+	stx	%l3, [%sp + 2047 + 128 + 0x10]	! num_rets, 1
+	stx	%l2, [%sp + 2047 + 128 + 0x18]	! arg1, "/chosen"
+	stx	%g0, [%sp + 2047 + 128 + 0x20]	! ret1
+	call	%l7
+	add	%sp, (2047 + 128), %o0		! argument array
+
+	ldx	[%sp + 2047 + 128 + 0x20], %l4	! chosen device node
+
+	mov	(1b - prom_getprop_name), %l1
+	mov	(1b - prom_mmu_name), %l2
+	mov	(1b - prom_mmu_ihandle_cache), %l5
+	sub	%l0, %l1, %l1
+	sub	%l0, %l2, %l2
+	sub	%l0, %l5, %l5
+
+	/* prom_mmu_ihandle_cache = prom_getint(chosen_node, "mmu") */
+	stx	%l1, [%sp + 2047 + 128 + 0x00]	! service, "getprop"
+	mov	4, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x08]	! num_args, 4
+	mov	1, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x10]	! num_rets, 1
+	stx	%l4, [%sp + 2047 + 128 + 0x18]	! arg1, chosen_node
+	stx	%l2, [%sp + 2047 + 128 + 0x20]	! arg2, "mmu"
+	stx	%l5, [%sp + 2047 + 128 + 0x28]	! arg3, &prom_mmu_ihandle_cache
+	mov	4, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x30]	! arg4, sizeof(arg3)
+	stx	%g0, [%sp + 2047 + 128 + 0x38]	! ret1
+	call	%l7
+	add	%sp, (2047 + 128), %o0		! argument array
+
+	mov	(1b - prom_callmethod_name), %l1
+	mov	(1b - prom_translate_name), %l2
+	sub	%l0, %l1, %l1
+	sub	%l0, %l2, %l2
+	lduw	[%l5], %l5			! prom_mmu_ihandle_cache
+
+	stx	%l1, [%sp + 2047 + 128 + 0x00]	! service, "call-method"
+	mov	3, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x08]	! num_args, 3
+	mov	5, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x10]	! num_rets, 5
+	stx	%l2, [%sp + 2047 + 128 + 0x18]	! arg1: "translate"
+	stx	%l5, [%sp + 2047 + 128 + 0x20]	! arg2: prom_mmu_ihandle_cache
+	srlx	%l0, 22, %l3
+	sllx	%l3, 22, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x28]	! arg3: vaddr, our PC
+	stx	%g0, [%sp + 2047 + 128 + 0x30]	! res1
+	stx	%g0, [%sp + 2047 + 128 + 0x38]	! res2
+	stx	%g0, [%sp + 2047 + 128 + 0x40]	! res3
+	stx	%g0, [%sp + 2047 + 128 + 0x48]	! res4
+	stx	%g0, [%sp + 2047 + 128 + 0x50]	! res5
+	call	%l7
+	add	%sp, (2047 + 128), %o0		! argument array
+
+	ldx	[%sp + 2047 + 128 + 0x40], %l1	! translation mode
+	mov	(1b - prom_boot_mapping_mode), %l4
+	sub	%l0, %l4, %l4
+	stw	%l1, [%l4]
+	mov	(1b - prom_boot_mapping_phys_high), %l4
+	sub	%l0, %l4, %l4
+	ldx	[%sp + 2047 + 128 + 0x48], %l2	! physaddr high
+	stx	%l2, [%l4 + 0x0]
+	ldx	[%sp + 2047 + 128 + 0x50], %l3	! physaddr low
+	stx	%l3, [%l4 + 0x8]
+
+	/* Leave service as-is, "call-method" */
+	mov	7, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x08]	! num_args, 7
+	mov	1, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x10]	! num_rets, 1
+	mov	(1b - prom_map_name), %l3
+	sub	%l0, %l3, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x18]	! arg1: "map"
+	/* Leave arg2 as-is, prom_mmu_ihandle_cache */
+	mov	-1, %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x28]	! arg3: mode (-1 default)
+	sethi	%hi(8 * 1024 * 1024), %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x30]	! arg4: size (8MB)
+	sethi	%hi(KERNBASE), %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x38]	! arg5: vaddr (KERNBASE)
+	stx	%g0, [%sp + 2047 + 128 + 0x40]	! arg6: empty
+	mov	(1b - prom_boot_mapping_phys_low), %l3
+	sub	%l0, %l3, %l3
+	ldx	[%l3], %l3
+	stx	%l3, [%sp + 2047 + 128 + 0x48]	! arg7: phys addr
+	call	%l7
+	add	%sp, (2047 + 128), %o0		! argument array
+
+	add	%sp, (192 + 128), %sp
+
+sparc64_boot_after_remap:
 	BRANCH_IF_CHEETAH_BASE(g1,g7,cheetah_boot)
 	BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(g1,g7,cheetah_plus_boot)
 	ba,pt	%xcc, spitfire_boot
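
Editor's note: the new remap code above talks to the PROM client-interface handler whose address arrives in %o4. Each call builds an argument array on the stack at %sp + 2047 + 128, i.e. past the sparc64 stack bias (2047) and the 128-byte register window save area, in the usual IEEE 1275 cell format with one 64-bit cell per field. The following is a rough C view of the first call only; the struct and function names are illustrative, not taken from the kernel sources:

	/* Illustrative sketch of the 1275 client-interface argument array
	 * built at %sp + 2047 + 128 above; each cell is 64 bits wide. */
	struct cif_args {
		unsigned long service;	/* pointer to service name, e.g. "finddevice" */
		unsigned long num_args;	/* number of argument cells that follow */
		unsigned long num_rets;	/* number of return cells after the args */
		unsigned long cells[8];	/* arguments first, then return values */
	};

	typedef unsigned long (*cif_handler_t)(struct cif_args *);

	/* chosen_node = prom_finddevice("/chosen"), as in the assembly above. */
	static unsigned long find_chosen(cif_handler_t cif)
	{
		struct cif_args a;

		a.service  = (unsigned long) "finddevice";
		a.num_args = 1;
		a.num_rets = 1;
		a.cells[0] = (unsigned long) "/chosen";	/* arg1 */
		a.cells[1] = 0;				/* ret1, filled in by the PROM */
		cif(&a);
		return a.cells[1];			/* chosen device node */
	}

The later "call-method"/"translate" and "call-method"/"map" invocations reuse the same array with more cells: translate returns the mapping mode and the high/low halves of the physical address backing the current PC, which the code stashes in prom_boot_mapping_* and then feeds back to map 8MB of that physical memory at KERNBASE.
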
@@ -125,185 +275,7 @@ cheetah_generic_boot:
 	stxa	%g0, [%g3] ASI_IMMU
 	membar	#Sync
 
-	wrpr	%g0, (PSTATE_PRIV|PSTATE_PEF|PSTATE_IE), %pstate
-	wr	%g0, 0, %fprs
-
-	/* Just like for Spitfire, we probe itlb-2 for a mapping which
-	 * matches our current %pc.  We take the physical address in
-	 * that mapping and use it to make our own.
-	 */
-
-	/* %g5 holds the tlb data */
-	sethi	%uhi(_PAGE_VALID | _PAGE_SZ4MB), %g5
-	sllx	%g5, 32, %g5
-	or	%g5, (_PAGE_CP | _PAGE_CV | _PAGE_P | _PAGE_L | _PAGE_W | _PAGE_G), %g5
-
-	/* Put PADDR tlb data mask into %g3. */
-	sethi	%uhi(_PAGE_PADDR), %g3
-	or	%g3, %ulo(_PAGE_PADDR), %g3
-	sllx	%g3, 32, %g3
-	sethi	%hi(_PAGE_PADDR), %g7
-	or	%g7, %lo(_PAGE_PADDR), %g7
-	or	%g3, %g7, %g3
-
-	set	2 << 16, %l0		/* TLB entry walker. */
-	set	0x1fff, %l2		/* Page mask. */
-	rd	%pc, %l3
-	andn	%l3, %l2, %g2		/* vaddr comparator */
-
-1:	ldxa	[%l0] ASI_ITLB_TAG_READ, %g1
-	membar	#Sync
-	andn	%g1, %l2, %g1
-	cmp	%g1, %g2
-	be,pn	%xcc, cheetah_got_tlbentry
-	nop
-	and	%l0, (127 << 3), %g1
-	cmp	%g1, (127 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	/* Search the small TLB.  OBP never maps us like that but
-	 * newer SILO can.
-	 */
-	clr	%l0
-
-1:	ldxa	[%l0] ASI_ITLB_TAG_READ, %g1
-	membar	#Sync
-	andn	%g1, %l2, %g1
-	cmp	%g1, %g2
-	be,pn	%xcc, cheetah_got_tlbentry
-	nop
-	cmp	%l0, (15 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	/* BUG() if we get here... */
-	ta	0x5
-
-cheetah_got_tlbentry:
-	ldxa	[%l0] ASI_ITLB_DATA_ACCESS, %g0
-	ldxa	[%l0] ASI_ITLB_DATA_ACCESS, %g1
-	membar	#Sync
-	and	%g1, %g3, %g1
-	set	0x5fff, %l0
-	andn	%g1, %l0, %g1
-	or	%g5, %g1, %g5
-
-	/* Clear out any KERNBASE area entries. */
-	set	2 << 16, %l0
-	sethi	%hi(KERNBASE), %g3
-	sethi	%hi(KERNBASE<<1), %g7
-	mov	TLB_TAG_ACCESS, %l7
-
-	/* First, check ITLB */
-1:	ldxa	[%l0] ASI_ITLB_TAG_READ, %g1
-	membar	#Sync
-	andn	%g1, %l2, %g1
-	cmp	%g1, %g3
-	blu,pn	%xcc, 2f
-	cmp	%g1, %g7
-	bgeu,pn	%xcc, 2f
-	nop
-	stxa	%g0, [%l7] ASI_IMMU
-	membar	#Sync
-	stxa	%g0, [%l0] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-
-2:	and	%l0, (127 << 3), %g1
-	cmp	%g1, (127 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	/* Next, check DTLB */
-	set	2 << 16, %l0
-1:	ldxa	[%l0] ASI_DTLB_TAG_READ, %g1
-	membar	#Sync
-	andn	%g1, %l2, %g1
-	cmp	%g1, %g3
-	blu,pn	%xcc, 2f
-	cmp	%g1, %g7
-	bgeu,pn	%xcc, 2f
-	nop
-	stxa	%g0, [%l7] ASI_DMMU
-	membar	#Sync
-	stxa	%g0, [%l0] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-
-2:	and	%l0, (511 << 3), %g1
-	cmp	%g1, (511 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	/* On Cheetah+, have to check second DTLB. */
-	BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(g1,l0,2f)
-	ba,pt	%xcc, 9f
-	nop
-
-2:	set	3 << 16, %l0
-1:	ldxa	[%l0] ASI_DTLB_TAG_READ, %g1
-	membar	#Sync
-	andn	%g1, %l2, %g1
-	cmp	%g1, %g3
-	blu,pn	%xcc, 2f
-	cmp	%g1, %g7
-	bgeu,pn	%xcc, 2f
-	nop
-	stxa	%g0, [%l7] ASI_DMMU
-	membar	#Sync
-	stxa	%g0, [%l0] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-
-2:	and	%l0, (511 << 3), %g1
-	cmp	%g1, (511 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-9:
-
-	/* Now lock the TTE we created into ITLB-0 and DTLB-0,
-	 * entry 15 (and maybe 14 too).
-	 */
-	sethi	%hi(KERNBASE), %g3
-	set	(0 << 16) | (15 << 3), %g7
-	stxa	%g3, [%l7] ASI_DMMU
-	membar	#Sync
-	stxa	%g5, [%g7] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-	stxa	%g3, [%l7] ASI_IMMU
-	membar	#Sync
-	stxa	%g5, [%g7] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-	flush	%g3
-	membar	#Sync
-	sethi	%hi(_end), %g3			/* Check for bigkernel case */
-	or	%g3, %lo(_end), %g3
-	srl	%g3, 23, %g3			/* Check if _end > 8M */
-	brz,pt	%g3, 1f
-	sethi	%hi(KERNBASE), %g3		/* Restore for fixup code below */
-	sethi	%hi(0x400000), %g3
-	or	%g3, %lo(0x400000), %g3
-	add	%g5, %g3, %g5			/* New tte data */
-	andn	%g5, (_PAGE_G), %g5
-	sethi	%hi(KERNBASE+0x400000), %g3
-	or	%g3, %lo(KERNBASE+0x400000), %g3
-	set	(0 << 16) | (14 << 3), %g7
-	stxa	%g3, [%l7] ASI_DMMU
-	membar	#Sync
-	stxa	%g5, [%g7] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-	stxa	%g3, [%l7] ASI_IMMU
-	membar	#Sync
-	stxa	%g5, [%g7] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-	flush	%g3
-	membar	#Sync
-	sethi	%hi(KERNBASE), %g3		/* Restore for fixup code below */
-	ba,pt	%xcc, 1f
-	nop
-
-1:	set	sun4u_init, %g2
-	jmpl	%g2 + %g0, %g0
-	nop
+	ba,a,pt	%xcc, jump_to_sun4u_init
 
 spitfire_boot:
 	/* Typically PROM has already enabled both MMU's and both on-chip
@@ -313,6 +285,7 @@ spitfire_boot:
 	stxa	%g1, [%g0] ASI_LSU_CONTROL
 	membar	#Sync
 
+jump_to_sun4u_init:
 	/*
 	 * Make sure we are in privileged mode, have address masking,
 	 * using the ordinary globals and have enabled floating
@@ -324,151 +297,6 @@ spitfire_boot:
 	wrpr	%g0, (PSTATE_PRIV|PSTATE_PEF|PSTATE_IE), %pstate
 	wr	%g0, 0, %fprs
 
-spitfire_create_mappings:
-	/* %g5 holds the tlb data */
-	sethi	%uhi(_PAGE_VALID | _PAGE_SZ4MB), %g5
-	sllx	%g5, 32, %g5
-	or	%g5, (_PAGE_CP | _PAGE_CV | _PAGE_P | _PAGE_L | _PAGE_W | _PAGE_G), %g5
-
-	/* Base of physical memory cannot reliably be assumed to be
-	 * at 0x0!  Figure out where it happens to be. -DaveM
-	 */
-
-	/* Put PADDR tlb data mask into %g3. */
-	sethi	%uhi(_PAGE_PADDR_SF), %g3
-	or	%g3, %ulo(_PAGE_PADDR_SF), %g3
-	sllx	%g3, 32, %g3
-	sethi	%hi(_PAGE_PADDR_SF), %g7
-	or	%g7, %lo(_PAGE_PADDR_SF), %g7
-	or	%g3, %g7, %g3
-
-	/* Walk through entire ITLB, looking for entry which maps
-	 * our %pc currently, stick PADDR from there into %g5 tlb data.
-	 */
-	clr	%l0			/* TLB entry walker. */
-	set	0x1fff, %l2		/* Page mask. */
-	rd	%pc, %l3
-	andn	%l3, %l2, %g2		/* vaddr comparator */
-1:
-	/* Yes, the nops seem to be necessary for now, don't ask me why. -DaveM */
-	ldxa	[%l0] ASI_ITLB_TAG_READ, %g1
-	nop
-	nop
-	nop
-	andn	%g1, %l2, %g1		/* Get vaddr */
-	cmp	%g1, %g2
-	be,a,pn	%xcc, spitfire_got_tlbentry
-	ldxa	[%l0] ASI_ITLB_DATA_ACCESS, %g1
-	cmp	%l0, (63 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	/* BUG() if we get here... */
-	ta	0x5
-
-spitfire_got_tlbentry:
-	/* Nops here again, perhaps Cheetah/Blackbird are better behaved... */
-	nop
-	nop
-	nop
-	and	%g1, %g3, %g1		/* Mask to just get paddr bits. */
-	set	0x5fff, %l3		/* Mask offset to get phys base. */
-	andn	%g1, %l3, %g1
-
-	/* NOTE: We hold on to %g1 paddr base as we need it below to lock
-	 * NOTE: the PROM cif code into the TLB.
-	 */
-
-	or	%g5, %g1, %g5		/* Or it into TAG being built. */
-
-	clr	%l0			/* TLB entry walker. */
-	sethi	%hi(KERNBASE), %g3	/* 4M lower limit */
-	sethi	%hi(KERNBASE<<1), %g7	/* 8M upper limit */
-	mov	TLB_TAG_ACCESS, %l7
-1:
-	/* Yes, the nops seem to be necessary for now, don't ask me why. -DaveM */
-	ldxa	[%l0] ASI_ITLB_TAG_READ, %g1
-	nop
-	nop
-	nop
-	andn	%g1, %l2, %g1		/* Get vaddr */
-	cmp	%g1, %g3
-	blu,pn	%xcc, 2f
-	cmp	%g1, %g7
-	bgeu,pn	%xcc, 2f
-	nop
-	stxa	%g0, [%l7] ASI_IMMU
-	stxa	%g0, [%l0] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-2:
-	cmp	%l0, (63 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	nop; nop; nop
-
-	clr	%l0			/* TLB entry walker. */
-1:
-	/* Yes, the nops seem to be necessary for now, don't ask me why. -DaveM */
-	ldxa	[%l0] ASI_DTLB_TAG_READ, %g1
-	nop
-	nop
-	nop
-	andn	%g1, %l2, %g1		/* Get vaddr */
-	cmp	%g1, %g3
-	blu,pn	%xcc, 2f
-	cmp	%g1, %g7
-	bgeu,pn	%xcc, 2f
-	nop
-	stxa	%g0, [%l7] ASI_DMMU
-	stxa	%g0, [%l0] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-2:
-	cmp	%l0, (63 << 3)
-	blu,pt	%xcc, 1b
-	add	%l0, (1 << 3), %l0
-
-	nop; nop; nop
-
-
-	/* PROM never puts any TLB entries into the MMU with the lock bit
-	 * set.  So we gladly use tlb entry 63 for KERNBASE. And maybe 62 too.
-	 */
-
-	sethi	%hi(KERNBASE), %g3
-	mov	(63 << 3), %g7
-	stxa	%g3, [%l7] ASI_DMMU		/* KERNBASE into TLB TAG */
-	stxa	%g5, [%g7] ASI_DTLB_DATA_ACCESS	/* TTE into TLB DATA */
-	membar	#Sync
-	stxa	%g3, [%l7] ASI_IMMU		/* KERNBASE into TLB TAG */
-	stxa	%g5, [%g7] ASI_ITLB_DATA_ACCESS	/* TTE into TLB DATA */
-	membar	#Sync
-	flush	%g3
-	membar	#Sync
-	sethi	%hi(_end), %g3			/* Check for bigkernel case */
-	or	%g3, %lo(_end), %g3
-	srl	%g3, 23, %g3			/* Check if _end > 8M */
-	brz,pt	%g3, 2f
-	sethi	%hi(KERNBASE), %g3		/* Restore for fixup code below */
-	sethi	%hi(0x400000), %g3
-	or	%g3, %lo(0x400000), %g3
-	add	%g5, %g3, %g5			/* New tte data */
-	andn	%g5, (_PAGE_G), %g5
-	sethi	%hi(KERNBASE+0x400000), %g3
-	or	%g3, %lo(KERNBASE+0x400000), %g3
-	mov	(62 << 3), %g7
-	stxa	%g3, [%l7] ASI_DMMU
-	stxa	%g5, [%g7] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-	stxa	%g3, [%l7] ASI_IMMU
-	stxa	%g5, [%g7] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-	flush	%g3
-	membar	#Sync
-	sethi	%hi(KERNBASE), %g3		/* Restore for fixup code below */
-2:	ba,pt	%xcc, 1f
-	nop
-1:
 	set	sun4u_init, %g2
 	jmpl	%g2 + %g0, %g0
 	nop
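
Editor's note: the spitfire_create_mappings block deleted here (and the analogous Cheetah block deleted further up) did by hand roughly what the new sparc64_boot path asks the firmware to do via call-method "map": find the physical address the PROM mapped us at, build a locked 4MB TTE for it, and install that TTE for KERNBASE in both the I- and D-TLB (entry 63, plus entry 62 in the bigkernel case). In rough C terms, with TTE bit values as in the 2.6-era include/asm-sparc64/pgtable.h (quoted from memory for illustration; check the tree before relying on them):

	/* Illustrative TTE bit values, as in the 2.6-era asm-sparc64/pgtable.h. */
	#define _PAGE_VALID	0x8000000000000000UL	/* valid TTE */
	#define _PAGE_SZ4MB	0x6000000000000000UL	/* 4MB page size */
	#define _PAGE_L		0x0000000000000040UL	/* locked */
	#define _PAGE_CP	0x0000000000000020UL	/* cacheable (physical) */
	#define _PAGE_CV	0x0000000000000010UL	/* cacheable (virtual) */
	#define _PAGE_P		0x0000000000000004UL	/* privileged */
	#define _PAGE_W		0x0000000000000002UL	/* writable */
	#define _PAGE_G		0x0000000000000001UL	/* global */

	/* What the deleted assembly computed: a locked, cacheable, writable,
	 * global 4MB TTE pointing at the physical base found by probing the
	 * ITLB for the entry that maps the current %pc. */
	static unsigned long kernbase_tte(unsigned long phys_base)
	{
		return (_PAGE_VALID | _PAGE_SZ4MB |
			_PAGE_CP | _PAGE_CV | _PAGE_P |
			_PAGE_L | _PAGE_W | _PAGE_G) | phys_base;
	}

The spitfire_tlb_fixup/cheetah_tlb_fixup hunks below, also trimmed by this patch, then cleared _PAGE_G from those entries once the kernel was running in the nucleus context.
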
@@ -483,38 +311,12 @@ sun4u_init:
 	stxa	%g0, [%g7] ASI_DMMU
 	membar	#Sync
 
-	/* We are now safely (we hope) in Nucleus context (0), rewrite
-	 * the KERNBASE TTE's so they no longer have the global bit set.
-	 * Don't forget to setup TAG_ACCESS first 8-)
-	 */
-	mov	TLB_TAG_ACCESS, %g2
-	stxa	%g3, [%g2] ASI_IMMU
-	stxa	%g3, [%g2] ASI_DMMU
-	membar	#Sync
-
 	BRANCH_IF_ANY_CHEETAH(g1,g7,cheetah_tlb_fixup)
 
 	ba,pt	%xcc, spitfire_tlb_fixup
 	nop
 
 cheetah_tlb_fixup:
-	set	(0 << 16) | (15 << 3), %g7
-	ldxa	[%g7] ASI_ITLB_DATA_ACCESS, %g0
-	ldxa	[%g7] ASI_ITLB_DATA_ACCESS, %g1
-	andn	%g1, (_PAGE_G), %g1
-	stxa	%g1, [%g7] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-
-	ldxa	[%g7] ASI_DTLB_DATA_ACCESS, %g0
-	ldxa	[%g7] ASI_DTLB_DATA_ACCESS, %g1
-	andn	%g1, (_PAGE_G), %g1
-	stxa	%g1, [%g7] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-
-	/* Kill instruction prefetch queues. */
-	flush	%g3
-	membar	#Sync
-
 	mov	2, %g2		/* Set TLB type to cheetah+. */
 	BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(g1,g7,1f)
 
@@ -551,21 +353,6 @@ cheetah_tlb_fixup:
 	nop
 
 spitfire_tlb_fixup:
-	mov	(63 << 3), %g7
-	ldxa	[%g7] ASI_ITLB_DATA_ACCESS, %g1
-	andn	%g1, (_PAGE_G), %g1
-	stxa	%g1, [%g7] ASI_ITLB_DATA_ACCESS
-	membar	#Sync
-
-	ldxa	[%g7] ASI_DTLB_DATA_ACCESS, %g1
-	andn	%g1, (_PAGE_G), %g1
-	stxa	%g1, [%g7] ASI_DTLB_DATA_ACCESS
-	membar	#Sync
-
-	/* Kill instruction prefetch queues. */
-	flush	%g3
-	membar	#Sync
-
 	/* Set TLB type to spitfire. */
 	mov	0, %g2
 	sethi	%hi(tlb_type), %g1
@@ -578,24 +365,6 @@ tlb_fixup_done:
 	mov	%sp, %l6
 	mov	%o4, %l7
 
-#if 0 /* We don't do it like this anymore, but for historical hack value
-       * I leave this snippet here to show how crazy we can be sometimes. 8-)
-       */
-
-	/* Setup "Linux Current Register", thanks Sun 8-) */
-	wr	%g0, 0x1, %pcr
-
-	/* Blackbird errata workaround.  See commentary in
-	 * smp.c:smp_percpu_timer_interrupt() for more
-	 * information.
-	 */
-	ba,pt	%xcc, 99f
-	nop
-	.align	64
-99:	wr	%g6, %g0, %pic
-	rd	%pic, %g0
-#endif
-
 	wr	%g0, ASI_P, %asi
 	mov	1, %g1
 	sllx	%g1, THREAD_SHIFT, %g1
@@ -756,12 +525,7 @@ bootup_user_stack_end:
 
 #include "ttable.S"
 #include "systbls.S"
-
-	.align	1024
-	.globl	swapper_pg_dir
-swapper_pg_dir:
-	.word	0
-
+#include "ktlb.S"
 #include "etrap.S"
 #include "rtrap.S"
 #include "winfixup.S"