Diffstat (limited to 'arch/sparc64/kernel/trampoline.S')
-rw-r--r--  arch/sparc64/kernel/trampoline.S  368
1 file changed, 368 insertions, 0 deletions
diff --git a/arch/sparc64/kernel/trampoline.S b/arch/sparc64/kernel/trampoline.S
new file mode 100644
index 000000000000..2c8f9344b4ee
--- /dev/null
+++ b/arch/sparc64/kernel/trampoline.S
@@ -0,0 +1,368 @@
/* $Id: trampoline.S,v 1.26 2002/02/09 19:49:30 davem Exp $
 * trampoline.S: Jump start slave processors on sparc64.
 *
 * Copyright (C) 1997 David S. Miller (davem@caip.rutgers.edu)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/lsu.h>
#include <asm/dcr.h>
#include <asm/dcu.h>
#include <asm/pstate.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/spitfire.h>
#include <asm/processor.h>
#include <asm/thread_info.h>
#include <asm/mmu.h>

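/* OBP client-interface strings: the "call-method" service name and the
 * SUNW,itlb-load / SUNW,dtlb-load method names used below to lock the
 * kernel translations into the TLBs.
 */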
        .data
        .align 8
call_method:
        .asciz "call-method"
        .align 8
itlb_load:
        .asciz "SUNW,itlb-load"
        .align 8
dtlb_load:
        .asciz "SUNW,dtlb-load"

        .text
        .align 8
        .globl sparc64_cpu_startup, sparc64_cpu_startup_end
sparc64_cpu_startup:
        flushw

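        /* Dispatch on cpu type: Cheetah, Cheetah+ (and follow-ons), or
         * the default Spitfire path.
         */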
        BRANCH_IF_CHEETAH_BASE(g1,g5,cheetah_startup)
        BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(g1,g5,cheetah_plus_startup)

        ba,pt %xcc, spitfire_startup
        nop

cheetah_plus_startup:
        /* Preserve OBP chosen DCU and DCR register settings. */
        ba,pt %xcc, cheetah_generic_startup
        nop

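/* Cheetah (UltraSPARC-III): program the dispatch control register
 * (%asr18) and the D-cache unit control register ourselves.
 */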
cheetah_startup:
        mov DCR_BPE | DCR_RPE | DCR_SI | DCR_IFPOE | DCR_MS, %g1
        wr %g1, %asr18

        sethi %uhi(DCU_ME|DCU_RE|DCU_HPE|DCU_SPE|DCU_SL|DCU_WE), %g5
        or %g5, %ulo(DCU_ME|DCU_RE|DCU_HPE|DCU_SPE|DCU_SL|DCU_WE), %g5
        sllx %g5, 32, %g5
        or %g5, DCU_DM | DCU_IM | DCU_DC | DCU_IC, %g5
        stxa %g5, [%g0] ASI_DCU_CONTROL_REG
        membar #Sync

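/* Clear the TSB extension registers (primary, secondary, nucleus)
 * in the D-MMU and I-MMU.
 */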
cheetah_generic_startup:
        mov TSB_EXTENSION_P, %g3
        stxa %g0, [%g3] ASI_DMMU
        stxa %g0, [%g3] ASI_IMMU
        membar #Sync

        mov TSB_EXTENSION_S, %g3
        stxa %g0, [%g3] ASI_DMMU
        membar #Sync

        mov TSB_EXTENSION_N, %g3
        stxa %g0, [%g3] ASI_DMMU
        stxa %g0, [%g3] ASI_IMMU
        membar #Sync

        /* Disable STICK_INT interrupts. */
        sethi %hi(0x80000000), %g5
        sllx %g5, 32, %g5
        wr %g5, %asr25

        ba,pt %xcc, startup_continue
        nop

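/* Spitfire (UltraSPARC-I/II): enable the I/D caches and I/D MMUs
 * via the LSU control register.
 */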
spitfire_startup:
        mov (LSU_CONTROL_IC | LSU_CONTROL_DC | LSU_CONTROL_IM | LSU_CONTROL_DM), %g1
        stxa %g1, [%g0] ASI_LSU_CONTROL
        membar #Sync

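/* Common path: mask all maskable interrupts (PIL 15) and set the
 * interrupt-disable bit (bit 63) in %tick_cmpr.
 */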
startup_continue:
        wrpr %g0, 15, %pil

        sethi %hi(0x80000000), %g2
        sllx %g2, 32, %g2
        wr %g2, 0, %tick_cmpr

        /* Call OBP by hand to lock KERNBASE into i/d tlbs.
         * We lock 2 consecutive entries if we are 'bigkernel'.
         */
        mov %o0, %l0

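        /* Only one cpu may talk to the firmware at a time; spin on
         * prom_entry_lock with ldstub until we own it.
         */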
        sethi %hi(prom_entry_lock), %g2
1:      ldstub [%g2 + %lo(prom_entry_lock)], %g1
        brnz,pn %g1, 1b
        membar #StoreLoad | #StoreStore

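        /* Save our %sp in %l1 and switch onto the stack the PROM call
         * code keeps in p1275buf before calling into the firmware.
         */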
        sethi %hi(p1275buf), %g2
        or %g2, %lo(p1275buf), %g2
        ldx [%g2 + 0x10], %l2
        mov %sp, %l1
        add %l2, -(192 + 128), %sp
        flushw

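        /* Build the IEEE 1275 call-method argument block at
         * %sp + 2047 (stack bias) + 128:  +0x00 service name
         * ("call-method"), +0x08 number of inputs (5), +0x10 number of
         * returns (1), +0x18 method name, +0x20 MMU ihandle, +0x28
         * virtual address, +0x30 TTE data, +0x38 TLB entry index
         * (15 on Cheetah, 63 on Spitfire).
         */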
        sethi %hi(call_method), %g2
        or %g2, %lo(call_method), %g2
        stx %g2, [%sp + 2047 + 128 + 0x00]
        mov 5, %g2
        stx %g2, [%sp + 2047 + 128 + 0x08]
        mov 1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x10]
        sethi %hi(itlb_load), %g2
        or %g2, %lo(itlb_load), %g2
        stx %g2, [%sp + 2047 + 128 + 0x18]
        sethi %hi(mmu_ihandle_cache), %g2
        lduw [%g2 + %lo(mmu_ihandle_cache)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x20]
        sethi %hi(KERNBASE), %g2
        stx %g2, [%sp + 2047 + 128 + 0x28]
        sethi %hi(kern_locked_tte_data), %g2
        ldx [%g2 + %lo(kern_locked_tte_data)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x30]

        mov 15, %g2
        BRANCH_IF_ANY_CHEETAH(g1,g5,1f)

        mov 63, %g2
1:
        stx %g2, [%sp + 2047 + 128 + 0x38]
        sethi %hi(p1275buf), %g2
        or %g2, %lo(p1275buf), %g2
        ldx [%g2 + 0x08], %o1
        call %o1
        add %sp, (2047 + 128), %o0

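        /* A 'bigkernel' image does not fit in one 4MB mapping; lock a
         * second ITLB entry covering KERNBASE + 0x400000.
         */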
        sethi %hi(bigkernel), %g2
        lduw [%g2 + %lo(bigkernel)], %g2
        cmp %g2, 0
        be,pt %icc, do_dtlb
        nop

        sethi %hi(call_method), %g2
        or %g2, %lo(call_method), %g2
        stx %g2, [%sp + 2047 + 128 + 0x00]
        mov 5, %g2
        stx %g2, [%sp + 2047 + 128 + 0x08]
        mov 1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x10]
        sethi %hi(itlb_load), %g2
        or %g2, %lo(itlb_load), %g2
        stx %g2, [%sp + 2047 + 128 + 0x18]
        sethi %hi(mmu_ihandle_cache), %g2
        lduw [%g2 + %lo(mmu_ihandle_cache)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x20]
        sethi %hi(KERNBASE + 0x400000), %g2
        stx %g2, [%sp + 2047 + 128 + 0x28]
        sethi %hi(kern_locked_tte_data), %g2
        ldx [%g2 + %lo(kern_locked_tte_data)], %g2
        sethi %hi(0x400000), %g1
        add %g2, %g1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x30]

        mov 14, %g2
        BRANCH_IF_ANY_CHEETAH(g1,g5,1f)

        mov 62, %g2
1:
        stx %g2, [%sp + 2047 + 128 + 0x38]
        sethi %hi(p1275buf), %g2
        or %g2, %lo(p1275buf), %g2
        ldx [%g2 + 0x08], %o1
        call %o1
        add %sp, (2047 + 128), %o0

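/* Now lock the same translation(s) into the DTLB via SUNW,dtlb-load. */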
do_dtlb:
        sethi %hi(call_method), %g2
        or %g2, %lo(call_method), %g2
        stx %g2, [%sp + 2047 + 128 + 0x00]
        mov 5, %g2
        stx %g2, [%sp + 2047 + 128 + 0x08]
        mov 1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x10]
        sethi %hi(dtlb_load), %g2
        or %g2, %lo(dtlb_load), %g2
        stx %g2, [%sp + 2047 + 128 + 0x18]
        sethi %hi(mmu_ihandle_cache), %g2
        lduw [%g2 + %lo(mmu_ihandle_cache)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x20]
        sethi %hi(KERNBASE), %g2
        stx %g2, [%sp + 2047 + 128 + 0x28]
        sethi %hi(kern_locked_tte_data), %g2
        ldx [%g2 + %lo(kern_locked_tte_data)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x30]

        mov 15, %g2
        BRANCH_IF_ANY_CHEETAH(g1,g5,1f)

        mov 63, %g2
1:

        stx %g2, [%sp + 2047 + 128 + 0x38]
        sethi %hi(p1275buf), %g2
        or %g2, %lo(p1275buf), %g2
        ldx [%g2 + 0x08], %o1
        call %o1
        add %sp, (2047 + 128), %o0

        sethi %hi(bigkernel), %g2
        lduw [%g2 + %lo(bigkernel)], %g2
        cmp %g2, 0
        be,pt %icc, do_unlock
        nop

        sethi %hi(call_method), %g2
        or %g2, %lo(call_method), %g2
        stx %g2, [%sp + 2047 + 128 + 0x00]
        mov 5, %g2
        stx %g2, [%sp + 2047 + 128 + 0x08]
        mov 1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x10]
        sethi %hi(dtlb_load), %g2
        or %g2, %lo(dtlb_load), %g2
        stx %g2, [%sp + 2047 + 128 + 0x18]
        sethi %hi(mmu_ihandle_cache), %g2
        lduw [%g2 + %lo(mmu_ihandle_cache)], %g2
        stx %g2, [%sp + 2047 + 128 + 0x20]
        sethi %hi(KERNBASE + 0x400000), %g2
        stx %g2, [%sp + 2047 + 128 + 0x28]
        sethi %hi(kern_locked_tte_data), %g2
        ldx [%g2 + %lo(kern_locked_tte_data)], %g2
        sethi %hi(0x400000), %g1
        add %g2, %g1, %g2
        stx %g2, [%sp + 2047 + 128 + 0x30]

        mov 14, %g2
        BRANCH_IF_ANY_CHEETAH(g1,g5,1f)

        mov 62, %g2
1:

        stx %g2, [%sp + 2047 + 128 + 0x38]
        sethi %hi(p1275buf), %g2
        or %g2, %lo(p1275buf), %g2
        ldx [%g2 + 0x08], %o1
        call %o1
        add %sp, (2047 + 128), %o0

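/* Drop prom_entry_lock and switch back to our own stack. */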
do_unlock:
        sethi %hi(prom_entry_lock), %g2
        stb %g0, [%g2 + %lo(prom_entry_lock)]
        membar #StoreStore | #StoreLoad

        mov %l1, %sp
        flushw

        mov %l0, %o0

        wrpr %g0, (PSTATE_PRIV | PSTATE_PEF), %pstate
        wr %g0, 0, %fprs

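        /* %o0 still carries the pointer the PROM passed us: the address
         * of this cpu's new thread_info pointer.  Zero-extend it first
         * (see the buggy-PROM note below) and pull thread_info into %g6.
         */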
        /* XXX Buggy PROM... */
        srl %o0, 0, %o0
        ldx [%o0], %g6

        wr %g0, ASI_P, %asi

        mov PRIMARY_CONTEXT, %g7
        stxa %g0, [%g7] ASI_DMMU
        membar #Sync
        mov SECONDARY_CONTEXT, %g7
        stxa %g0, [%g7] ASI_DMMU
        membar #Sync

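        /* Point %sp at the top of the new thread's stack:
         * thread_info + THREAD_SIZE - (STACKFRAME_SZ + STACK_BIAS).
         */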
        mov 1, %g5
        sllx %g5, THREAD_SHIFT, %g5
        sub %g5, (STACKFRAME_SZ + STACK_BIAS), %g5
        add %g6, %g5, %sp
        mov 0, %fp

        wrpr %g0, 0, %wstate
        wrpr %g0, 0, %tl

        /* Setup the trap globals, then we can resurface. */
        rdpr %pstate, %o1
        mov %g6, %o2
        wrpr %o1, PSTATE_AG, %pstate
        sethi %hi(sparc64_ttable_tl0), %g5
        wrpr %g5, %tba
        mov %o2, %g6

        wrpr %o1, PSTATE_MG, %pstate
#define KERN_HIGHBITS ((_PAGE_VALID|_PAGE_SZ4MB)^0xfffff80000000000)
#define KERN_LOWBITS (_PAGE_CP | _PAGE_CV | _PAGE_P | _PAGE_W)

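        /* While on the MMU globals: clear the TSB register and leave
         * %g1 (TLB_SFSR), %g2 (kernel 4MB page TTE bits), %g3 (VPTE
         * base, Cheetah or Spitfire flavour) and %g7 (zero) set up for
         * the TLB miss handlers.
         */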
        mov TSB_REG, %g1
        stxa %g0, [%g1] ASI_DMMU
        membar #Sync
        mov TLB_SFSR, %g1
        sethi %uhi(KERN_HIGHBITS), %g2
        or %g2, %ulo(KERN_HIGHBITS), %g2
        sllx %g2, 32, %g2
        or %g2, KERN_LOWBITS, %g2

        BRANCH_IF_ANY_CHEETAH(g3,g7,9f)

        ba,pt %xcc, 1f
        nop

9:
        sethi %uhi(VPTE_BASE_CHEETAH), %g3
        or %g3, %ulo(VPTE_BASE_CHEETAH), %g3
        ba,pt %xcc, 2f
        sllx %g3, 32, %g3
1:
        sethi %uhi(VPTE_BASE_SPITFIRE), %g3
        or %g3, %ulo(VPTE_BASE_SPITFIRE), %g3
        sllx %g3, 32, %g3

2:
        clr %g7
#undef KERN_HIGHBITS
#undef KERN_LOWBITS

        wrpr %o1, 0x0, %pstate
        ldx [%g6 + TI_TASK], %g4

        wrpr %g0, 0, %wstate

        call init_irqwork_curcpu
        nop

        BRANCH_IF_CHEETAH_PLUS_OR_FOLLOWON(g2,g3,1f)
        ba,pt %xcc, 2f
        nop

1:      /* Start using proper page size encodings in ctx register. */
        sethi %uhi(CTX_CHEETAH_PLUS_NUC), %g3
        mov PRIMARY_CONTEXT, %g1
        sllx %g3, 32, %g3
        sethi %hi(CTX_CHEETAH_PLUS_CTX0), %g2
        or %g3, %g2, %g3
        stxa %g3, [%g1] ASI_DMMU
        membar #Sync

2:
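        /* Re-enable interrupts, hand the kernel trap table to the PROM,
         * and call into smp_callin(); cpu_idle() should never return.
         */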
        rdpr %pstate, %o1
        or %o1, PSTATE_IE, %o1
        wrpr %o1, 0, %pstate

        call prom_set_trap_table
        sethi %hi(sparc64_ttable_tl0), %o0

        call smp_callin
        nop
        call cpu_idle
        mov 0, %o0
        call cpu_panic
        nop
1:      b,a,pt %xcc, 1b

        .align 8
sparc64_cpu_startup_end: