-rw-r--r--  arch/sparc64/kernel/ktlb.S            67
-rw-r--r--  arch/sparc64/kernel/sun4v_tlb_miss.S  14
-rw-r--r--  include/asm-sparc64/ttable.h          10
3 files changed, 73 insertions, 18 deletions
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index f6bb2e08964a..2d333ab4b91b 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -48,7 +48,7 @@ kvmap_itlb_tsb_miss:
 kvmap_itlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	/* Load and check PTE. */
 	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
@@ -60,8 +60,29 @@ kvmap_itlb_vmalloc_addr:
 	/* fallthrough to TLB load */
 
 kvmap_itlb_load:
-	stxa	%g5, [%g0] ASI_ITLB_DATA_IN	! Reload TLB
+
+661:	stxa	%g5, [%g0] ASI_ITLB_DATA_IN
 	retry
+	.section	.sun4v_2insn_patch, "ax"
+	.word	661b
+	nop
+	nop
+	.previous
+
+	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
+	 * instruction get nop'd out and we get here to branch
+	 * to the sun4v tlb load code.  The registers are setup
+	 * as follows:
+	 *
+	 * %g4: vaddr
+	 * %g5: PTE
+	 * %g6: TAG
+	 *
+	 * The sun4v TLB load wants the PTE in %g3 so we fix that
+	 * up here.
+	 */
+	ba,pt	%xcc, sun4v_itlb_load
+	 mov	%g5, %g3
 
 kvmap_itlb_longpath:
 
@@ -80,7 +101,7 @@ kvmap_itlb_longpath:
 kvmap_itlb_obp:
 	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -90,7 +111,7 @@ kvmap_itlb_obp:
 kvmap_dtlb_obp:
 	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -129,7 +150,7 @@ kvmap_linear_patch:
 kvmap_dtlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	/* Load and check PTE. */
 	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
@@ -141,8 +162,29 @@ kvmap_dtlb_vmalloc_addr:
 	/* fallthrough to TLB load */
 
 kvmap_dtlb_load:
-	stxa	%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
+
+661:	stxa	%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
 	retry
+	.section	.sun4v_2insn_patch, "ax"
+	.word	661b
+	nop
+	nop
+	.previous
+
+	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
+	 * instruction get nop'd out and we get here to branch
+	 * to the sun4v tlb load code.  The registers are setup
+	 * as follows:
+	 *
+	 * %g4: vaddr
+	 * %g5: PTE
+	 * %g6: TAG
+	 *
+	 * The sun4v TLB load wants the PTE in %g3 so we fix that
+	 * up here.
+	 */
+	ba,pt	%xcc, sun4v_dtlb_load
+	 mov	%g5, %g3
 
 kvmap_dtlb_nonlinear:
 	/* Catch kernel NULL pointer derefs. */
@@ -185,10 +227,17 @@ kvmap_dtlb_longpath:
 	nop
 	.previous
 
-	rdpr	%tl, %g4
-	cmp	%g4, 1
-	mov	TLB_TAG_ACCESS, %g4
+	rdpr	%tl, %g3
+	cmp	%g3, 1
+
+661:	mov	TLB_TAG_ACCESS, %g4
 	ldxa	[%g4] ASI_DMMU, %g5
+	.section	.sun4v_2insn_patch, "ax"
+	.word	661b
+	mov	%g4, %g5
+	nop
+	.previous
+
 	be,pt	%xcc, sparc64_realfault_common
 	 mov	FAULT_CODE_DTLB, %g4
 	ba,pt	%xcc, winfix_trampoline
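
The pattern added to kvmap_itlb_load, kvmap_dtlb_load and kvmap_dtlb_longpath above is the sun4v boot-time patch table: the 661: label marks the instructions that are only correct on pre-sun4v chips, and the .sun4v_2insn_patch entry records that address together with the two instruction words that should replace them on sun4v hardware (two nops in the TLB-load cases, so execution falls through to the ba,pt into the sun4v load path; a mov plus a nop in the longpath case). The consumer of that section is not part of this patch; the sketch below shows how such a table could be walked at boot, assuming each entry is a 32-bit patch-site address (the .word 661b) followed by the two 32-bit replacement words. Struct and function names here are illustrative, not taken from this diff.

/* Minimal sketch of a .sun4v_2insn_patch walker (illustrative names). */
#include <stdint.h>

struct sun4v_2insn_patch_entry {
	uint32_t addr;		/* address of the instruction at 661:     */
	uint32_t insns[2];	/* replacements for 661: and 661: + 4     */
};

static void sun4v_patch_2insn_range(struct sun4v_2insn_patch_entry *p,
				    struct sun4v_2insn_patch_entry *end)
{
	for (; p < end; p++) {
		uint32_t *insn = (uint32_t *)(unsigned long)p->addr;

		/* Overwrite both instruction words at the patch site; a real
		 * patcher must also flush them from the instruction cache
		 * before any CPU can execute the patched code.
		 */
		insn[0] = p->insns[0];
		insn[1] = p->insns[1];
	}
}

Because exactly two instruction words (the ASI store and the retry, or the mov and the ldxa in the longpath case) need replacing, each table entry carries exactly two replacement words.
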
diff --git a/arch/sparc64/kernel/sun4v_tlb_miss.S b/arch/sparc64/kernel/sun4v_tlb_miss.S
index f7129137f9a4..597359ced233 100644
--- a/arch/sparc64/kernel/sun4v_tlb_miss.S
+++ b/arch/sparc64/kernel/sun4v_tlb_miss.S
@@ -96,7 +96,7 @@ sun4v_dtlb_miss:
 
 	/* Load UTSB reg into %g1. */
 	mov	SCRATCHPAD_UTSBREG1, %g1
-	ldxa	[%g1 + %g1] ASI_SCRATCHPAD, %g1
+	ldxa	[%g1] ASI_SCRATCHPAD, %g1
 
 	LOAD_DTLB_INFO(%g2, %g4, %g5)
 	COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
@@ -149,14 +149,19 @@ sun4v_dtlb_prot:
 	 * SCRATCHPAD_MMU_MISS contents in %g2.
 	 */
 sun4v_itsb_miss:
-	ba,pt	%xcc, sun4v_tsb_miss_common
+	mov	SCRATCHPAD_UTSBREG1, %g1
+	ldxa	[%g1] ASI_SCRATCHPAD, %g1
+	brz,pn	%g5, kvmap_itlb_4v
 	 mov	FAULT_CODE_ITLB, %g3
 
 	/* Called from trap table with TAG TARGET placed into
 	 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
 	 */
 sun4v_dtsb_miss:
-	mov	FAULT_CODE_DTLB, %g3
+	mov	SCRATCHPAD_UTSBREG1, %g1
+	ldxa	[%g1] ASI_SCRATCHPAD, %g1
+	brz,pn	%g5, kvmap_dtlb_4v
+	 mov	FAULT_CODE_DTLB, %g3
 
 	/* Create TSB pointer into %g1.  This is something like:
 	 *
@@ -312,7 +317,8 @@ sun4v_stdfmna:
 	or	%g2, %lo(OLD), %g2; \
 	sub	%g1, %g2, %g1; \
 	sethi	%hi(BRANCH_ALWAYS), %g3; \
-	srl	%g1, 2, %g1; \
+	sll	%g1, 11, %g1; \
+	srl	%g1, 11 + 2, %g1; \
 	or	%g3, %lo(BRANCH_ALWAYS), %g3; \
 	or	%g3, %g1, %g3; \
 	stw	%g3, [%g2]; \
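
The last hunk above hardens the boot-time branch patching macro. %g2 holds the address of the old handler (the patch site, where the stw stores the branch), %g1 holds the byte offset from that site to the new target after the sub, and the old code only converted it to a word offset (srl %g1, 2) before OR-ing it into the BRANCH_ALWAYS opcode template. The added sll by 11 followed by srl by 11 + 2 also masks the word offset down to 19 bits, the width of the disp19 field in the SPARC V9 branch-with-prediction format, so a large or negative displacement cannot spill into the opcode bits. Below is a C equivalent of the patched sequence, assuming BRANCH_ALWAYS is an opcode template with a zero displacement field (it is defined elsewhere in this file and not shown in the hunk):

#include <stdint.h>

/* Build the branch instruction word that the macro stores at the patch site. */
static uint32_t branch_always_insn(uint32_t branch_always,
				   unsigned long patch_site, unsigned long target)
{
	uint32_t disp = (uint32_t)(target - patch_site);  /* sub %g1, %g2, %g1 */

	/* sll 11 then srl 11 + 2: byte offset -> word offset, confined to
	 * the 19-bit disp19 field so it cannot clobber the opcode bits.
	 */
	disp = (disp << 11) >> (11 + 2);

	return branch_always | disp;                      /* or %g3, %g1, %g3  */
}
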
diff --git a/include/asm-sparc64/ttable.h b/include/asm-sparc64/ttable.h
index 6bb86a7a5b42..9e28b240f3aa 100644
--- a/include/asm-sparc64/ttable.h
+++ b/include/asm-sparc64/ttable.h
@@ -186,19 +186,19 @@
 	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5; \
 	srlx	%g4, 22, %g7; \
 	sllx	%g5, 48, %g6; \
-	brz,pn	%g5, kvmap_itlb_4v; \
+	ba,pt	%xcc, sun4v_itsb_miss; \
 	 or	%g6, %g7, %g6; \
-	ba,a,pt	%xcc, sun4v_itsb_miss;
+	nop;
 
 #define SUN4V_DTSB_MISS \
 	ldxa	[%g0] ASI_SCRATCHPAD, %g2; \
 	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4; \
 	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5; \
 	srlx	%g4, 22, %g7; \
 	sllx	%g5, 48, %g6; \
-	brz,pn	%g5, kvmap_dtlb_4v; \
+	ba,pt	%xcc, sun4v_dtsb_miss; \
 	 or	%g6, %g7, %g6; \
-	ba,a,pt	%xcc, sun4v_dtsb_miss;
+	nop;
 
 /* Before touching these macros, you owe it to yourself to go and
  * see how arch/sparc64/kernel/winfixup.S works... -DaveM
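
The ttable.h change keeps both trap-table macros the same length (the hunk replaces four lines with four lines in each) while moving the context-zero test (the brz,pn %g5 into kvmap_itlb_4v / kvmap_dtlb_4v) and the UTSB scratchpad load out of the per-vector code and into the shared sun4v_itsb_miss / sun4v_dtsb_miss stubs shown in the previous file; each vector now ends with an unconditional ba,pt plus a trailing nop instead. The fixed length matters because SPARC V9 dispatches traps into fixed 32-byte, eight-instruction trap-table slots. A small sketch of how the trap PC is formed under that model, for orientation only (function name illustrative):

#include <stdint.h>

/* Trap PC = TBA<63:15> | (TL > 0) << 14 | TT << 5, i.e. 32 bytes per vector. */
static uint64_t trap_vector_address(uint64_t tba, unsigned int tl, unsigned int tt)
{
	uint64_t vec = tba & ~0x7fffULL;	/* trap base address, 32K aligned */

	if (tl > 0)				/* nested traps use the upper     */
		vec |= 1ULL << 14;		/* half of the trap table         */

	return vec | ((uint64_t)tt << 5);	/* eight instructions per slot    */
}
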