author     David S. Miller <davem@sunset.davemloft.net>   2006-02-11 15:21:20 -0500
committer  David S. Miller <davem@sunset.davemloft.net>   2006-03-20 04:12:23 -0500
commit     459b6e621e0e15315c25bac47fa7113e5818d45d (patch)
tree       4bbff0ec1dafb7fba8b247c84ad708f54cc687fe /arch/sparc64/kernel/ktlb.S
parent     fd05068d7b22b64211f9202aa67ad44b51d44242 (diff)
[SPARC64]: Fix some SUN4V TLB miss bugs.
Code patching did not sign extend negative branch
offsets correctly.
Kernel TLB miss path needs patching and %g4 register
preservation in order to handle SUN4V correctly.
Signed-off-by: David S. Miller <davem@davemloft.net>
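For context on the first point: a SPARC branch of the BPcc flavor keeps its word displacement in a signed 19-bit field, so code that recomputes or relocates a branch must sign extend that field before using it; treating it as unsigned silently corrupts backward (negative) branches. A minimal sketch of the arithmetic, purely illustrative and not taken from the kernel's patching code:

```c
#include <stdint.h>

/* Illustrative only -- not the kernel's patching code.  A SPARC BPcc
 * instruction keeps its word displacement in a signed 19-bit field
 * (bits 18:0).  Patch code that recomputes a branch target must sign
 * extend that field; treating it as unsigned breaks backward branches,
 * which is the kind of bug the first paragraph above describes.
 */
static inline unsigned long branch_target(uint32_t insn, unsigned long pc)
{
	long disp19 = insn & 0x7ffff;	/* 19-bit displacement field */

	if (disp19 & 0x40000)		/* sign bit of the field set? */
		disp19 |= ~0x7ffffL;	/* sign extend to full width  */

	return pc + (disp19 << 2);	/* word displacement -> bytes */
}
```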
Diffstat (limited to 'arch/sparc64/kernel/ktlb.S')
-rw-r--r--   arch/sparc64/kernel/ktlb.S | 67
1 file changed, 58 insertions(+), 9 deletions(-)
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index f6bb2e08964a..2d333ab4b91b 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -48,7 +48,7 @@ kvmap_itlb_tsb_miss:
 kvmap_itlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
@@ -60,8 +60,29 @@ kvmap_itlb_vmalloc_addr:
 	/* fallthrough to TLB load */
 
 kvmap_itlb_load:
-	stxa		%g5, [%g0] ASI_ITLB_DATA_IN	! Reload TLB
+
+661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
 	retry
+	.section	.sun4v_2insn_patch, "ax"
+	.word		661b
+	nop
+	nop
+	.previous
+
+	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
+	 * instruction get nop'd out and we get here to branch
+	 * to the sun4v tlb load code.  The registers are setup
+	 * as follows:
+	 *
+	 * %g4: vaddr
+	 * %g5: PTE
+	 * %g6: TAG
+	 *
+	 * The sun4v TLB load wants the PTE in %g3 so we fix that
+	 * up here.
+	 */
+	ba,pt		%xcc, sun4v_itlb_load
+	 mov		%g5, %g3
 
 kvmap_itlb_longpath:
 
@@ -80,7 +101,7 @@ kvmap_itlb_longpath:
 kvmap_itlb_obp:
 	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -90,7 +111,7 @@ kvmap_itlb_obp:
 kvmap_dtlb_obp:
 	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	KTSB_WRITE(%g1, %g5, %g6)
 
@@ -129,7 +150,7 @@ kvmap_linear_patch:
 kvmap_dtlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
 
-	KTSB_LOCK_TAG(%g1, %g2, %g4)
+	KTSB_LOCK_TAG(%g1, %g2, %g7)
 
 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
@@ -141,8 +162,29 @@ kvmap_dtlb_vmalloc_addr:
 	/* fallthrough to TLB load */
 
 kvmap_dtlb_load:
-	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
+
+661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
 	retry
+	.section	.sun4v_2insn_patch, "ax"
+	.word		661b
+	nop
+	nop
+	.previous
+
+	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
+	 * instruction get nop'd out and we get here to branch
+	 * to the sun4v tlb load code.  The registers are setup
+	 * as follows:
+	 *
+	 * %g4: vaddr
+	 * %g5: PTE
+	 * %g6: TAG
+	 *
+	 * The sun4v TLB load wants the PTE in %g3 so we fix that
+	 * up here.
+	 */
+	ba,pt		%xcc, sun4v_dtlb_load
+	 mov		%g5, %g3
 
 kvmap_dtlb_nonlinear:
 	/* Catch kernel NULL pointer derefs.  */
@@ -185,10 +227,17 @@ kvmap_dtlb_longpath:
 	nop
 	.previous
 
-	rdpr	%tl, %g4
-	cmp	%g4, 1
-	mov	TLB_TAG_ACCESS, %g4
+	rdpr	%tl, %g3
+	cmp	%g3, 1
+
+661:	mov	TLB_TAG_ACCESS, %g4
 	ldxa	[%g4] ASI_DMMU, %g5
+	.section .sun4v_2insn_patch, "ax"
+	.word	661b
+	mov	%g4, %g5
+	nop
+	.previous
+
 	be,pt	%xcc, sparc64_realfault_common
 	 mov	FAULT_CODE_DTLB, %g4
 	ba,pt	%xcc, winfix_trampoline
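The `661:` labels and `.sun4v_2insn_patch` entries added above rely on boot-time code patching: each entry records the address of a `661:` site together with two replacement instruction words (here two `nop`s), and on sun4v the kernel rewrites the two instructions at that address before these miss handlers run. With the `stxa`/`retry` pair nop'd out, execution falls through to the `ba,pt ... sun4v_itlb_load` / `sun4v_dtlb_load` branch that the patch adds. A rough sketch of how such a table might be walked, with the entry layout and function name assumed for illustration rather than quoted from this commit:

```c
/* Rough sketch of consuming a .sun4v_2insn_patch table at boot; the
 * struct layout, symbol names, and loop here are assumptions for
 * illustration, not code taken from this commit.  Each entry pairs
 * the address of a "661:" site with two replacement instructions.
 */
struct sun4v_2insn_patch_entry {
	unsigned int	addr;		/* address of the 661: label  */
	unsigned int	insns[2];	/* replacement instructions   */
};

extern struct sun4v_2insn_patch_entry __sun4v_2insn_patch,
				      __sun4v_2insn_patch_end;

static void sun4v_patch_2insn_sites(void)
{
	struct sun4v_2insn_patch_entry *p = &__sun4v_2insn_patch;

	while (p < &__sun4v_2insn_patch_end) {
		unsigned long addr = p->addr;

		/* Overwrite the two instructions at the patch site and
		 * flush so the new code is fetched, not stale I-cache.
		 */
		*(unsigned int *) (addr + 0x0) = p->insns[0];
		__asm__ __volatile__("flush	%0" : : "r" (addr + 0x0));

		*(unsigned int *) (addr + 0x4) = p->insns[1];
		__asm__ __volatile__("flush	%0" : : "r" (addr + 0x4));

		p++;
	}
}
```

This is also why KTSB_LOCK_TAG now takes %g7 as its scratch register: the sun4v load paths documented in the new comments expect the faulting vaddr to still be live in %g4, which is the "%g4 register preservation" the commit message refers to.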