Diffstat (limited to 'arch/sparc/kernel')
-rw-r--r--  arch/sparc/kernel/ktlb.S        | 108
-rw-r--r--  arch/sparc/kernel/vmlinux.lds.S |   5
2 files changed, 9 insertions, 104 deletions
diff --git a/arch/sparc/kernel/ktlb.S b/arch/sparc/kernel/ktlb.S
index 605d49204580..94a1e6648bd0 100644
--- a/arch/sparc/kernel/ktlb.S
+++ b/arch/sparc/kernel/ktlb.S
@@ -47,14 +47,6 @@ kvmap_itlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
 
 	TSB_LOCK_TAG(%g1, %g2, %g7)
-
-	/* Load and check PTE.  */
-	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
-	mov		1, %g7
-	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
-	brgez,a,pn	%g5, kvmap_itlb_longpath
-	 TSB_STORE(%g1, %g7)
-
 	TSB_WRITE(%g1, %g5, %g6)
 
 	/* fallthrough to TLB load */
@@ -118,6 +110,12 @@ kvmap_dtlb_obp:
 	ba,pt		%xcc, kvmap_dtlb_load
 	 nop
 
+kvmap_linear_early:
+	sethi		%hi(kern_linear_pte_xor), %g7
+	ldx		[%g7 + %lo(kern_linear_pte_xor)], %g2
+	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
+	 xor		%g2, %g4, %g5
+
 	.align		32
kvmap_dtlb_tsb4m_load:
 	TSB_LOCK_TAG(%g1, %g2, %g7)
@@ -146,105 +144,17 @@ kvmap_dtlb_4v:
 	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
 	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
 #endif
-	/* TSB entry address left in %g1, lookup linear PTE.
-	 * Must preserve %g1 and %g6 (TAG).
-	 */
-kvmap_dtlb_tsb4m_miss:
-	/* Clear the PAGE_OFFSET top virtual bits, shift
-	 * down to get PFN, and make sure PFN is in range.
-	 */
-661:	sllx		%g4, 0, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word		661b
-	.previous
-
-	/* Check to see if we know about valid memory at the 4MB
-	 * chunk this physical address will reside within.
+	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
+	 * page table based lookup.
 	 */
-661:	srlx		%g5, MAX_PHYS_ADDRESS_BITS, %g2
-	.section	.page_offset_shift_patch, "ax"
-	.word		661b
-	.previous
-
-	brnz,pn		%g2, kvmap_dtlb_longpath
-	 nop
-
-	/* This unconditional branch and delay-slot nop gets patched
-	 * by the sethi sequence once the bitmap is properly setup.
-	 */
-	.globl		valid_addr_bitmap_insn
-valid_addr_bitmap_insn:
-	ba,pt		%xcc, 2f
-	 nop
-	.subsection	2
-	.globl		valid_addr_bitmap_patch
-valid_addr_bitmap_patch:
-	sethi		%hi(sparc64_valid_addr_bitmap), %g7
-	or		%g7, %lo(sparc64_valid_addr_bitmap), %g7
-	.previous
-
-661:	srlx		%g5, ILOG2_4MB, %g2
-	.section	.page_offset_shift_patch, "ax"
-	.word		661b
-	.previous
-
-	srlx		%g2, 6, %g5
-	and		%g2, 63, %g2
-	sllx		%g5, 3, %g5
-	ldx		[%g7 + %g5], %g5
-	mov		1, %g7
-	sllx		%g7, %g2, %g7
-	andcc		%g5, %g7, %g0
-	be,pn		%xcc, kvmap_dtlb_longpath
-
-2:	 sethi		%hi(kpte_linear_bitmap), %g2
-
-	/* Get the 256MB physical address index. */
-661:	sllx		%g4, 0, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word		661b
-	.previous
-
-	or		%g2, %lo(kpte_linear_bitmap), %g2
-
-661:	srlx		%g5, ILOG2_256MB, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word		661b
-	.previous
-
-	and		%g5, (32 - 1), %g7
-
-	/* Divide by 32 to get the offset into the bitmask.  */
-	srlx		%g5, 5, %g5
-	add		%g7, %g7, %g7
-	sllx		%g5, 3, %g5
-
-	/* kern_linear_pte_xor[(mask >> shift) & 3)] */
-	ldx		[%g2 + %g5], %g2
-	srlx		%g2, %g7, %g7
-	sethi		%hi(kern_linear_pte_xor), %g5
-	and		%g7, 3, %g7
-	or		%g5, %lo(kern_linear_pte_xor), %g5
-	sllx		%g7, 3, %g7
-	ldx		[%g5 + %g7], %g2
-
 	.globl		kvmap_linear_patch
kvmap_linear_patch:
-	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
-	 xor		%g2, %g4, %g5
+	ba,a,pt		%xcc, kvmap_linear_early
 
kvmap_dtlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
 
 	TSB_LOCK_TAG(%g1, %g2, %g7)
-
-	/* Load and check PTE.  */
-	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
-	mov		1, %g7
-	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
-	brgez,a,pn	%g5, kvmap_dtlb_longpath
-	 TSB_STORE(%g1, %g7)
-
 	TSB_WRITE(%g1, %g5, %g6)
 
 	/* fallthrough to TLB load */
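
The bulk of the deletion above is the old kvmap_dtlb_tsb4m_miss fast path, which validated the faulting physical address against sparc64_valid_addr_bitmap (one valid bit per 4MB chunk) and then consulted kpte_linear_bitmap (a 2-bit field per 256MB chunk) to select which of the four kern_linear_pte_xor[] values to XOR with the faulting virtual address. A minimal C sketch of that arithmetic, assuming ILOG2_4MB = 22 and ILOG2_256MB = 28; the helper itself is hypothetical, for illustration only:

#define ILOG2_4MB	22	/* log2 of a 4MB chunk */
#define ILOG2_256MB	28	/* log2 of a 256MB chunk */

extern unsigned long sparc64_valid_addr_bitmap[];	/* 1 bit per 4MB chunk */
extern unsigned long kpte_linear_bitmap[];		/* 2 bits per 256MB chunk */
extern unsigned long kern_linear_pte_xor[4];		/* PTE bits, one entry per mapping page size */

/* Returns the linear-mapping PTE for vaddr (physical address paddr),
 * or 0 where the removed assembly branched to kvmap_dtlb_longpath.
 */
static unsigned long linear_pte(unsigned long vaddr, unsigned long paddr)
{
	unsigned long chunk4m = paddr >> ILOG2_4MB;
	unsigned long chunk256m = paddr >> ILOG2_256MB;
	unsigned long field;

	/* The srlx/and/sllx/ldx run: test this 4MB chunk's valid bit. */
	if (!(sparc64_valid_addr_bitmap[chunk4m >> 6] & (1UL << (chunk4m & 63))))
		return 0;

	/* "kern_linear_pte_xor[(mask >> shift) & 3]": two bits per
	 * 256MB chunk pick the page size used to map that chunk.
	 */
	field = (kpte_linear_bitmap[chunk256m >> 5] >> ((chunk256m & 31) * 2)) & 3;

	return kern_linear_pte_xor[field] ^ vaddr;
}

Each 661-labeled shift in the deleted code also had its address recorded in .page_offset_shift_patch, so the shift amounts could be rewritten at boot once PAGE_OFFSET was known; that patching machinery is retired along with the bitmap lookups.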
diff --git a/arch/sparc/kernel/vmlinux.lds.S b/arch/sparc/kernel/vmlinux.lds.S
index 932ff90fd760..0bacceb19150 100644
--- a/arch/sparc/kernel/vmlinux.lds.S
+++ b/arch/sparc/kernel/vmlinux.lds.S
@@ -122,11 +122,6 @@ SECTIONS
 		*(.swapper_4m_tsb_phys_patch)
 		__swapper_4m_tsb_phys_patch_end = .;
 	}
-	.page_offset_shift_patch : {
-		__page_offset_shift_patch = .;
-		*(.page_offset_shift_patch)
-		__page_offset_shift_patch_end = .;
-	}
 	.popc_3insn_patch : {
 		__popc_3insn_patch = .;
 		*(.popc_3insn_patch)
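
With the patch-site entries in ktlb.S deleted, the .page_offset_shift_patch output section loses its users (within this arch/sparc/kernel view, at least), which is why it is dropped from the linker script. What remains of the linear-mapping fast path is the five-instruction kvmap_linear_early stub added in the second hunk; roughly, as a C sketch (the helper name is hypothetical, and kern_linear_pte_xor[0] is simply the first array element that the stub's ldx loads):

extern unsigned long kern_linear_pte_xor[4];

/* Sketch of kvmap_linear_early: form the PTE by XORing the faulting
 * virtual address with the first kern_linear_pte_xor entry, then fall
 * into kvmap_dtlb_tsb4m_load.
 */
static unsigned long early_linear_pte(unsigned long vaddr)
{
	return kern_linear_pte_xor[0] ^ vaddr;
}

Per-chunk page-size selection is no longer decided in this handler; a miss the 4MB TSB cannot satisfy now falls through to the kernel page table walk, as the replacement comment in the third hunk states.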