author      David S. Miller <davem@sunset.davemloft.net>  2005-09-22 00:49:32 -0400
committer   David S. Miller <davem@sunset.davemloft.net>  2005-09-22 00:49:32 -0400
commit      1ac4f5ebaa496a23ab4a148c9864d7e30a6c6cd3 (patch)
tree        1ab1e111f596b8c66c741e63f14d721cb1818c7a /arch/sparc64/kernel
parent      059deb693ec191e563ec69533d24f3feff0b78cd (diff)
[SPARC64]: Remove ktlb.S instruction patching.
This was kind of ugly, and actually buggy. The bug was that we
didn't handle a machine with physical memory starting above 4GB.
If the 'prompmd' was allocated in physical memory above 4GB, we'd
croak because the obp_iaddr_patch and obp_daddr_patch patch sites
only supported a 32-bit physical address.

So fix this by just loading the appropriate values from two
variables in the kernel image. The kernel image is locked into
the TLB, so accesses to those variables can't cause a recursive
TLB miss.
Signed-off-by: David S. Miller <davem@davemloft.net>
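As background on the 32-bit limit mentioned above: a SPARC sethi instruction carries a 22-bit immediate that lands in bits 31:10 of the destination register, and the following or with %lo() supplies bits 9:0, so a patched sethi/or pair can never express more than the low 32 bits of an address. A minimal user-space C sketch of that encoding limit (illustrative only, not kernel code; the function name and sample addresses are made up):

#include <stdint.h>
#include <stdio.h>

/* Value recoverable from a patched "sethi %hi(x), %g5; or %g5, %lo(x), %g5"
 * pair: sethi supplies bits 31:10 via its 22-bit immediate, or supplies
 * bits 9:0.  Anything above bit 31 is simply lost.
 */
static uint64_t sethi_or_encodable(uint64_t phys)
{
        uint32_t hi22 = (uint32_t)(phys >> 10) & 0x3fffff;  /* %hi(x) */
        uint32_t lo10 = (uint32_t)phys & 0x3ff;             /* %lo(x) */

        return ((uint64_t)hi22 << 10) | lo10;
}

int main(void)
{
        uint64_t below_4g = 0x00000000deadbc00ULL;
        uint64_t above_4g = 0x00000001deadbc00ULL;  /* e.g. prompmd above 4GB */

        printf("%016llx -> %016llx\n",
               (unsigned long long)below_4g,
               (unsigned long long)sethi_or_encodable(below_4g));
        printf("%016llx -> %016llx (bit 32 and above lost)\n",
               (unsigned long long)above_4g,
               (unsigned long long)sethi_or_encodable(above_4g));
        return 0;
}

With the prom pmd at a physical address above 4GB, the value reconstructed by the patched instructions silently drops the upper bits, which is exactly the failure the commit message describes.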
Diffstat (limited to 'arch/sparc64/kernel')
-rw-r--r--   arch/sparc64/kernel/ktlb.S | 31 ++++++++++++-------------------
1 file changed, 12 insertions(+), 19 deletions(-)
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index b7176792c9a2..a591bc0ebc7b 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -15,8 +15,6 @@
         .text
         .align          32
 
-        .globl          sparc64_vpte_patchme1
-        .globl          sparc64_vpte_patchme2
 /*
  * On a second level vpte miss, check whether the original fault is to the OBP
  * range (note that this is only possible for instruction miss, data misses to
@@ -33,18 +31,17 @@ sparc64_vpte_nucleus:
          */
         sethi           %hi(LOW_OBP_ADDRESS), %g5
         cmp             %g4, %g5
-        blu,pn          %xcc, sparc64_vpte_patchme1
+        blu,pn          %xcc, kern_vpte
         mov             0x1, %g5
         sllx            %g5, 32, %g5
         cmp             %g4, %g5
-        blu,pn          %xcc, obp_iaddr_patch
+        blu,pn          %xcc, vpte_insn_obp
         nop
 
         /* These two instructions are patched by paginig_init(). */
-sparc64_vpte_patchme1:
-        sethi           %hi(0), %g5
-sparc64_vpte_patchme2:
-        or              %g5, %lo(0), %g5
+kern_vpte:
+        sethi           %hi(swapper_pgd_zero), %g5
+        lduw            [%g5 + %lo(swapper_pgd_zero)], %g5
 
         /* With kernel PGD in %g5, branch back into dtlb_backend. */
         ba,pt           %xcc, sparc64_kpte_continue
@@ -60,11 +57,9 @@ vpte_noent:
         stxa            %g4, [%g1 + %g1] ASI_DMMU
         done
 
-        .globl          obp_iaddr_patch
-obp_iaddr_patch:
-        /* These two instructions patched by inherit_prom_mappings(). */
-        sethi           %hi(0), %g5
-        or              %g5, %lo(0), %g5
+vpte_insn_obp:
+        sethi           %hi(prom_pmd_phys), %g5
+        ldx             [%g5 + %lo(prom_pmd_phys)], %g5
 
         /* Behave as if we are at TL0. */
         wrpr            %g0, 1, %tl
@@ -100,11 +95,9 @@ obp_iaddr_patch:
         stxa            %g5, [%g0] ASI_ITLB_DATA_IN
         retry
 
-        .globl          obp_daddr_patch
-obp_daddr_patch:
-        /* These two instructions patched by inherit_prom_mappings(). */
-        sethi           %hi(0), %g5
-        or              %g5, %lo(0), %g5
+kvmap_do_obp:
+        sethi           %hi(prom_pmd_phys), %g5
+        ldx             [%g5 + %lo(prom_pmd_phys)], %g5
 
         /* Get PMD offset. */
         srlx            %g4, 23, %g6
@@ -159,7 +152,7 @@ kvmap_check_obp:
         mov             0x1, %g5
         sllx            %g5, 32, %g5
         cmp             %g4, %g5
-        blu,pn          %xcc, obp_daddr_patch
+        blu,pn          %xcc, kvmap_do_obp
         nop
 
 kvmap_vmalloc_addr:
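The diffstat is limited to arch/sparc64/kernel, so the C side of this change (where the two variables live and get assigned) is not shown here. Judging only from the loads in the new assembly above, lduw for swapper_pgd_zero and ldx for prom_pmd_phys, the definitions would look roughly like the sketch below; the exact declarations and where paging_init()/inherit_prom_mappings() assign them are assumptions, not part of this diff:

/* Assumed C-side sketch -- not part of the diff shown above.
 *
 * kern_vpte reads swapper_pgd_zero with lduw (32 bits), presumably the
 * value of the kernel's zeroth pgd entry; vpte_insn_obp/kvmap_do_obp
 * read prom_pmd_phys with ldx (64 bits), a full physical address, which
 * is exactly what the old 32-bit patched immediates could not carry.
 * Both variables sit in the kernel image, which is locked into the TLB,
 * so reading them from a TLB-miss handler cannot itself miss.
 */
unsigned long prom_pmd_phys;    /* physical address of the OBP pmd table */
unsigned int swapper_pgd_zero;  /* 32-bit kernel pgd value read by kern_vpte */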