 arch/sparc64/kernel/dtlb_miss.S      |  4
 arch/sparc64/kernel/itlb_miss.S      | 12
 arch/sparc64/kernel/ktlb.S           | 14
 arch/sparc64/kernel/sun4v_tlb_miss.S | 39
 arch/sparc64/kernel/tsb.S            | 17
 arch/sparc64/mm/init.c               |  4
 arch/sparc64/mm/tsb.c                | 25
 include/asm-sparc64/tsb.h            |  8
 include/asm-sparc64/ttable.h         | 12
 9 files changed, 78 insertions(+), 57 deletions(-)
diff --git a/arch/sparc64/kernel/dtlb_miss.S b/arch/sparc64/kernel/dtlb_miss.S
index 2ef6f6e6e72b..09a6a15a7105 100644
--- a/arch/sparc64/kernel/dtlb_miss.S
+++ b/arch/sparc64/kernel/dtlb_miss.S
@@ -2,10 +2,10 @@
 	ldxa	[%g0] ASI_DMMU_TSB_8KB_PTR, %g1	! Get TSB 8K pointer
 	ldxa	[%g0] ASI_DMMU, %g6		! Get TAG TARGET
 	srlx	%g6, 48, %g5			! Get context
+	sllx	%g6, 22, %g6			! Zero out context
 	brz,pn	%g5, kvmap_dtlb			! Context 0 processing
-	 nop					! Delay slot (fill me)
+	 srlx	%g6, 22, %g6			! Delay slot
 	TSB_LOAD_QUAD(%g1, %g4)			! Load TSB entry
-	nop					! Push branch to next I$ line
 	cmp	%g4, %g6			! Compare TAG

 /* DTLB ** ICACHE line 2: TSB compare and TLB load	*/
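Note on the tag computation above: the TAG TARGET register supplies the context number in its upper bits (the old code pulled it out with srlx %g6, 48) and VA[63:22] in bits 41:0. Because each mm now gets its own TSB, the context no longer needs to live in the stored tag, so instead of OR-ing it in the handler strips it with a shift pair; the ITLB handler below does the same. A minimal C sketch of the equivalent computation, with illustrative helper names:

    #include <stdint.h>

    /* Mirror of the sllx/srlx pair in the miss handlers: shifting the
     * TAG TARGET left then right by 22 clears everything above bit 41,
     * i.e. the context field, leaving the bare "vaddr >> 22" tag that
     * the per-mm TSB now stores and compares.
     */
    static inline uint64_t tsb_tag_from_tag_target(uint64_t tag_target)
    {
            return (tag_target << 22) >> 22;        /* sllx 22 ; srlx 22 */
    }

    static inline uint64_t tsb_tag_from_vaddr(uint64_t vaddr)
    {
            return vaddr >> 22;     /* what the TSB entry now stores */
    }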
diff --git a/arch/sparc64/kernel/itlb_miss.S b/arch/sparc64/kernel/itlb_miss.S
index 730caa4a1506..6dfe3968c379 100644
--- a/arch/sparc64/kernel/itlb_miss.S
+++ b/arch/sparc64/kernel/itlb_miss.S
@@ -2,25 +2,25 @@
 	ldxa	[%g0] ASI_IMMU_TSB_8KB_PTR, %g1	! Get TSB 8K pointer
 	ldxa	[%g0] ASI_IMMU, %g6		! Get TAG TARGET
 	srlx	%g6, 48, %g5			! Get context
+	sllx	%g6, 22, %g6			! Zero out context
 	brz,pn	%g5, kvmap_itlb			! Context 0 processing
-	 nop					! Delay slot (fill me)
+	 srlx	%g6, 22, %g6			! Delay slot
 	TSB_LOAD_QUAD(%g1, %g4)			! Load TSB entry
 	cmp	%g4, %g6			! Compare TAG
-	sethi	%hi(PAGE_EXEC), %g4		! Setup exec check

 /* ITLB ** ICACHE line 2: TSB compare and TLB load	*/
+	sethi	%hi(PAGE_EXEC), %g4		! Setup exec check
 	ldx	[%g4 + %lo(PAGE_EXEC)], %g4
 	bne,pn	%xcc, tsb_miss_itlb		! Miss
 	 mov	FAULT_CODE_ITLB, %g3
 	andcc	%g5, %g4, %g0			! Executable?
 	be,pn	%xcc, tsb_do_fault
 	 nop					! Delay slot, fill me
-	stxa	%g5, [%g0] ASI_ITLB_DATA_IN	! Load TLB
-	retry					! Trap done
+	nop

 /* ITLB ** ICACHE line 3:				*/
-	nop
-	nop
+	stxa	%g5, [%g0] ASI_ITLB_DATA_IN	! Load TLB
+	retry					! Trap done
 	nop
 	nop
 	nop
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index 47dfd45971e8..ac29da915d09 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -52,8 +52,10 @@ kvmap_itlb_vmalloc_addr:

 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	brgez,a,pn	%g5, kvmap_itlb_longpath
-	 KTSB_STORE(%g1, %g0)
+	 KTSB_STORE(%g1, %g7)

 	KTSB_WRITE(%g1, %g5, %g6)

@@ -146,8 +148,10 @@ kvmap_dtlb_vmalloc_addr:

 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	brgez,a,pn	%g5, kvmap_dtlb_longpath
-	 KTSB_STORE(%g1, %g0)
+	 KTSB_STORE(%g1, %g7)

 	KTSB_WRITE(%g1, %g5, %g6)

@@ -215,8 +219,8 @@ kvmap_dtlb_longpath:
 	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
 	.section	.sun4v_2insn_patch, "ax"
 	.word	661b
-	nop
-	nop
+	SET_GL(1)
+	ldxa		[%g0] ASI_SCRATCHPAD, %g5
 	.previous

 	rdpr	%tl, %g3
@@ -226,7 +230,7 @@ kvmap_dtlb_longpath:
 	ldxa	[%g4] ASI_DMMU, %g5
 	.section	.sun4v_2insn_patch, "ax"
 	.word	661b
-	mov	%g4, %g5
+	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
 	nop
 	.previous

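Note on the KTSB_STORE change above: the failure path used to store %g0, an all-zero tag, to knock the entry out of the kernel TSB. With the context gone from the tag, zero is a legitimate tag (any virtual address below 4MB gives vaddr >> 22 == 0), so the handlers now build and store a tag with the invalid bit set. A sketch of the value written, assuming the bit position from the include/asm-sparc64/tsb.h hunk further down:

    #include <stdint.h>

    #define TSB_TAG_INVALID_BIT 46  /* see include/asm-sparc64/tsb.h below */

    /* What KTSB_STORE(%g1, %g7) now writes into the tag word on the
     * failure path: real tags only occupy bits 41:0, so a tag with
     * bit 46 set can never compare equal to one.
     */
    static const uint64_t tsb_invalid_tag = 1ULL << TSB_TAG_INVALID_BIT;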
diff --git a/arch/sparc64/kernel/sun4v_tlb_miss.S b/arch/sparc64/kernel/sun4v_tlb_miss.S
index 244d50de8499..57ccdaec7ccb 100644
--- a/arch/sparc64/kernel/sun4v_tlb_miss.S
+++ b/arch/sparc64/kernel/sun4v_tlb_miss.S
@@ -16,15 +16,14 @@
 	ldx	[BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; \
 	ldx	[BASE + HV_FAULT_D_CTX_OFFSET], CTX;

-/* DEST = (CTX << 48) | (VADDR >> 22)
+/* DEST = (VADDR >> 22)
  *
  * Branch to ZERO_CTX_LABEL is context is zero.
  */
-#define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
-	srlx	VADDR, 22, TMP; \
-	sllx	CTX, 48, DEST; \
+#define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, ZERO_CTX_LABEL) \
+	srlx	VADDR, 22, DEST; \
 	brz,pn	CTX, ZERO_CTX_LABEL; \
-	or	DEST, TMP, DEST;
+	 nop;

 /* Create TSB pointer.  This is something like:
  *
@@ -53,7 +52,7 @@ sun4v_itlb_miss:
 	ldxa	[%g1] ASI_SCRATCHPAD, %g1

 	LOAD_ITLB_INFO(%g2, %g4, %g5)
-	COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v)
+	COMPUTE_TAG_TARGET(%g6, %g4, %g5, kvmap_itlb_4v)
 	COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)

 	/* Load TSB tag/pte into %g2/%g3 and compare the tag. */
@@ -72,15 +71,15 @@ sun4v_itlb_miss:
 	 *
 	 * %g3:	PTE
 	 * %g4:	vaddr
-	 * %g6:	TAG TARGET (only "CTX << 48" part matters)
 	 */
 sun4v_itlb_load:
+	ldxa	[%g0] ASI_SCRATCHPAD, %g6
 	mov	%o0, %g1		! save %o0
 	mov	%o1, %g2		! save %o1
 	mov	%o2, %g5		! save %o2
 	mov	%o3, %g7		! save %o3
 	mov	%g4, %o0		! vaddr
-	srlx	%g6, 48, %o1		! ctx
+	ldx	[%g6 + HV_FAULT_I_CTX_OFFSET], %o1	! ctx
 	mov	%g3, %o2		! PTE
 	mov	HV_MMU_IMMU, %o3	! flags
 	ta	HV_MMU_MAP_ADDR_TRAP
@@ -101,7 +100,7 @@ sun4v_dtlb_miss:
 	ldxa	[%g1] ASI_SCRATCHPAD, %g1

 	LOAD_DTLB_INFO(%g2, %g4, %g5)
-	COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
+	COMPUTE_TAG_TARGET(%g6, %g4, %g5, kvmap_dtlb_4v)
 	COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)

 	/* Load TSB tag/pte into %g2/%g3 and compare the tag. */
@@ -115,15 +114,15 @@ sun4v_dtlb_miss:
 	 *
 	 * %g3:	PTE
 	 * %g4:	vaddr
-	 * %g6:	TAG TARGET (only "CTX << 48" part matters)
 	 */
 sun4v_dtlb_load:
+	ldxa	[%g0] ASI_SCRATCHPAD, %g6
 	mov	%o0, %g1		! save %o0
 	mov	%o1, %g2		! save %o1
 	mov	%o2, %g5		! save %o2
 	mov	%o3, %g7		! save %o3
 	mov	%g4, %o0		! vaddr
-	srlx	%g6, 48, %o1		! ctx
+	ldx	[%g6 + HV_FAULT_D_CTX_OFFSET], %o1	! ctx
 	mov	%g3, %o2		! PTE
 	mov	HV_MMU_DMMU, %o3	! flags
 	ta	HV_MMU_MAP_ADDR_TRAP
@@ -136,16 +135,18 @@ sun4v_dtlb_load:
 	retry

 sun4v_dtlb_prot:
+	SET_GL(1)
+
 	/* Load MMU Miss base into %g2.  */
-	ldxa	[%g0] ASI_SCRATCHPAD, %g2
+	ldxa	[%g0] ASI_SCRATCHPAD, %g5

-	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g5
+	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
 	rdpr	%tl, %g1
 	cmp	%g1, 1
 	bgu,pn	%xcc, winfix_trampoline
 	 nop
 	ba,pt	%xcc, sparc64_realfault_common
 	 mov	FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4

 /* Called from trap table with TAG TARGET placed into
  * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
@@ -189,7 +190,8 @@ sun4v_itlb_error:
 	sethi	%hi(sun4v_err_itlb_vaddr), %g1
 	stx	%g4, [%g1 + %lo(sun4v_err_itlb_vaddr)]
 	sethi	%hi(sun4v_err_itlb_ctx), %g1
-	srlx	%g6, 48, %o1			! ctx
+	ldxa	[%g0] ASI_SCRATCHPAD, %g6
+	ldx	[%g6 + HV_FAULT_I_CTX_OFFSET], %o1
 	stx	%o1, [%g1 + %lo(sun4v_err_itlb_ctx)]
 	sethi	%hi(sun4v_err_itlb_pte), %g1
 	stx	%g3, [%g1 + %lo(sun4v_err_itlb_pte)]
@@ -214,7 +216,8 @@ sun4v_dtlb_error:
 	sethi	%hi(sun4v_err_dtlb_vaddr), %g1
 	stx	%g4, [%g1 + %lo(sun4v_err_dtlb_vaddr)]
 	sethi	%hi(sun4v_err_dtlb_ctx), %g1
-	srlx	%g6, 48, %o1			! ctx
+	ldxa	[%g0] ASI_SCRATCHPAD, %g6
+	ldx	[%g6 + HV_FAULT_D_CTX_OFFSET], %o1
 	stx	%o1, [%g1 + %lo(sun4v_err_dtlb_ctx)]
 	sethi	%hi(sun4v_err_dtlb_pte), %g1
 	stx	%g3, [%g1 + %lo(sun4v_err_dtlb_pte)]
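Note on the sun4v load and error paths above: the context used to be recovered from the tag target with srlx %g6, 48; since the tag no longer carries it, these paths reload the scratchpad pointer and read the context back out of the MMU fault status area. A rough C model of that lookup; the helper is illustrative, and the real offsets are the HV_FAULT_I_CTX_OFFSET / HV_FAULT_D_CTX_OFFSET constants used in the assembly:

    #include <stdint.h>

    /* Illustrative model only: the per-cpu MMU fault status block lives
     * at the address kept in the scratchpad register, and the faulting
     * context is a 64-bit field at a fixed offset inside it.
     */
    static inline uint64_t fault_ctx(const void *fault_block, uint64_t ctx_offset)
    {
            /* equivalent of: ldx [%g6 + HV_FAULT_*_CTX_OFFSET], %o1 */
            return *(const uint64_t *)((const char *)fault_block + ctx_offset);
    }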
diff --git a/arch/sparc64/kernel/tsb.S b/arch/sparc64/kernel/tsb.S
index a17259cf34b8..cc225c0563c3 100644
--- a/arch/sparc64/kernel/tsb.S
+++ b/arch/sparc64/kernel/tsb.S
@@ -36,7 +36,7 @@ tsb_miss_itlb:
 	/* At this point we have:
 	 * %g4 --	missing virtual address
 	 * %g1 --	TSB entry address
-	 * %g6 --	TAG TARGET ((vaddr >> 22) | (ctx << 48))
+	 * %g6 --	TAG TARGET (vaddr >> 22)
 	 */
 tsb_miss_page_table_walk:
 	TRAP_LOAD_PGD_PHYS(%g7, %g5)
@@ -50,8 +50,10 @@ tsb_reload:

 	/* Load and check PTE.  */
 	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	brgez,a,pn	%g5, tsb_do_fault
-	 TSB_STORE(%g1, %g0)
+	 TSB_STORE(%g1, %g7)

 	/* If it is larger than the base page size, don't
 	 * bother putting it into the TSB.
@@ -62,8 +64,10 @@ tsb_reload:
 	sethi		%hi(_PAGE_SZBITS), %g7
 	ldx		[%g7 + %lo(_PAGE_SZBITS)], %g7
 	cmp		%g2, %g7
+	mov		1, %g7
+	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
 	bne,a,pn	%xcc, tsb_tlb_reload
-	 TSB_STORE(%g1, %g0)
+	 TSB_STORE(%g1, %g7)

 	TSB_WRITE(%g1, %g5, %g6)

@@ -136,7 +140,7 @@ tsb_do_fault:
 	.section	.sun4v_2insn_patch, "ax"
 	.word	661b
 	SET_GL(1)
-	ldxa	[%g0] ASI_SCRATCHPAD, %g2
+	ldxa	[%g0] ASI_SCRATCHPAD, %g4
 	.previous

 	bne,pn	%xcc, tsb_do_itlb_fault
@@ -150,7 +154,7 @@ tsb_do_dtlb_fault:
 	ldxa	[%g4] ASI_DMMU, %g5
 	.section	.sun4v_2insn_patch, "ax"
 	.word	661b
-	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g5
+	ldx	[%g4 + HV_FAULT_D_ADDR_OFFSET], %g5
 	nop
 	.previous

@@ -217,8 +221,9 @@ tsb_flush:
 	bne,pn	%icc, 1b
 	 membar	#LoadLoad
 	cmp	%g1, %o1
+	mov	1, %o3
 	bne,pt	%xcc, 2f
-	 clr	%o3
+	 sllx	%o3, TSB_TAG_INVALID_BIT, %o3
 	TSB_CAS_TAG(%o0, %g1, %o3)
 	cmp	%g1, %o3
 	bne,pn	%xcc, 1b
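Note on the tsb_flush change above: a matching entry used to be cleared to zero (clr %o3); it is now compare-and-swapped to the invalid pattern, for the same reason the KTSB_STORE paths changed. A simplified C sketch of that final step, with a GCC builtin standing in for the TSB_CAS_TAG assembly; the real code also spins while the lock bit is held and retries if the CAS observes a different tag:

    #include <stdint.h>

    #define TSB_TAG_INVALID_BIT 46

    /* If the entry still holds the tag we were asked to flush, atomically
     * replace it with the invalid pattern; otherwise leave it alone.
     */
    static inline void tsb_flush_entry(uint64_t *tag_word, uint64_t expected_tag)
    {
            uint64_t invalid = 1ULL << TSB_TAG_INVALID_BIT;

            if (*tag_word == expected_tag)
                    __sync_val_compare_and_swap(tag_word, expected_tag, invalid);
    }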
diff --git a/arch/sparc64/mm/init.c b/arch/sparc64/mm/init.c
index bd9e3205674b..aa2aec6373c3 100644
--- a/arch/sparc64/mm/init.c
+++ b/arch/sparc64/mm/init.c
@@ -296,7 +296,7 @@ void update_mmu_cache(struct vm_area_struct *vma, unsigned long address, pte_t p

 		tsb = &mm->context.tsb[(address >> PAGE_SHIFT) &
 				       (mm->context.tsb_nentries - 1UL)];
-		tag = (address >> 22UL) | CTX_HWBITS(mm->context) << 48UL;
+		tag = (address >> 22UL);
 		tsb_insert(tsb, tag, pte_val(pte));
 	}
 }
@@ -1110,6 +1110,8 @@ void __init paging_init(void)
 	kern_base = (prom_boot_mapping_phys_low >> 22UL) << 22UL;
 	kern_size = (unsigned long)&_end - (unsigned long)KERNBASE;

+	memset(swapper_tsb, 0x40, sizeof(swapper_tsb));
+
 	if (tlb_type == hypervisor)
 		sun4v_pgprot_init();
 	else
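Note on memset(swapper_tsb, 0x40, ...) above: filling every byte with 0x40 turns each 64-bit tag word into 0x4040404040404040, and the 0x40 in byte 5 is exactly bit 46, i.e. TSB_TAG_INVALID_BIT, so every entry starts out invalid. The other stale bits (including whatever lands in the PTE half) are harmless because an invalid tag never compares equal to a real one. A quick standalone check of that arithmetic:

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    #define TSB_TAG_INVALID_BIT 46

    int main(void)
    {
            uint64_t tag;

            /* same byte fill as the TSB initialization in the patch */
            memset(&tag, 0x40, sizeof(tag));
            assert(tag == 0x4040404040404040ULL);
            assert(tag & (1ULL << TSB_TAG_INVALID_BIT));
            return 0;
    }

The same fill is applied to freshly grown user TSB pages in mm/tsb.c below, which is why the __GFP_ZERO there can be dropped.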
diff --git a/arch/sparc64/mm/tsb.c b/arch/sparc64/mm/tsb.c
index 3c1ff05038b1..353cb060561b 100644
--- a/arch/sparc64/mm/tsb.c
+++ b/arch/sparc64/mm/tsb.c
@@ -20,9 +20,9 @@ static inline unsigned long tsb_hash(unsigned long vaddr, unsigned long nentries
 	return vaddr & (nentries - 1);
 }

-static inline int tag_compare(unsigned long tag, unsigned long vaddr, unsigned long context)
+static inline int tag_compare(unsigned long tag, unsigned long vaddr)
 {
-	return (tag == ((vaddr >> 22) | (context << 48)));
+	return (tag == (vaddr >> 22));
 }

 /* TSB flushes need only occur on the processor initiating the address
@@ -38,8 +38,8 @@ void flush_tsb_kernel_range(unsigned long start, unsigned long end)
 		unsigned long hash = tsb_hash(v, KERNEL_TSB_NENTRIES);
 		struct tsb *ent = &swapper_tsb[hash];

-		if (tag_compare(ent->tag, v, 0)) {
-			ent->tag = 0UL;
+		if (tag_compare(ent->tag, v)) {
+			ent->tag = (1UL << TSB_TAG_INVALID_BIT);
 			membar_storeload_storestore();
 		}
 	}
@@ -50,14 +50,9 @@ void flush_tsb_user(struct mmu_gather *mp)
 	struct mm_struct *mm = mp->mm;
 	struct tsb *tsb = mm->context.tsb;
 	unsigned long nentries = mm->context.tsb_nentries;
-	unsigned long ctx, base;
+	unsigned long base;
 	int i;

-	if (unlikely(!CTX_VALID(mm->context)))
-		return;
-
-	ctx = CTX_HWBITS(mm->context);
-
 	if (tlb_type == cheetah_plus || tlb_type == hypervisor)
 		base = __pa(tsb);
 	else
@@ -71,7 +66,7 @@ void flush_tsb_user(struct mmu_gather *mp)

 		hash = tsb_hash(v, nentries);
 		ent = base + (hash * sizeof(struct tsb));
-		tag = (v >> 22UL) | (ctx << 48UL);
+		tag = (v >> 22UL);

 		tsb_flush(ent, tag);
 	}
@@ -243,7 +238,8 @@ static void copy_tsb(struct tsb *old_tsb, unsigned long old_size,
 			     "i" (ASI_NUCLEUS_QUAD_LDD));
 		}

-		if (!tag || (tag & (1UL << TSB_TAG_LOCK_BIT)))
+		if (tag & ((1UL << TSB_TAG_LOCK_BIT) |
+			   (1UL << TSB_TAG_INVALID_BIT)))
 			continue;

 		/* We only put base page size PTEs into the TSB,
@@ -315,10 +311,13 @@ void tsb_grow(struct mm_struct *mm, unsigned long rss, gfp_t gfp_flags)
 			break;
 	}

-	page = alloc_pages(gfp_flags | __GFP_ZERO, get_order(size));
+	page = alloc_pages(gfp_flags, get_order(size));
 	if (unlikely(!page))
 		return;

+	/* Mark all tags as invalid.  */
+	memset(page_address(page), 0x40, size);
+
 	if (size == max_tsb_size)
 		mm->context.tsb_rss_limit = ~0UL;
 	else
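Note on the user-TSB helpers above: the hash still picks the bucket from the page number, while the tag that update_mmu_cache() inserts, tag_compare() checks, and flush_tsb_user() passes to tsb_flush() is now just vaddr >> 22, with no CTX_HWBITS(...) << 48 component (presumably also why the CTX_VALID early-out can go: the flush no longer needs a hardware context number). A small sketch of that pairing, with page_shift and nentries as explicit parameters instead of the kernel's PAGE_SHIFT and mm->context.tsb_nentries:

    /* Bucket index and tag for the per-mm TSB after this change. */
    static inline unsigned long tsb_hash(unsigned long vaddr,
                                         unsigned long page_shift,
                                         unsigned long nentries)
    {
            return (vaddr >> page_shift) & (nentries - 1);
    }

    static inline unsigned long tsb_tag(unsigned long vaddr)
    {
            return vaddr >> 22;     /* no CTX_HWBITS(...) << 48 any more */
    }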
diff --git a/include/asm-sparc64/tsb.h b/include/asm-sparc64/tsb.h
index 7f3abc32c4dd..6e6768067e38 100644
--- a/include/asm-sparc64/tsb.h
+++ b/include/asm-sparc64/tsb.h
@@ -12,6 +12,8 @@
  *
  * 	ldxa		[%g0] ASI_{D,I}MMU_TSB_8KB_PTR, %g1
  * 	ldxa		[%g0] ASI_{D,I}MMU, %g6
+ *	sllx		%g6, 22, %g6
+ *	srlx		%g6, 22, %g6
  * 	ldda		[%g1] ASI_NUCLEUS_QUAD_LDD, %g4
  * 	cmp		%g4, %g6
  * 	bne,pn		%xcc, tsb_miss_{d,i}tlb
@@ -29,6 +31,9 @@
  * -------------------------------------------------
  *  63 61 60      48 47 42 41                     0
  *
+ * But actually, since we use per-mm TSB's, we zero out the CONTEXT
+ * field.
+ *
  * Like the powerpc hashtables we need to use locking in order to
  * synchronize while we update the entries.  PTE updates need locking
  * as well.
@@ -42,6 +47,9 @@
 #define TSB_TAG_LOCK_BIT	47
 #define TSB_TAG_LOCK_HIGH	(1 << (TSB_TAG_LOCK_BIT - 32))

+#define TSB_TAG_INVALID_BIT	46
+#define TSB_TAG_INVALID_HIGH	(1 << (TSB_TAG_INVALID_BIT - 32))
+
 #define TSB_MEMBAR	membar	#StoreStore

 /* Some cpus support physical address quad loads.  We want to use
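Note on the new tag bits above: the tag word now carries two software bits above the 42 virtual-address bits, bit 47 to lock an entry while it is being updated and bit 46 to mark it empty; the *_HIGH forms express the same bits relative to the upper 32-bit half of the tag word, which parts of the assembly appear to operate on. The copy_tsb() test in arch/sparc64/mm/tsb.c skips an entry if either bit is set; a compact restatement in C, with an illustrative helper name:

    #include <stdbool.h>
    #include <stdint.h>

    #define TSB_TAG_LOCK_BIT        47
    #define TSB_TAG_LOCK_HIGH       (1 << (TSB_TAG_LOCK_BIT - 32))
    #define TSB_TAG_INVALID_BIT     46
    #define TSB_TAG_INVALID_HIGH    (1 << (TSB_TAG_INVALID_BIT - 32))

    /* An entry is worth copying or matching only if it is neither locked
     * for update nor marked invalid.
     */
    static inline bool tag_is_usable(uint64_t tag)
    {
            return !(tag & ((1ULL << TSB_TAG_LOCK_BIT) |
                            (1ULL << TSB_TAG_INVALID_BIT)));
    }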
diff --git a/include/asm-sparc64/ttable.h b/include/asm-sparc64/ttable.h
index 9e28b240f3aa..2d5e3c464df5 100644
--- a/include/asm-sparc64/ttable.h
+++ b/include/asm-sparc64/ttable.h
@@ -184,20 +184,20 @@
 	ldxa	[%g0] ASI_SCRATCHPAD, %g2;		\
 	ldx	[%g2 + HV_FAULT_I_ADDR_OFFSET], %g4;	\
 	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5;	\
-	srlx	%g4, 22, %g7;				\
-	sllx	%g5, 48, %g6;				\
+	srlx	%g4, 22, %g6;				\
 	ba,pt	%xcc, sun4v_itsb_miss;			\
-	 or	%g6, %g7, %g6;				\
+	 nop;						\
+	nop;						\
 	nop;

 #define SUN4V_DTSB_MISS \
 	ldxa	[%g0] ASI_SCRATCHPAD, %g2;		\
 	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4;	\
 	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5;	\
-	srlx	%g4, 22, %g7;				\
-	sllx	%g5, 48, %g6;				\
+	srlx	%g4, 22, %g6;				\
 	ba,pt	%xcc, sun4v_dtsb_miss;			\
-	 or	%g6, %g7, %g6;				\
+	 nop;						\
+	nop;						\
 	nop;

 /* Before touching these macros, you owe it to yourself to go and