about summary refs log tree commit diff stats
path: root/arch/sparc64/kernel/sun4v_tlb_miss.S
diff options
context:
space:
mode:
Diffstat (limited to 'arch/sparc64/kernel/sun4v_tlb_miss.S')
-rw-r--r--arch/sparc64/kernel/sun4v_tlb_miss.S130
1 file changed, 53 insertions, 77 deletions
diff --git a/arch/sparc64/kernel/sun4v_tlb_miss.S b/arch/sparc64/kernel/sun4v_tlb_miss.S
index f6222623de38..f7129137f9a4 100644
--- a/arch/sparc64/kernel/sun4v_tlb_miss.S
+++ b/arch/sparc64/kernel/sun4v_tlb_miss.S
@@ -6,48 +6,55 @@
6 .text 6 .text
7 .align 32 7 .align 32
8 8
9sun4v_itlb_miss: 9 /* Load ITLB fault information into VADDR and CTX, using BASE. */
10 /* Load MMU Miss base into %g2. */ 10#define LOAD_ITLB_INFO(BASE, VADDR, CTX) \
11 ldxa [%g0] ASI_SCRATCHPAD, %g3 11 ldx [BASE + HV_FAULT_I_ADDR_OFFSET], VADDR; \
12 12 ldx [BASE + HV_FAULT_I_CTX_OFFSET], CTX;
13 /* Load UTSB reg into %g1. */ 13
14 mov SCRATCHPAD_UTSBREG1, %g1 14 /* Load DTLB fault information into VADDR and CTX, using BASE. */
15 ldxa [%g1] ASI_SCRATCHPAD, %g1 15#define LOAD_DTLB_INFO(BASE, VADDR, CTX) \
16 ldx [BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; \
17 ldx [BASE + HV_FAULT_D_CTX_OFFSET], CTX;
16 18
17 /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6. 19 /* DEST = (CTX << 48) | (VADDR >> 22)
18 * Branch if kernel TLB miss. The kernel TSB and user TSB miss 20 *
19 * code wants the missing virtual address in %g4, so that value 21 * Branch to ZERO_CTX_LABEL if context is zero.
20 * cannot be modified through the entirety of this handler.
21 */ 22 */
22 ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4 23#define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
23 ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5 24 srlx VADDR, 22, TMP; \
24 srlx %g4, 22, %g3 25 sllx CTX, 48, DEST; \
25 sllx %g5, 48, %g6 26 brz,pn CTX, ZERO_CTX_LABEL; \
26 or %g6, %g3, %g6 27 or DEST, TMP, DEST;
27 brz,pn %g5, kvmap_itlb_4v
28 nop
29 28
30 /* Create TSB pointer. This is something like: 29 /* Create TSB pointer. This is something like:
31 * 30 *
32 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL; 31 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
33 * tsb_base = tsb_reg & ~0x7UL; 32 * tsb_base = tsb_reg & ~0x7UL;
34 */
35 and %g1, 0x7, %g3
36 andn %g1, 0x7, %g1
37 mov 512, %g7
38 sllx %g7, %g3, %g7
39 sub %g7, 1, %g7
40
41 /* TSB index mask is in %g7, tsb base is in %g1. Compute
42 * the TSB entry pointer into %g1:
43 *
44 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask); 33 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
45 * tsb_ptr = tsb_base + (tsb_index * 16); 34 * tsb_ptr = tsb_base + (tsb_index * 16);
46 */ 35 */
47 srlx %g4, PAGE_SHIFT, %g3 36#define COMPUTE_TSB_PTR(TSB_PTR, VADDR, TMP1, TMP2) \
48 and %g3, %g7, %g3 37 and TSB_PTR, 0x7, TMP1; \
49 sllx %g3, 4, %g3 38 mov 512, TMP2; \
50 add %g1, %g3, %g1 39 andn TSB_PTR, 0x7, TSB_PTR; \
40 sllx TMP2, TMP1, TMP2; \
41 srlx VADDR, PAGE_SHIFT, TMP1; \
42 sub TMP2, 1, TMP2; \
43 and TMP1, TMP2, TMP1; \
44 sllx TMP1, 4, TMP1; \
45 add TSB_PTR, TMP1, TSB_PTR;
46
47sun4v_itlb_miss:
48 /* Load MMU Miss base into %g2. */
49 ldxa [%g0] ASI_SCRATCHPAD, %g2
50
51 /* Load UTSB reg into %g1. */
52 mov SCRATCHPAD_UTSBREG1, %g1
53 ldxa [%g1] ASI_SCRATCHPAD, %g1
54
55 LOAD_ITLB_INFO(%g2, %g4, %g5)
56 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v)
57 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
51 58
52 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */ 59 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
53 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2 60 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
@@ -91,40 +98,9 @@ sun4v_dtlb_miss:
91 mov SCRATCHPAD_UTSBREG1, %g1 98 mov SCRATCHPAD_UTSBREG1, %g1
92 ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1 99 ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1
93 100
94 /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6. 101 LOAD_DTLB_INFO(%g2, %g4, %g5)
95 * Branch if kernel TLB miss. The kernel TSB and user TSB miss 102 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
96 * code wants the missing virtual address in %g4, so that value 103 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
97 * cannot be modified through the entirety of this handler.
98 */
99 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
100 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
101 srlx %g4, 22, %g3
102 sllx %g5, 48, %g6
103 or %g6, %g3, %g6
104 brz,pn %g5, kvmap_dtlb_4v
105 nop
106
107 /* Create TSB pointer. This is something like:
108 *
109 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
110 * tsb_base = tsb_reg & ~0x7UL;
111 */
112 and %g1, 0x7, %g3
113 andn %g1, 0x7, %g1
114 mov 512, %g7
115 sllx %g7, %g3, %g7
116 sub %g7, 1, %g7
117
118 /* TSB index mask is in %g7, tsb base is in %g1. Compute
119 * the TSB entry pointer into %g1:
120 *
121 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
122 * tsb_ptr = tsb_base + (tsb_index * 16);
123 */
124 srlx %g4, PAGE_SHIFT, %g3
125 and %g3, %g7, %g3
126 sllx %g3, 4, %g3
127 add %g1, %g3, %g1
128 104
129 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */ 105 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
130 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2 106 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
@@ -169,7 +145,8 @@ sun4v_dtlb_prot:
169 mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4 145 mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
170 146
171 /* Called from trap table with TAG TARGET placed into 147 /* Called from trap table with TAG TARGET placed into
172 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1. 148 * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
149 * SCRATCHPAD_MMU_MISS contents in %g2.
173 */ 150 */
174sun4v_itsb_miss: 151sun4v_itsb_miss:
175 ba,pt %xcc, sun4v_tsb_miss_common 152 ba,pt %xcc, sun4v_tsb_miss_common
@@ -189,16 +166,15 @@ sun4v_dtsb_miss:
189 * tsb_ptr = tsb_base + (tsb_index * 16); 166 * tsb_ptr = tsb_base + (tsb_index * 16);
190 */ 167 */
191sun4v_tsb_miss_common: 168sun4v_tsb_miss_common:
192 and %g1, 0x7, %g2 169 COMPUTE_TSB_PTR(%g1, %g4, %g5, %g7)
193 andn %g1, 0x7, %g1 170
194 mov 512, %g7 171 /* Branch directly to page table lookup. We have SCRATCHPAD_MMU_MISS
195 sllx %g7, %g2, %g7 172 * still in %g2, so it's quite trivial to get at the PGD PHYS value
196 sub %g7, 1, %g7 173 * so we can preload it into %g7.
197 srlx %g4, PAGE_SHIFT, %g2 174 */
198 and %g2, %g7, %g2 175 sub %g2, TRAP_PER_CPU_FAULT_INFO, %g2
199 sllx %g2, 4, %g2 176 ba,pt %xcc, tsb_miss_page_table_walk_sun4v_fastpath
200 ba,pt %xcc, tsb_miss_page_table_walk 177 ldx [%g2 + TRAP_PER_CPU_PGD_PADDR], %g7
201 add %g1, %g2, %g1
202 178
203 /* Instruction Access Exception, tl0. */ 179 /* Instruction Access Exception, tl0. */
204sun4v_iacc: 180sun4v_iacc: