Diffstat (limited to 'arch/powerpc/mm/hash_native_64.c')
-rw-r--r--  arch/powerpc/mm/hash_native_64.c  92
1 file changed, 46 insertions, 46 deletions
diff --git a/arch/powerpc/mm/hash_native_64.c b/arch/powerpc/mm/hash_native_64.c
index 6ba9b47e55af..34e5c0b219b9 100644
--- a/arch/powerpc/mm/hash_native_64.c
+++ b/arch/powerpc/mm/hash_native_64.c
@@ -38,7 +38,7 @@
 
 static DEFINE_SPINLOCK(native_tlbie_lock);
 
-static inline void __tlbie(unsigned long va, unsigned int psize)
+static inline void __tlbie(unsigned long va, int psize, int ssize)
 {
         unsigned int penc;
 
@@ -48,18 +48,20 @@ static inline void __tlbie(unsigned long va, unsigned int psize)
         switch (psize) {
         case MMU_PAGE_4K:
                 va &= ~0xffful;
+                va |= ssize << 8;
                 asm volatile("tlbie %0,0" : : "r" (va) : "memory");
                 break;
         default:
                 penc = mmu_psize_defs[psize].penc;
                 va &= ~((1ul << mmu_psize_defs[psize].shift) - 1);
                 va |= penc << 12;
+                va |= ssize << 8;
                 asm volatile("tlbie %0,1" : : "r" (va) : "memory");
                 break;
         }
 }
 
-static inline void __tlbiel(unsigned long va, unsigned int psize)
+static inline void __tlbiel(unsigned long va, int psize, int ssize)
 {
         unsigned int penc;
 
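The two hunks above fold the segment size into the operand handed to the tlbie/tlbiel instructions: the page offset is masked off (plus the penc encoding at bit 12 for large pages) and ssize is OR-ed in at bit 8. As a rough standalone illustration of that composition (the helper name is hypothetical; MMU_PAGE_4K and mmu_psize_defs are the kernel symbols already used by the patch):

static unsigned long tlbie_operand(unsigned long va, int psize, int ssize)
{
        unsigned long rb = va;

        if (psize == MMU_PAGE_4K) {
                rb &= ~0xffful;                 /* drop the 4K page offset */
        } else {
                /* large page: mask the larger offset, add the penc field */
                rb &= ~((1ul << mmu_psize_defs[psize].shift) - 1);
                rb |= mmu_psize_defs[psize].penc << 12;
        }
        rb |= (unsigned long)ssize << 8;        /* segment-size field */
        return rb;
}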
@@ -69,6 +71,7 @@ static inline void __tlbiel(unsigned long va, unsigned int psize)
         switch (psize) {
         case MMU_PAGE_4K:
                 va &= ~0xffful;
+                va |= ssize << 8;
                 asm volatile(".long 0x7c000224 | (%0 << 11) | (0 << 21)"
                              : : "r"(va) : "memory");
                 break;
@@ -76,6 +79,7 @@ static inline void __tlbiel(unsigned long va, unsigned int psize)
                 penc = mmu_psize_defs[psize].penc;
                 va &= ~((1ul << mmu_psize_defs[psize].shift) - 1);
                 va |= penc << 12;
+                va |= ssize << 8;
                 asm volatile(".long 0x7c000224 | (%0 << 11) | (1 << 21)"
                              : : "r"(va) : "memory");
                 break;
@@ -83,7 +87,7 @@ static inline void __tlbiel(unsigned long va, unsigned int psize)
 
 }
 
-static inline void tlbie(unsigned long va, int psize, int local)
+static inline void tlbie(unsigned long va, int psize, int ssize, int local)
 {
         unsigned int use_local = local && cpu_has_feature(CPU_FTR_TLBIEL);
         int lock_tlbie = !cpu_has_feature(CPU_FTR_LOCKLESS_TLBIE);
@@ -94,10 +98,10 @@ static inline void tlbie(unsigned long va, int psize, int local)
                 spin_lock(&native_tlbie_lock);
         asm volatile("ptesync": : :"memory");
         if (use_local) {
-                __tlbiel(va, psize);
+                __tlbiel(va, psize, ssize);
                 asm volatile("ptesync": : :"memory");
         } else {
-                __tlbie(va, psize);
+                __tlbie(va, psize, ssize);
                 asm volatile("eieio; tlbsync; ptesync": : :"memory");
         }
         if (lock_tlbie && !use_local)
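With the extra parameter, every caller now states which segment size the VA was hashed under. A hypothetical caller flushing a single 4K page might look like this (MMU_SEGSIZE_256M is assumed to be the 256MB segment-size encoding defined alongside this change; it is not part of this file):

static void flush_one_4k_page(unsigned long va, int local)
{
        /* 4K page in a 256MB segment; 'local' selects tlbiel vs. tlbie */
        tlbie(va, MMU_PAGE_4K, MMU_SEGSIZE_256M, local);
}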
@@ -126,7 +130,7 @@ static inline void native_unlock_hpte(struct hash_pte *hptep)
 
 static long native_hpte_insert(unsigned long hpte_group, unsigned long va,
                         unsigned long pa, unsigned long rflags,
-                        unsigned long vflags, int psize)
+                        unsigned long vflags, int psize, int ssize)
 {
         struct hash_pte *hptep = htab_address + hpte_group;
         unsigned long hpte_v, hpte_r;
@@ -153,7 +157,7 @@ static long native_hpte_insert(unsigned long hpte_group, unsigned long va,
         if (i == HPTES_PER_GROUP)
                 return -1;
 
-        hpte_v = hpte_encode_v(va, psize) | vflags | HPTE_V_VALID;
+        hpte_v = hpte_encode_v(va, psize, ssize) | vflags | HPTE_V_VALID;
         hpte_r = hpte_encode_r(pa, psize) | rflags;
 
         if (!(vflags & HPTE_V_BOLTED)) {
@@ -215,13 +219,14 @@ static long native_hpte_remove(unsigned long hpte_group)
 }
 
 static long native_hpte_updatepp(unsigned long slot, unsigned long newpp,
-                                 unsigned long va, int psize, int local)
+                                 unsigned long va, int psize, int ssize,
+                                 int local)
 {
         struct hash_pte *hptep = htab_address + slot;
         unsigned long hpte_v, want_v;
         int ret = 0;
 
-        want_v = hpte_encode_v(va, psize);
+        want_v = hpte_encode_v(va, psize, ssize);
 
         DBG_LOW(" update(va=%016lx, avpnv=%016lx, hash=%016lx, newpp=%x)",
                 va, want_v & HPTE_V_AVPN, slot, newpp);
@@ -243,39 +248,32 @@ static long native_hpte_updatepp(unsigned long slot, unsigned long newpp,
         native_unlock_hpte(hptep);
 
         /* Ensure it is out of the tlb too. */
-        tlbie(va, psize, local);
+        tlbie(va, psize, ssize, local);
 
         return ret;
 }
 
-static long native_hpte_find(unsigned long va, int psize)
+static long native_hpte_find(unsigned long va, int psize, int ssize)
 {
         struct hash_pte *hptep;
         unsigned long hash;
-        unsigned long i, j;
+        unsigned long i;
         long slot;
         unsigned long want_v, hpte_v;
 
-        hash = hpt_hash(va, mmu_psize_defs[psize].shift);
-        want_v = hpte_encode_v(va, psize);
+        hash = hpt_hash(va, mmu_psize_defs[psize].shift, ssize);
+        want_v = hpte_encode_v(va, psize, ssize);
 
-        for (j = 0; j < 2; j++) {
-                slot = (hash & htab_hash_mask) * HPTES_PER_GROUP;
-                for (i = 0; i < HPTES_PER_GROUP; i++) {
-                        hptep = htab_address + slot;
-                        hpte_v = hptep->v;
+        /* Bolted mappings are only ever in the primary group */
+        slot = (hash & htab_hash_mask) * HPTES_PER_GROUP;
+        for (i = 0; i < HPTES_PER_GROUP; i++) {
+                hptep = htab_address + slot;
+                hpte_v = hptep->v;
 
-                        if (HPTE_V_COMPARE(hpte_v, want_v)
-                            && (hpte_v & HPTE_V_VALID)
-                            && ( !!(hpte_v & HPTE_V_SECONDARY) == j)) {
-                                /* HPTE matches */
-                                if (j)
-                                        slot = -slot;
-                                return slot;
-                        }
-                        ++slot;
-                }
-                hash = ~hash;
+                if (HPTE_V_COMPARE(hpte_v, want_v) && (hpte_v & HPTE_V_VALID))
+                        /* HPTE matches */
+                        return slot;
+                ++slot;
         }
 
         return -1;
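native_hpte_find() previously probed both the primary and the secondary hash group (a second pass with the hash inverted, returning a negated slot for secondary hits). As the new comment notes, bolted kernel mappings are only ever in the primary group, so the search collapses to a single pass. For reference, the two group origins derive from the same formula on the plain and complemented hash (helper names below are illustrative only):

static unsigned long primary_group(unsigned long hash)
{
        return (hash & htab_hash_mask) * HPTES_PER_GROUP;
}

static unsigned long secondary_group(unsigned long hash)
{
        /* the secondary hash is the bitwise complement of the primary */
        return (~hash & htab_hash_mask) * HPTES_PER_GROUP;
}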
@@ -289,16 +287,16 @@ static long native_hpte_find(unsigned long va, int psize)
  * No need to lock here because we should be the only user.
  */
 static void native_hpte_updateboltedpp(unsigned long newpp, unsigned long ea,
-                                       int psize)
+                                       int psize, int ssize)
 {
         unsigned long vsid, va;
         long slot;
         struct hash_pte *hptep;
 
-        vsid = get_kernel_vsid(ea);
-        va = (vsid << 28) | (ea & 0x0fffffff);
+        vsid = get_kernel_vsid(ea, ssize);
+        va = hpt_va(ea, vsid, ssize);
 
-        slot = native_hpte_find(va, psize);
+        slot = native_hpte_find(va, psize, ssize);
         if (slot == -1)
                 panic("could not find page to bolt\n");
         hptep = htab_address + slot;
@@ -308,11 +306,11 @@ static void native_hpte_updateboltedpp(unsigned long newpp, unsigned long ea,
                 (newpp & (HPTE_R_PP | HPTE_R_N));
 
         /* Ensure it is out of the tlb too. */
-        tlbie(va, psize, 0);
+        tlbie(va, psize, ssize, 0);
 }
 
 static void native_hpte_invalidate(unsigned long slot, unsigned long va,
-                                   int psize, int local)
+                                   int psize, int ssize, int local)
 {
         struct hash_pte *hptep = htab_address + slot;
         unsigned long hpte_v;
@@ -323,7 +321,7 @@ static void native_hpte_invalidate(unsigned long slot, unsigned long va,
 
         DBG_LOW(" invalidate(va=%016lx, hash: %x)\n", va, slot);
 
-        want_v = hpte_encode_v(va, psize);
+        want_v = hpte_encode_v(va, psize, ssize);
         native_lock_hpte(hptep);
         hpte_v = hptep->v;
 
@@ -335,7 +333,7 @@ static void native_hpte_invalidate(unsigned long slot, unsigned long va,
                 hptep->v = 0;
 
         /* Invalidate the TLB */
-        tlbie(va, psize, local);
+        tlbie(va, psize, ssize, local);
 
         local_irq_restore(flags);
 }
@@ -345,7 +343,7 @@ static void native_hpte_invalidate(unsigned long slot, unsigned long va,
 #define LP_MASK(i) ((0xFF >> (i)) << LP_SHIFT)
 
 static void hpte_decode(struct hash_pte *hpte, unsigned long slot,
-                        int *psize, unsigned long *va)
+                        int *psize, int *ssize, unsigned long *va)
 {
         unsigned long hpte_r = hpte->r;
         unsigned long hpte_v = hpte->v;
@@ -401,6 +399,7 @@ static void hpte_decode(struct hash_pte *hpte, unsigned long slot,
 
         *va = avpn;
         *psize = size;
+        *ssize = hpte_v >> HPTE_V_SSIZE_SHIFT;
 }
 
 /*
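hpte_decode() now also recovers the segment size so that native_hpte_clear() can hand it to __tlbie(). The field sits in the uppermost bits of the HPTE's V word; a minimal sketch of the extraction, assuming the HPTE_V_SSIZE_SHIFT constant from the hash-MMU headers (the helper name is only for illustration):

static int hpte_ssize(unsigned long hpte_v)
{
        /* segment-size field occupies the top bits of the V word */
        return hpte_v >> HPTE_V_SSIZE_SHIFT;
}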
@@ -417,7 +416,7 @@ static void native_hpte_clear(void)
         struct hash_pte *hptep = htab_address;
         unsigned long hpte_v, va;
         unsigned long pteg_count;
-        int psize;
+        int psize, ssize;
 
         pteg_count = htab_hash_mask + 1;
 
@@ -443,9 +442,9 @@ static void native_hpte_clear(void)
                  * already hold the native_tlbie_lock.
                  */
                 if (hpte_v & HPTE_V_VALID) {
-                        hpte_decode(hptep, slot, &psize, &va);
+                        hpte_decode(hptep, slot, &psize, &ssize, &va);
                         hptep->v = 0;
-                        __tlbie(va, psize);
+                        __tlbie(va, psize, ssize);
                 }
         }
 
@@ -468,6 +467,7 @@ static void native_flush_hash_range(unsigned long number, int local)
         real_pte_t pte;
         struct ppc64_tlb_batch *batch = &__get_cpu_var(ppc64_tlb_batch);
         unsigned long psize = batch->psize;
+        int ssize = batch->ssize;
         int i;
 
         local_irq_save(flags);
@@ -477,14 +477,14 @@ static void native_flush_hash_range(unsigned long number, int local)
                 pte = batch->pte[i];
 
                 pte_iterate_hashed_subpages(pte, psize, va, index, shift) {
-                        hash = hpt_hash(va, shift);
+                        hash = hpt_hash(va, shift, ssize);
                         hidx = __rpte_to_hidx(pte, index);
                         if (hidx & _PTEIDX_SECONDARY)
                                 hash = ~hash;
                         slot = (hash & htab_hash_mask) * HPTES_PER_GROUP;
                         slot += hidx & _PTEIDX_GROUP_IX;
                         hptep = htab_address + slot;
-                        want_v = hpte_encode_v(va, psize);
+                        want_v = hpte_encode_v(va, psize, ssize);
                         native_lock_hpte(hptep);
                         hpte_v = hptep->v;
                         if (!HPTE_V_COMPARE(hpte_v, want_v) ||
@@ -504,7 +504,7 @@ static void native_flush_hash_range(unsigned long number, int local)
 
                         pte_iterate_hashed_subpages(pte, psize, va, index,
                                                     shift) {
-                                __tlbiel(va, psize);
+                                __tlbiel(va, psize, ssize);
                         } pte_iterate_hashed_end();
                 }
                 asm volatile("ptesync":::"memory");
@@ -521,7 +521,7 @@ static void native_flush_hash_range(unsigned long number, int local)
 
                         pte_iterate_hashed_subpages(pte, psize, va, index,
                                                     shift) {
-                                __tlbie(va, psize);
+                                __tlbie(va, psize, ssize);
                         } pte_iterate_hashed_end();
                 }
                 asm volatile("eieio; tlbsync; ptesync":::"memory");