Diffstat (limited to 'arch/powerpc/mm/hash_native_64.c')
 arch/powerpc/mm/hash_native_64.c | 19 +++++++++----------
 1 file changed, 9 insertions(+), 10 deletions(-)
diff --git a/arch/powerpc/mm/hash_native_64.c b/arch/powerpc/mm/hash_native_64.c
index 056d23a1b105..784a400e0781 100644
--- a/arch/powerpc/mm/hash_native_64.c
+++ b/arch/powerpc/mm/hash_native_64.c
@@ -37,7 +37,7 @@
 
 #define HPTE_LOCK_BIT 3
 
-static DEFINE_SPINLOCK(native_tlbie_lock);
+static DEFINE_RAW_SPINLOCK(native_tlbie_lock);
 
 static inline void __tlbie(unsigned long va, int psize, int ssize)
 {
@@ -104,7 +104,7 @@ static inline void tlbie(unsigned long va, int psize, int ssize, int local)
         if (use_local)
                 use_local = mmu_psize_defs[psize].tlbiel;
         if (lock_tlbie && !use_local)
-                spin_lock(&native_tlbie_lock);
+                raw_spin_lock(&native_tlbie_lock);
         asm volatile("ptesync": : :"memory");
         if (use_local) {
                 __tlbiel(va, psize, ssize);
@@ -114,7 +114,7 @@ static inline void tlbie(unsigned long va, int psize, int ssize, int local)
                 asm volatile("eieio; tlbsync; ptesync": : :"memory");
         }
         if (lock_tlbie && !use_local)
-                spin_unlock(&native_tlbie_lock);
+                raw_spin_unlock(&native_tlbie_lock);
 }
 
 static inline void native_lock_hpte(struct hash_pte *hptep)
@@ -122,7 +122,7 @@ static inline void native_lock_hpte(struct hash_pte *hptep)
         unsigned long *word = &hptep->v;
 
         while (1) {
-                if (!test_and_set_bit(HPTE_LOCK_BIT, word))
+                if (!test_and_set_bit_lock(HPTE_LOCK_BIT, word))
                         break;
                 while(test_bit(HPTE_LOCK_BIT, word))
                         cpu_relax();
@@ -133,8 +133,7 @@ static inline void native_unlock_hpte(struct hash_pte *hptep)
 {
         unsigned long *word = &hptep->v;
 
-        asm volatile("lwsync":::"memory");
-        clear_bit(HPTE_LOCK_BIT, word);
+        clear_bit_unlock(HPTE_LOCK_BIT, word);
 }
 
 static long native_hpte_insert(unsigned long hpte_group, unsigned long va,
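
Note: the two hunks above replace the open-coded HPTE lock with the kernel's acquire/release bitops. A minimal sketch of how the helpers read after the change (illustrative only; it assumes the standard semantics of test_and_set_bit_lock()/clear_bit_unlock(), which are not shown in this diff):

static inline void native_lock_hpte(struct hash_pte *hptep)
{
        unsigned long *word = &hptep->v;

        while (1) {
                /* test_and_set_bit_lock() implies acquire ordering on success */
                if (!test_and_set_bit_lock(HPTE_LOCK_BIT, word))
                        break;
                /* busy-wait without atomics until the bit looks clear again */
                while (test_bit(HPTE_LOCK_BIT, word))
                        cpu_relax();
        }
}

static inline void native_unlock_hpte(struct hash_pte *hptep)
{
        unsigned long *word = &hptep->v;

        /*
         * clear_bit_unlock() provides the release barrier itself, which is
         * why the explicit lwsync before clear_bit() can be dropped.
         */
        clear_bit_unlock(HPTE_LOCK_BIT, word);
}
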
@@ -434,7 +433,7 @@ static void native_hpte_clear(void)
         /* we take the tlbie lock and hold it.  Some hardware will
          * deadlock if we try to tlbie from two processors at once.
          */
-        spin_lock(&native_tlbie_lock);
+        raw_spin_lock(&native_tlbie_lock);
 
         slots = pteg_count * HPTES_PER_GROUP;
 
@@ -458,7 +457,7 @@ static void native_hpte_clear(void)
         }
 
         asm volatile("eieio; tlbsync; ptesync":::"memory");
-        spin_unlock(&native_tlbie_lock);
+        raw_spin_unlock(&native_tlbie_lock);
         local_irq_restore(flags);
 }
 
@@ -521,7 +520,7 @@ static void native_flush_hash_range(unsigned long number, int local)
                 int lock_tlbie = !cpu_has_feature(CPU_FTR_LOCKLESS_TLBIE);
 
                 if (lock_tlbie)
-                        spin_lock(&native_tlbie_lock);
+                        raw_spin_lock(&native_tlbie_lock);
 
                 asm volatile("ptesync":::"memory");
                 for (i = 0; i < number; i++) {
@@ -536,7 +535,7 @@ static void native_flush_hash_range(unsigned long number, int local)
                 asm volatile("eieio; tlbsync; ptesync":::"memory");
 
                 if (lock_tlbie)
-                        spin_unlock(&native_tlbie_lock);
+                        raw_spin_unlock(&native_tlbie_lock);
         }
 
         local_irq_restore(flags);
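
Note: the remaining hunks convert native_tlbie_lock from a spinlock_t to a raw_spinlock_t. A plausible reading (an assumption, not stated in this diff) is that these paths run with interrupts disabled and issue the global tlbie broadcast directly, so the lock must keep truly spinning even on configurations where a regular spinlock may sleep (e.g. PREEMPT_RT). A minimal sketch of the resulting usage pattern, with broadcast_invalidate_sketch() as a hypothetical stand-in for the tlbie paths:

static DEFINE_RAW_SPINLOCK(native_tlbie_lock);

/* Hypothetical helper mirroring the locking pattern used in the patch. */
static void broadcast_invalidate_sketch(int lock_tlbie)
{
        /* serialize global tlbie on hardware that deadlocks on concurrent tlbie */
        if (lock_tlbie)
                raw_spin_lock(&native_tlbie_lock);

        /* ... issue the tlbie / eieio; tlbsync; ptesync sequence here ... */

        if (lock_tlbie)
                raw_spin_unlock(&native_tlbie_lock);
}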