 arch/powerpc/kernel/io.c         | 12 ++++++------
 arch/powerpc/mm/hash_native_64.c |  2 +-
 arch/powerpc/mm/stab.c           |  4 ++--
 include/asm-powerpc/system.h     |  2 +-
 4 files changed, 10 insertions(+), 10 deletions(-)
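
For context: every hunk below replaces an open-coded "eieio" asm statement with the eieio() helper. As a rough, hedged sketch (the real definition lives in include/asm-powerpc/system.h and may be an inline function rather than a macro), the helper boils down to the same instruction plus a compiler barrier:

/* Hedged sketch, not the verbatim kernel definition: eieio() as a thin
 * wrapper around the "Enforce In-order Execution of I/O" instruction,
 * with a "memory" clobber so the compiler cannot reorder accesses
 * across it. */
#define eieio()	__asm__ __volatile__ ("eieio" : : : "memory")
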
diff --git a/arch/powerpc/kernel/io.c b/arch/powerpc/kernel/io.c
index 34ae11494ddc..e31aca9208eb 100644
--- a/arch/powerpc/kernel/io.c
+++ b/arch/powerpc/kernel/io.c
@@ -35,7 +35,7 @@ void _insb(const volatile u8 __iomem *port, void *buf, long count)
 	asm volatile("sync");
 	do {
 		tmp = *port;
-		asm volatile("eieio");
+		eieio();
 		*tbuf++ = tmp;
 	} while (--count != 0);
 	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
@@ -66,7 +66,7 @@ void _insw_ns(const volatile u16 __iomem *port, void *buf, long count)
 	asm volatile("sync");
 	do {
 		tmp = *port;
-		asm volatile("eieio");
+		eieio();
 		*tbuf++ = tmp;
 	} while (--count != 0);
 	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
@@ -97,7 +97,7 @@ void _insl_ns(const volatile u32 __iomem *port, void *buf, long count)
 	asm volatile("sync");
 	do {
 		tmp = *port;
-		asm volatile("eieio");
+		eieio();
 		*tbuf++ = tmp;
 	} while (--count != 0);
 	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
@@ -155,21 +155,21 @@ void _memcpy_fromio(void *dest, const volatile void __iomem *src,
 	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && (!IO_CHECK_ALIGN(vsrc, 4) || !IO_CHECK_ALIGN(dest, 4))) {
 		*((u8 *)dest) = *((volatile u8 *)vsrc);
-		__asm__ __volatile__ ("eieio" : : : "memory");
+		eieio();
 		vsrc++;
 		dest++;
 		n--;
 	}
 	while(n > 4) {
 		*((u32 *)dest) = *((volatile u32 *)vsrc);
-		__asm__ __volatile__ ("eieio" : : : "memory");
+		eieio();
 		vsrc += 4;
 		dest += 4;
 		n -= 4;
 	}
 	while(n) {
 		*((u8 *)dest) = *((volatile u8 *)vsrc);
-		__asm__ __volatile__ ("eieio" : : : "memory");
+		eieio();
 		vsrc++;
 		dest++;
 		n--;
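
The three hunks above (_insb, _insw_ns, _insl_ns, plus _memcpy_fromio) all follow the same MMIO read-ordering pattern: a full sync before the first device access, an eieio() between each device load and the store into the caller's buffer, and, in the _ins* routines, a trailing twi/isync pair that stalls until the final load has actually returned data. A minimal, hedged sketch of that pattern, with an illustrative function name that does not exist in the kernel:

/* Illustrative only: condenses the ordering used by _insb() and
 * friends into a single read. */
static inline u8 mmio_read_one(const volatile u8 __iomem *port)
{
	u8 val;

	asm volatile("sync");		/* finish prior accesses first */
	val = *port;			/* load from the device */
	eieio();			/* order the load before later stores */
	asm volatile("twi 0,%0,0; isync" : : "r" (val));	/* wait for the load data */
	return val;
}
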
diff --git a/arch/powerpc/mm/hash_native_64.c b/arch/powerpc/mm/hash_native_64.c
index 823fa63e6485..6ba9b47e55af 100644
--- a/arch/powerpc/mm/hash_native_64.c
+++ b/arch/powerpc/mm/hash_native_64.c
@@ -163,7 +163,7 @@ static long native_hpte_insert(unsigned long hpte_group, unsigned long va,
 
 	hptep->r = hpte_r;
 	/* Guarantee the second dword is visible before the valid bit */
-	__asm__ __volatile__ ("eieio" : : : "memory");
+	eieio();
 	/*
 	 * Now set the first dword including the valid bit
 	 * NOTE: this also unlocks the hpte
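
The ordering requirement in native_hpte_insert() is the classic publish pattern: the second HPTE dword (the payload) must be visible before the first dword, which carries the valid bit, is written, otherwise the MMU could walk a valid-looking entry with stale contents. A hedged sketch with illustrative type and field names (the real ones live in the powerpc MMU headers):

/* Illustrative sketch of the two-dword publish order; not the kernel's
 * actual types. */
struct hpte_sketch {
	unsigned long v;	/* first dword: valid bit and flags (also acts as the lock) */
	unsigned long r;	/* second dword: real page number and permissions */
};

static void hpte_publish(struct hpte_sketch *hptep,
			 unsigned long hpte_v, unsigned long hpte_r)
{
	hptep->r = hpte_r;	/* write the payload first */
	eieio();		/* make it visible before the valid bit */
	hptep->v = hpte_v;	/* set valid; this also unlocks the entry */
}
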
diff --git a/arch/powerpc/mm/stab.c b/arch/powerpc/mm/stab.c
index 132c6bc66ce1..28492bbdee8e 100644
--- a/arch/powerpc/mm/stab.c
+++ b/arch/powerpc/mm/stab.c
@@ -55,7 +55,7 @@ static int make_ste(unsigned long stab, unsigned long esid, unsigned long vsid)
 	for (entry = 0; entry < 8; entry++, ste++) {
 		if (!(ste->esid_data & STE_ESID_V)) {
 			ste->vsid_data = vsid_data;
-			asm volatile("eieio":::"memory");
+			eieio();
 			ste->esid_data = esid_data;
 			return (global_entry | entry);
 		}
@@ -101,7 +101,7 @@ static int make_ste(unsigned long stab, unsigned long esid, unsigned long vsid)
 	asm volatile("sync" : : : "memory");    /* Order update */
 
 	castout_ste->vsid_data = vsid_data;
-	asm volatile("eieio" : : : "memory");   /* Order update */
+	eieio();				/* Order update */
 	castout_ste->esid_data = esid_data;
 
 	asm volatile("slbie %0" : : "r" (old_esid << SID_SHIFT));
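
make_ste() relies on the same payload-then-valid ordering, with one extra step on the castout path shown above: after the new vsid_data/esid_data pair is published, an slbie drops any translation the processor may still hold for the old ESID. A hedged sketch of that sequence, with names that do not match the kernel's exactly:

/* Illustrative sketch of the castout sequence. */
struct ste_sketch {
	unsigned long esid_data;	/* ESID plus valid bit */
	unsigned long vsid_data;	/* VSID plus protection bits */
};

static void ste_replace(struct ste_sketch *ste, unsigned long vsid_data,
			unsigned long esid_data, unsigned long old_esid)
{
	asm volatile("sync" : : : "memory");	/* order the earlier invalidation of the old entry */
	ste->vsid_data = vsid_data;		/* payload first */
	eieio();				/* payload visible before the valid bit */
	ste->esid_data = esid_data;		/* publish the new entry */
	asm volatile("slbie %0" : : "r" (old_esid << SID_SHIFT));	/* drop cached translation for the old ESID */
}
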
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index 09621f611dbc..eff3de953712 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -43,7 +43,7 @@
 #ifdef CONFIG_SMP
 #define smp_mb()	mb()
 #define smp_rmb()	rmb()
-#define smp_wmb()	__asm__ __volatile__ ("eieio" : : : "memory")
+#define smp_wmb()	eieio()
 #define smp_read_barrier_depends()	read_barrier_depends()
 #else
 #define smp_mb()	barrier()
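
With the system.h hunk, smp_wmb() on SMP powerpc now reads as eieio() directly, while the UP build keeps the plain compiler barrier(). A hedged usage sketch of the store-ordering guarantee it provides (names below are illustrative, and the reader side would pair this with smp_rmb()):

/* Illustrative publish pattern, not kernel code. */
static int payload;
static int ready;

static void publish(int value)
{
	payload = value;	/* 1: write the data */
	smp_wmb();		/* 2: eieio() on SMP powerpc: data before flag */
	ready = 1;		/* 3: signal the reader */
}
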