-rw-r--r--	arch/powerpc/kernel/misc.S	49
-rw-r--r--	include/asm-powerpc/eeh.h	3
-rw-r--r--	include/asm-powerpc/io.h	43
-rw-r--r--	include/asm-powerpc/paca.h	1
-rw-r--r--	include/asm-powerpc/spinlock.h	17
-rw-r--r--	include/asm-ppc/io.h	20
6 files changed, 85 insertions(+), 48 deletions(-)
diff --git a/arch/powerpc/kernel/misc.S b/arch/powerpc/kernel/misc.S
index fc23040d5a26..f770805f1215 100644
--- a/arch/powerpc/kernel/misc.S
+++ b/arch/powerpc/kernel/misc.S
@@ -17,15 +17,6 @@
 
 	.text
 
-#ifdef CONFIG_PPC64
-#define IN_SYNC		twi	0,r5,0; isync
-#define EIEIO_32
-#define SYNC_64		sync
-#else /* CONFIG_PPC32 */
-#define IN_SYNC
-#define EIEIO_32	eieio
-#define SYNC_64
-#endif
 /*
  * Returns (address we are running at) - (address we were linked at)
  * for use before the text and data are mapped to KERNELBASE.
@@ -70,6 +61,7 @@ _GLOBAL(add_reloc_offset)
  * The *_ns versions don't do byte-swapping.
  */
 _GLOBAL(_insb)
+	sync
 	cmpwi	0,r5,0
 	mtctr	r5
 	subi	r4,r4,1
@@ -78,7 +70,8 @@ _GLOBAL(_insb)
 	eieio
 	stbu	r5,1(r4)
 	bdnz	00b
-	IN_SYNC
+	twi	0,r5,0
+	isync
 	blr
 
 _GLOBAL(_outsb)
@@ -86,14 +79,15 @@ _GLOBAL(_outsb)
 	mtctr	r5
 	subi	r4,r4,1
 	blelr-
+	sync
 00:	lbzu	r5,1(r4)
 	stb	r5,0(r3)
-	EIEIO_32
 	bdnz	00b
-	SYNC_64
+	sync
 	blr
 
 _GLOBAL(_insw)
+	sync
 	cmpwi	0,r5,0
 	mtctr	r5
 	subi	r4,r4,2
@@ -102,7 +96,8 @@ _GLOBAL(_insw)
 	eieio
 	sthu	r5,2(r4)
 	bdnz	00b
-	IN_SYNC
+	twi	0,r5,0
+	isync
 	blr
 
 _GLOBAL(_outsw)
@@ -110,14 +105,15 @@ _GLOBAL(_outsw)
 	mtctr	r5
 	subi	r4,r4,2
 	blelr-
+	sync
 00:	lhzu	r5,2(r4)
-	EIEIO_32
 	sthbrx	r5,0,r3
 	bdnz	00b
-	SYNC_64
+	sync
 	blr
 
 _GLOBAL(_insl)
+	sync
 	cmpwi	0,r5,0
 	mtctr	r5
 	subi	r4,r4,4
@@ -126,7 +122,8 @@ _GLOBAL(_insl)
 	eieio
 	stwu	r5,4(r4)
 	bdnz	00b
-	IN_SYNC
+	twi	0,r5,0
+	isync
 	blr
 
 _GLOBAL(_outsl)
@@ -134,17 +131,18 @@ _GLOBAL(_outsl)
 	mtctr	r5
 	subi	r4,r4,4
 	blelr-
+	sync
 00:	lwzu	r5,4(r4)
 	stwbrx	r5,0,r3
-	EIEIO_32
 	bdnz	00b
-	SYNC_64
+	sync
 	blr
 
 #ifdef CONFIG_PPC32
 _GLOBAL(__ide_mm_insw)
 #endif
 _GLOBAL(_insw_ns)
+	sync
 	cmpwi	0,r5,0
 	mtctr	r5
 	subi	r4,r4,2
@@ -153,7 +151,8 @@ _GLOBAL(_insw_ns)
 	eieio
 	sthu	r5,2(r4)
 	bdnz	00b
-	IN_SYNC
+	twi	0,r5,0
+	isync
 	blr
 
 #ifdef CONFIG_PPC32
@@ -164,17 +163,18 @@ _GLOBAL(_outsw_ns)
 	mtctr	r5
 	subi	r4,r4,2
 	blelr-
+	sync
 00:	lhzu	r5,2(r4)
 	sth	r5,0(r3)
-	EIEIO_32
 	bdnz	00b
-	SYNC_64
+	sync
 	blr
 
 #ifdef CONFIG_PPC32
 _GLOBAL(__ide_mm_insl)
 #endif
 _GLOBAL(_insl_ns)
+	sync
 	cmpwi	0,r5,0
 	mtctr	r5
 	subi	r4,r4,4
@@ -183,7 +183,8 @@ _GLOBAL(_insl_ns)
 	eieio
 	stwu	r5,4(r4)
 	bdnz	00b
-	IN_SYNC
+	twi	0,r5,0
+	isync
 	blr
 
 #ifdef CONFIG_PPC32
@@ -194,10 +195,10 @@ _GLOBAL(_outsl_ns)
 	mtctr	r5
 	subi	r4,r4,4
 	blelr-
+	sync
 00:	lwzu	r5,4(r4)
 	stw	r5,0(r3)
-	EIEIO_32
 	bdnz	00b
-	SYNC_64
+	sync
 	blr
 
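For reference, a minimal C rendering of the barrier pattern the reworked string-I/O routines above now follow (a sync before touching the device, eieio between the device access and the memory store, and a trap-on-value plus isync after the final load, replacing the old IN_SYNC/EIEIO_32/SYNC_64 macros). This is only an illustrative sketch assuming kernel types (u8, __iomem); the patch itself keeps these routines in assembly:

	/* Illustrative sketch of the new _insb ordering, not part of the patch. */
	static inline void insb_like(const volatile u8 __iomem *port, u8 *buf, long count)
	{
		u8 val = 0;

		__asm__ __volatile__("sync" : : : "memory");	/* order after all prior accesses */
		while (count-- > 0) {
			val = *port;				/* lbz from the device port */
			__asm__ __volatile__("eieio" : : : "memory");
			*buf++ = val;				/* stbu into the memory buffer */
		}
		/* trap-on-value + isync: stall until the last device load has completed */
		__asm__ __volatile__("twi 0,%0,0; isync" : : "r" (val));
	}
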
diff --git a/include/asm-powerpc/eeh.h b/include/asm-powerpc/eeh.h
index 4df3e80118f4..6a784396660b 100644
--- a/include/asm-powerpc/eeh.h
+++ b/include/asm-powerpc/eeh.h
@@ -205,6 +205,7 @@ static inline void eeh_memset_io(volatile void __iomem *addr, int c,
 	lc |= lc << 8;
 	lc |= lc << 16;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && !EEH_CHECK_ALIGN(p, 4)) {
 		*((volatile u8 *)p) = c;
 		p++;
@@ -229,6 +230,7 @@ static inline void eeh_memcpy_fromio(void *dest, const volatile void __iomem *sr
 	void *destsave = dest;
 	unsigned long nsave = n;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && (!EEH_CHECK_ALIGN(vsrc, 4) || !EEH_CHECK_ALIGN(dest, 4))) {
 		*((u8 *)dest) = *((volatile u8 *)vsrc);
 		__asm__ __volatile__ ("eieio" : : : "memory");
@@ -266,6 +268,7 @@ static inline void eeh_memcpy_toio(volatile void __iomem *dest, const void *src,
 {
 	void *vdest = (void __force *) dest;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && (!EEH_CHECK_ALIGN(vdest, 4) || !EEH_CHECK_ALIGN(src, 4))) {
 		*((volatile u8 *)vdest) = *((u8 *)src);
 		src++;
diff --git a/include/asm-powerpc/io.h b/include/asm-powerpc/io.h
index 36c4c34bf565..212428db0d8b 100644
--- a/include/asm-powerpc/io.h
+++ b/include/asm-powerpc/io.h
@@ -19,6 +19,7 @@ extern int check_legacy_ioport(unsigned long base_port);
 #include <linux/compiler.h>
 #include <asm/page.h>
 #include <asm/byteorder.h>
+#include <asm/paca.h>
 #ifdef CONFIG_PPC_ISERIES
 #include <asm/iseries/iseries_io.h>
 #endif
@@ -162,7 +163,11 @@ extern void _outsw_ns(volatile u16 __iomem *port, const void *buf, int ns);
 extern void _insl_ns(volatile u32 __iomem *port, void *buf, int nl);
 extern void _outsl_ns(volatile u32 __iomem *port, const void *buf, int nl);
 
-#define mmiowb()
+static inline void mmiowb(void)
+{
+	__asm__ __volatile__ ("sync" : : : "memory");
+	get_paca()->io_sync = 0;
+}
 
 /*
  * output pause versions need a delay at least for the
@@ -278,22 +283,23 @@ static inline int in_8(const volatile unsigned char __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lbz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lbz%U1%X1 %0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_8(volatile unsigned char __iomem *addr, int val)
 {
-	__asm__ __volatile__("stb%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; stb%U0%X0 %1,%0"
 			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline int in_le16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhbrx %0,0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lhbrx %0,0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "r" (addr), "m" (*addr));
 	return ret;
 }
@@ -302,28 +308,30 @@ static inline int in_be16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lhz%U1%X1 %0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_le16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sthbrx %1,0,%2; sync"
+	__asm__ __volatile__("sync; sthbrx %1,0,%2"
 			     : "=m" (*addr) : "r" (val), "r" (addr));
+	get_paca()->io_sync = 1;
 }
 
 static inline void out_be16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sth%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; sth%U0%X0 %1,%0"
 			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline unsigned in_le32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwbrx %0,0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lwbrx %0,0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "r" (addr), "m" (*addr));
 	return ret;
 }
@@ -332,21 +340,23 @@ static inline unsigned in_be32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lwz%U1%X1 %0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_le32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stwbrx %1,0,%2; sync" : "=m" (*addr)
+	__asm__ __volatile__("sync; stwbrx %1,0,%2" : "=m" (*addr)
 			     : "r" (val), "r" (addr));
+	get_paca()->io_sync = 1;
 }
 
 static inline void out_be32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stw%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; stw%U0%X0 %1,%0"
 			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline unsigned long in_le64(const volatile unsigned long __iomem *addr)
@@ -354,6 +364,7 @@ static inline unsigned long in_le64(const volatile unsigned long __iomem *addr)
 	unsigned long tmp, ret;
 
 	__asm__ __volatile__(
+			     "sync\n"
 			     "ld %1,0(%2)\n"
 			     "twi 0,%1,0\n"
 			     "isync\n"
@@ -372,7 +383,7 @@ static inline unsigned long in_be64(const volatile unsigned long __iomem *addr)
 {
 	unsigned long ret;
 
-	__asm__ __volatile__("ld%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; ld%U1%X1 %0,%1; twi 0,%0,0; isync"
 			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
@@ -389,14 +400,16 @@ static inline void out_le64(volatile unsigned long __iomem *addr, unsigned long
389 "rldicl %1,%1,32,0\n" 400 "rldicl %1,%1,32,0\n"
390 "rlwimi %0,%1,8,8,31\n" 401 "rlwimi %0,%1,8,8,31\n"
391 "rlwimi %0,%1,24,16,23\n" 402 "rlwimi %0,%1,24,16,23\n"
392 "std %0,0(%3)\n" 403 "sync\n"
393 "sync" 404 "std %0,0(%3)"
394 : "=&r" (tmp) , "=&r" (val) : "1" (val) , "b" (addr) , "m" (*addr)); 405 : "=&r" (tmp) , "=&r" (val) : "1" (val) , "b" (addr) , "m" (*addr));
406 get_paca()->io_sync = 1;
395} 407}
396 408
397static inline void out_be64(volatile unsigned long __iomem *addr, unsigned long val) 409static inline void out_be64(volatile unsigned long __iomem *addr, unsigned long val)
398{ 410{
399 __asm__ __volatile__("std%U0%X0 %1,%0; sync" : "=m" (*addr) : "r" (val)); 411 __asm__ __volatile__("sync; std%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
412 get_paca()->io_sync = 1;
400} 413}
401 414
402#ifndef CONFIG_PPC_ISERIES 415#ifndef CONFIG_PPC_ISERIES
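Taken together, the changes in this header move the sync to the front of every load/store accessor and make each store set the per-CPU paca->io_sync flag, while mmiowb() becomes a real sync that clears the flag. A hypothetical driver fragment illustrating how mmiowb() would be used to order MMIO explicitly before dropping a lock; the structure and field names below are illustrative only, not part of this patch:

	struct my_dev {				/* illustrative only */
		spinlock_t lock;
		volatile unsigned __iomem *cmd_reg;
	};

	static void post_command(struct my_dev *dev, int cmd)
	{
		spin_lock(&dev->lock);
		out_be32(dev->cmd_reg, cmd);	/* "sync; stw ..." then paca->io_sync = 1 */
		mmiowb();			/* explicit sync; clears io_sync */
		spin_unlock(&dev->lock);	/* unlock finds io_sync == 0, no extra sync */
	}
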
diff --git a/include/asm-powerpc/paca.h b/include/asm-powerpc/paca.h
index 2d4585f06209..3d5d590bc4b0 100644
--- a/include/asm-powerpc/paca.h
+++ b/include/asm-powerpc/paca.h
@@ -93,6 +93,7 @@ struct paca_struct {
 	u64 saved_r1;			/* r1 save for RTAS calls */
 	u64 saved_msr;			/* MSR saved here by enter_rtas */
 	u8 proc_enabled;		/* irq soft-enable flag */
+	u8 io_sync;			/* writel() needs spin_unlock sync */
 
 	/* Stuff for accurate time accounting */
 	u64 user_time;			/* accumulated usermode TB ticks */
diff --git a/include/asm-powerpc/spinlock.h b/include/asm-powerpc/spinlock.h
index 895cb6d3a42a..c31e4382a775 100644
--- a/include/asm-powerpc/spinlock.h
+++ b/include/asm-powerpc/spinlock.h
@@ -36,6 +36,19 @@
 #define LOCK_TOKEN	1
 #endif
 
+#if defined(CONFIG_PPC64) && defined(CONFIG_SMP)
+#define CLEAR_IO_SYNC	(get_paca()->io_sync = 0)
+#define SYNC_IO		do {						\
+				if (unlikely(get_paca()->io_sync)) {	\
+					mb();				\
+					get_paca()->io_sync = 0;	\
+				}					\
+			} while (0)
+#else
+#define CLEAR_IO_SYNC
+#define SYNC_IO
+#endif
+
 /*
  * This returns the old value in the lock, so we succeeded
  * in getting the lock if the return value is 0.
@@ -61,6 +74,7 @@ static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
 
 static int __inline__ __raw_spin_trylock(raw_spinlock_t *lock)
 {
+	CLEAR_IO_SYNC;
 	return __spin_trylock(lock) == 0;
 }
 
@@ -91,6 +105,7 @@ extern void __rw_yield(raw_rwlock_t *lock);
 
 static void __inline__ __raw_spin_lock(raw_spinlock_t *lock)
 {
+	CLEAR_IO_SYNC;
 	while (1) {
 		if (likely(__spin_trylock(lock) == 0))
 			break;
@@ -107,6 +122,7 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 {
 	unsigned long flags_dis;
 
+	CLEAR_IO_SYNC;
 	while (1) {
 		if (likely(__spin_trylock(lock) == 0))
 			break;
@@ -124,6 +140,7 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 
 static __inline__ void __raw_spin_unlock(raw_spinlock_t *lock)
 {
+	SYNC_IO;
 	__asm__ __volatile__("# __raw_spin_unlock\n\t"
 				LWSYNC_ON_SMP: : :"memory");
 	lock->slock = 0;
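These spinlock hooks are what allow the io.h accessors above to drop their trailing sync: acquiring a lock clears paca->io_sync, every MMIO store sets it, and __raw_spin_unlock issues a full barrier (via mb()) only when an MMIO store happened inside the critical section. A hypothetical driver fragment showing the ordering this provides; the names are illustrative only:

	struct ring_dev {			/* illustrative only */
		spinlock_t lock;
		volatile unsigned __iomem *doorbell;
		unsigned int head;
	};

	static void push_and_ring(struct ring_dev *dev, unsigned int val)
	{
		spin_lock(&dev->lock);		/* CLEAR_IO_SYNC: io_sync = 0 */
		dev->head = val;		/* ordinary cacheable store */
		out_le32(dev->doorbell, val);	/* "sync; stwbrx ..."; io_sync = 1 */
		spin_unlock(&dev->lock);	/* SYNC_IO: io_sync is set, so mb() (sync)
						   runs before the lock word is cleared */
	}
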
diff --git a/include/asm-ppc/io.h b/include/asm-ppc/io.h
index 89c6f1bc3aab..680555be22ec 100644
--- a/include/asm-ppc/io.h
+++ b/include/asm-ppc/io.h
@@ -63,7 +63,7 @@ extern inline int in_8(const volatile unsigned char __iomem *addr)
 	int ret;
 
 	__asm__ __volatile__(
-		"lbz%U1%X1 %0,%1;\n"
+		"sync; lbz%U1%X1 %0,%1;\n"
 		"twi 0,%0,0;\n"
 		"isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -78,7 +78,7 @@ extern inline int in_le16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhbrx %0,0,%1;\n"
+	__asm__ __volatile__("sync; lhbrx %0,0,%1;\n"
 			     "twi 0,%0,0;\n"
 			     "isync" : "=r" (ret) :
 			     "r" (addr), "m" (*addr));
@@ -89,7 +89,7 @@ extern inline int in_be16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhz%U1%X1 %0,%1;\n"
+	__asm__ __volatile__("sync; lhz%U1%X1 %0,%1;\n"
 			     "twi 0,%0,0;\n"
 			     "isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -97,20 +97,20 @@ extern inline int in_be16(const volatile unsigned short __iomem *addr)
 
 extern inline void out_le16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sthbrx %1,0,%2; eieio" : "=m" (*addr) :
+	__asm__ __volatile__("sync; sthbrx %1,0,%2" : "=m" (*addr) :
 			     "r" (val), "r" (addr));
 }
 
 extern inline void out_be16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sth%U0%X0 %1,%0; eieio" : "=m" (*addr) : "r" (val));
+	__asm__ __volatile__("sync; sth%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
 }
 
 extern inline unsigned in_le32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwbrx %0,0,%1;\n"
+	__asm__ __volatile__("sync; lwbrx %0,0,%1;\n"
 			     "twi 0,%0,0;\n"
 			     "isync" : "=r" (ret) :
 			     "r" (addr), "m" (*addr));
@@ -121,7 +121,7 @@ extern inline unsigned in_be32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwz%U1%X1 %0,%1;\n"
+	__asm__ __volatile__("sync; lwz%U1%X1 %0,%1;\n"
 			     "twi 0,%0,0;\n"
 			     "isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -129,13 +129,13 @@ extern inline unsigned in_be32(const volatile unsigned __iomem *addr)
 
 extern inline void out_le32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stwbrx %1,0,%2; eieio" : "=m" (*addr) :
+	__asm__ __volatile__("sync; stwbrx %1,0,%2" : "=m" (*addr) :
 			     "r" (val), "r" (addr));
 }
 
 extern inline void out_be32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stw%U0%X0 %1,%0; eieio" : "=m" (*addr) : "r" (val));
+	__asm__ __volatile__("sync; stw%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
 }
 #if defined (CONFIG_8260_PCI9)
 #define readb(addr) in_8((volatile u8 *)(addr))
@@ -259,6 +259,7 @@ extern __inline__ unsigned int name(unsigned int port) \
 { \
 	unsigned int x; \
 	__asm__ __volatile__( \
+		"sync\n" \
 		"0:" op " %0,0,%1\n" \
 		"1: twi 0,%0,0\n" \
 		"2: isync\n" \
@@ -284,6 +285,7 @@ extern __inline__ unsigned int name(unsigned int port) \
 extern __inline__ void name(unsigned int val, unsigned int port) \
 { \
 	__asm__ __volatile__( \
+		"sync\n" \
 		"0:" op " %0,0,%1\n" \
 		"1: sync\n" \
 		"2:\n" \