author		Linus Torvalds <torvalds@g5.osdl.org>	2006-09-13 11:01:41 -0400
committer	Linus Torvalds <torvalds@g5.osdl.org>	2006-09-13 11:01:41 -0400
commit		63b98080daa35f0d682db04f4fb7ada010888752 (patch)
tree		213e124c89a51bbb99dd8e07fb6eacc970bbdc99 /include
parent		1883c5aba9973331e3ff0050e05707fe8e84fe0d (diff)
parent		eeac5c142b8687e35780b11b54b4c2f95b1a2436 (diff)
Merge branch 'merge' of git://git.kernel.org/pub/scm/linux/kernel/git/paulus/powerpc
* 'merge' of git://git.kernel.org/pub/scm/linux/kernel/git/paulus/powerpc:
[POWERPC] Fix G5 DART (IOMMU) race causing occasional data corruption
[POWERPC] Fix MMIO ops to provide expected barrier behaviour
[POWERPC] Fix interrupt clearing in kdump shutdown sequence
[POWERPC] update prep_defconfig
[POWERPC] kdump: Support kernels having 64k page size.
[POWERPC] Implement PowerPC futex_atomic_cmpxchg_inatomic().
[POWERPC] Add new, missing argument to of_irq_map_raw() for 86xx.
[POWERPC] Update defconfigs
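The MMIO barrier fix is the change most visible in the headers below: every out_*/write* store now begins with a full sync and sets get_paca()->io_sync, and the spinlock unlock path (the new SYNC_IO macro) issues another sync only when that flag is set, so an MMIO store performed while holding a spinlock reaches the device before another CPU can acquire the lock. A minimal sketch of the driver-side pattern this is meant to cover follows; the device structure, register offset, and function names are hypothetical, used only to illustrate the ordering guarantee, and are not part of this commit.

/* Sketch only: hypothetical driver code illustrating the ordering that
 * the new io_sync/SYNC_IO scheme provides on 64-bit powerpc SMP. */
#include <linux/spinlock.h>
#include <asm/io.h>

struct fake_dev {			/* hypothetical device */
	spinlock_t	lock;
	void __iomem	*regs;
};

static void fake_dev_kick(struct fake_dev *dev)
{
	spin_lock(&dev->lock);		/* CLEAR_IO_SYNC: io_sync = 0      */
	writel(1, dev->regs + 0x10);	/* "sync; stwbrx ...", io_sync = 1 */
	spin_unlock(&dev->lock);	/* SYNC_IO: mb() since io_sync set */
}

Drivers that already call mmiowb() before the unlock keep the same behaviour: mmiowb() itself becomes a sync that clears io_sync, making the unlock-time barrier redundant in that case.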
Diffstat (limited to 'include')
-rw-r--r--	include/asm-powerpc/eeh.h	3
-rw-r--r--	include/asm-powerpc/futex.h	28
-rw-r--r--	include/asm-powerpc/io.h	43
-rw-r--r--	include/asm-powerpc/kdump.h	2
-rw-r--r--	include/asm-powerpc/paca.h	1
-rw-r--r--	include/asm-powerpc/spinlock.h	17
-rw-r--r--	include/asm-ppc/io.h	20
7 files changed, 88 insertions, 26 deletions
diff --git a/include/asm-powerpc/eeh.h b/include/asm-powerpc/eeh.h
index 4df3e80118f..6a784396660 100644
--- a/include/asm-powerpc/eeh.h
+++ b/include/asm-powerpc/eeh.h
@@ -205,6 +205,7 @@ static inline void eeh_memset_io(volatile void __iomem *addr, int c,
 	lc |= lc << 8;
 	lc |= lc << 16;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && !EEH_CHECK_ALIGN(p, 4)) {
 		*((volatile u8 *)p) = c;
 		p++;
@@ -229,6 +230,7 @@ static inline void eeh_memcpy_fromio(void *dest, const volatile void __iomem *sr
 	void *destsave = dest;
 	unsigned long nsave = n;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && (!EEH_CHECK_ALIGN(vsrc, 4) || !EEH_CHECK_ALIGN(dest, 4))) {
 		*((u8 *)dest) = *((volatile u8 *)vsrc);
 		__asm__ __volatile__ ("eieio" : : : "memory");
@@ -266,6 +268,7 @@ static inline void eeh_memcpy_toio(volatile void __iomem *dest, const void *src,
 {
 	void *vdest = (void __force *) dest;
 
+	__asm__ __volatile__ ("sync" : : : "memory");
 	while(n && (!EEH_CHECK_ALIGN(vdest, 4) || !EEH_CHECK_ALIGN(src, 4))) {
 		*((volatile u8 *)vdest) = *((u8 *)src);
 		src++;
diff --git a/include/asm-powerpc/futex.h b/include/asm-powerpc/futex.h
index f1b3c00bc1c..936422e5489 100644
--- a/include/asm-powerpc/futex.h
+++ b/include/asm-powerpc/futex.h
@@ -84,7 +84,33 @@ static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
 static inline int
 futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 {
-	return -ENOSYS;
+	int prev;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	lwarx	%0,0,%2		# futex_atomic_cmpxchg_inatomic\n\
+	cmpw	0,%0,%3\n\
+	bne-	3f\n"
+	PPC405_ERR77(0,%2)
+"2:	stwcx.	%4,0,%2\n\
+	bne-	1b\n"
+	ISYNC_ON_SMP
+"3:	.section .fixup,\"ax\"\n\
+4:	li	%0,%5\n\
+	b	3b\n\
+	.previous\n\
+	.section __ex_table,\"a\"\n\
+	.align 3\n\
+	" PPC_LONG "1b,4b,2b,4b\n\
+	.previous" \
+	: "=&r" (prev), "+m" (*uaddr)
+	: "r" (uaddr), "r" (oldval), "r" (newval), "i" (-EFAULT)
+	: "cc", "memory");
+
+	return prev;
 }
 
 #endif /* __KERNEL__ */
diff --git a/include/asm-powerpc/io.h b/include/asm-powerpc/io.h
index 36c4c34bf56..212428db0d8 100644
--- a/include/asm-powerpc/io.h
+++ b/include/asm-powerpc/io.h
@@ -19,6 +19,7 @@ extern int check_legacy_ioport(unsigned long base_port);
 #include <linux/compiler.h>
 #include <asm/page.h>
 #include <asm/byteorder.h>
+#include <asm/paca.h>
 #ifdef CONFIG_PPC_ISERIES
 #include <asm/iseries/iseries_io.h>
 #endif
@@ -162,7 +163,11 @@ extern void _outsw_ns(volatile u16 __iomem *port, const void *buf, int ns);
 extern void _insl_ns(volatile u32 __iomem *port, void *buf, int nl);
 extern void _outsl_ns(volatile u32 __iomem *port, const void *buf, int nl);
 
-#define mmiowb()
+static inline void mmiowb(void)
+{
+	__asm__ __volatile__ ("sync" : : : "memory");
+	get_paca()->io_sync = 0;
+}
 
 /*
  * output pause versions need a delay at least for the
@@ -278,22 +283,23 @@ static inline int in_8(const volatile unsigned char __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lbz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lbz%U1%X1 %0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_8(volatile unsigned char __iomem *addr, int val)
 {
-	__asm__ __volatile__("stb%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; stb%U0%X0 %1,%0"
			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline int in_le16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhbrx %0,0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lhbrx %0,0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "r" (addr), "m" (*addr));
 	return ret;
 }
@@ -302,28 +308,30 @@ static inline int in_be16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lhz%U1%X1 %0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_le16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sthbrx %1,0,%2; sync"
+	__asm__ __volatile__("sync; sthbrx %1,0,%2"
			     : "=m" (*addr) : "r" (val), "r" (addr));
+	get_paca()->io_sync = 1;
 }
 
 static inline void out_be16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sth%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; sth%U0%X0 %1,%0"
			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline unsigned in_le32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwbrx %0,0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lwbrx %0,0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "r" (addr), "m" (*addr));
 	return ret;
 }
@@ -332,21 +340,23 @@ static inline unsigned in_be32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwz%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; lwz%U1%X1 %0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
 
 static inline void out_le32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stwbrx %1,0,%2; sync" : "=m" (*addr)
+	__asm__ __volatile__("sync; stwbrx %1,0,%2" : "=m" (*addr)
			     : "r" (val), "r" (addr));
+	get_paca()->io_sync = 1;
 }
 
 static inline void out_be32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stw%U0%X0 %1,%0; sync"
+	__asm__ __volatile__("sync; stw%U0%X0 %1,%0"
			     : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 static inline unsigned long in_le64(const volatile unsigned long __iomem *addr)
@@ -354,6 +364,7 @@ static inline unsigned long in_le64(const volatile unsigned long __iomem *addr)
 	unsigned long tmp, ret;
 
 	__asm__ __volatile__(
+			     "sync\n"
			     "ld %1,0(%2)\n"
			     "twi 0,%1,0\n"
			     "isync\n"
@@ -372,7 +383,7 @@ static inline unsigned long in_be64(const volatile unsigned long __iomem *addr)
 {
 	unsigned long ret;
 
-	__asm__ __volatile__("ld%U1%X1 %0,%1; twi 0,%0,0; isync"
+	__asm__ __volatile__("sync; ld%U1%X1 %0,%1; twi 0,%0,0; isync"
			     : "=r" (ret) : "m" (*addr));
 	return ret;
 }
@@ -389,14 +400,16 @@ static inline void out_le64(volatile unsigned long __iomem *addr, unsigned long
			     "rldicl %1,%1,32,0\n"
			     "rlwimi %0,%1,8,8,31\n"
			     "rlwimi %0,%1,24,16,23\n"
-			     "std %0,0(%3)\n"
-			     "sync"
+			     "sync\n"
+			     "std %0,0(%3)"
			     : "=&r" (tmp) , "=&r" (val) : "1" (val) , "b" (addr) , "m" (*addr));
+	get_paca()->io_sync = 1;
 }
 
 static inline void out_be64(volatile unsigned long __iomem *addr, unsigned long val)
 {
-	__asm__ __volatile__("std%U0%X0 %1,%0; sync" : "=m" (*addr) : "r" (val));
+	__asm__ __volatile__("sync; std%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
+	get_paca()->io_sync = 1;
 }
 
 #ifndef CONFIG_PPC_ISERIES
diff --git a/include/asm-powerpc/kdump.h b/include/asm-powerpc/kdump.h
index dc1574c945f..10e8eb1e6f4 100644
--- a/include/asm-powerpc/kdump.h
+++ b/include/asm-powerpc/kdump.h
@@ -7,7 +7,7 @@
 /* How many bytes to reserve at zero for kdump. The reserve limit should
  * be greater or equal to the trampoline's end address.
  * Reserve to the end of the FWNMI area, see head_64.S */
-#define KDUMP_RESERVE_LIMIT	0x8000
+#define KDUMP_RESERVE_LIMIT	0x10000 /* 64K */
 
 #ifdef CONFIG_CRASH_DUMP
 
diff --git a/include/asm-powerpc/paca.h b/include/asm-powerpc/paca.h
index 2d4585f0620..3d5d590bc4b 100644
--- a/include/asm-powerpc/paca.h
+++ b/include/asm-powerpc/paca.h
@@ -93,6 +93,7 @@ struct paca_struct {
 	u64 saved_r1;			/* r1 save for RTAS calls */
 	u64 saved_msr;			/* MSR saved here by enter_rtas */
 	u8 proc_enabled;		/* irq soft-enable flag */
+	u8 io_sync;			/* writel() needs spin_unlock sync */
 
 	/* Stuff for accurate time accounting */
 	u64 user_time;			/* accumulated usermode TB ticks */
diff --git a/include/asm-powerpc/spinlock.h b/include/asm-powerpc/spinlock.h
index 895cb6d3a42..c31e4382a77 100644
--- a/include/asm-powerpc/spinlock.h
+++ b/include/asm-powerpc/spinlock.h
@@ -36,6 +36,19 @@
 #define LOCK_TOKEN	1
 #endif
 
+#if defined(CONFIG_PPC64) && defined(CONFIG_SMP)
+#define CLEAR_IO_SYNC	(get_paca()->io_sync = 0)
+#define SYNC_IO		do {						\
+				if (unlikely(get_paca()->io_sync)) {	\
+					mb();				\
+					get_paca()->io_sync = 0;	\
+				}					\
+			} while (0)
+#else
+#define CLEAR_IO_SYNC
+#define SYNC_IO
+#endif
+
 /*
  * This returns the old value in the lock, so we succeeded
  * in getting the lock if the return value is 0.
@@ -61,6 +74,7 @@ static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
 
 static int __inline__ __raw_spin_trylock(raw_spinlock_t *lock)
 {
+	CLEAR_IO_SYNC;
 	return __spin_trylock(lock) == 0;
 }
 
@@ -91,6 +105,7 @@ extern void __rw_yield(raw_rwlock_t *lock);
 
 static void __inline__ __raw_spin_lock(raw_spinlock_t *lock)
 {
+	CLEAR_IO_SYNC;
 	while (1) {
 		if (likely(__spin_trylock(lock) == 0))
 			break;
@@ -107,6 +122,7 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 {
 	unsigned long flags_dis;
 
+	CLEAR_IO_SYNC;
 	while (1) {
 		if (likely(__spin_trylock(lock) == 0))
 			break;
@@ -124,6 +140,7 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 
 static __inline__ void __raw_spin_unlock(raw_spinlock_t *lock)
 {
+	SYNC_IO;
 	__asm__ __volatile__("# __raw_spin_unlock\n\t"
				LWSYNC_ON_SMP: : :"memory");
 	lock->slock = 0;
diff --git a/include/asm-ppc/io.h b/include/asm-ppc/io.h
index 89c6f1bc3aa..680555be22e 100644
--- a/include/asm-ppc/io.h
+++ b/include/asm-ppc/io.h
@@ -63,7 +63,7 @@ extern inline int in_8(const volatile unsigned char __iomem *addr)
 	int ret;
 
 	__asm__ __volatile__(
-		"lbz%U1%X1 %0,%1;\n"
+		"sync; lbz%U1%X1 %0,%1;\n"
		"twi 0,%0,0;\n"
		"isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -78,7 +78,7 @@ extern inline int in_le16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhbrx %0,0,%1;\n"
+	__asm__ __volatile__("sync; lhbrx %0,0,%1;\n"
			     "twi 0,%0,0;\n"
			     "isync" : "=r" (ret) :
			     "r" (addr), "m" (*addr));
@@ -89,7 +89,7 @@ extern inline int in_be16(const volatile unsigned short __iomem *addr)
 {
 	int ret;
 
-	__asm__ __volatile__("lhz%U1%X1 %0,%1;\n"
+	__asm__ __volatile__("sync; lhz%U1%X1 %0,%1;\n"
			     "twi 0,%0,0;\n"
			     "isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -97,20 +97,20 @@ extern inline int in_be16(const volatile unsigned short __iomem *addr)
 
 extern inline void out_le16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sthbrx %1,0,%2; eieio" : "=m" (*addr) :
+	__asm__ __volatile__("sync; sthbrx %1,0,%2" : "=m" (*addr) :
			     "r" (val), "r" (addr));
 }
 
 extern inline void out_be16(volatile unsigned short __iomem *addr, int val)
 {
-	__asm__ __volatile__("sth%U0%X0 %1,%0; eieio" : "=m" (*addr) : "r" (val));
+	__asm__ __volatile__("sync; sth%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
 }
 
 extern inline unsigned in_le32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwbrx %0,0,%1;\n"
+	__asm__ __volatile__("sync; lwbrx %0,0,%1;\n"
			     "twi 0,%0,0;\n"
			     "isync" : "=r" (ret) :
			     "r" (addr), "m" (*addr));
@@ -121,7 +121,7 @@ extern inline unsigned in_be32(const volatile unsigned __iomem *addr)
 {
 	unsigned ret;
 
-	__asm__ __volatile__("lwz%U1%X1 %0,%1;\n"
+	__asm__ __volatile__("sync; lwz%U1%X1 %0,%1;\n"
			     "twi 0,%0,0;\n"
			     "isync" : "=r" (ret) : "m" (*addr));
 	return ret;
@@ -129,13 +129,13 @@ extern inline unsigned in_be32(const volatile unsigned __iomem *addr)
 
 extern inline void out_le32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stwbrx %1,0,%2; eieio" : "=m" (*addr) :
+	__asm__ __volatile__("sync; stwbrx %1,0,%2" : "=m" (*addr) :
			     "r" (val), "r" (addr));
 }
 
 extern inline void out_be32(volatile unsigned __iomem *addr, int val)
 {
-	__asm__ __volatile__("stw%U0%X0 %1,%0; eieio" : "=m" (*addr) : "r" (val));
+	__asm__ __volatile__("sync; stw%U0%X0 %1,%0" : "=m" (*addr) : "r" (val));
 }
 #if defined (CONFIG_8260_PCI9)
 #define readb(addr) in_8((volatile u8 *)(addr))
@@ -259,6 +259,7 @@ extern __inline__ unsigned int name(unsigned int port) \
 { \
 	unsigned int x; \
 	__asm__ __volatile__( \
+		"sync\n" \
		"0:" op " %0,0,%1\n" \
		"1: twi 0,%0,0\n" \
		"2: isync\n" \
@@ -284,6 +285,7 @@ extern __inline__ unsigned int name(unsigned int port) \
 extern __inline__ void name(unsigned int val, unsigned int port) \
 { \
 	__asm__ __volatile__( \
+		"sync\n" \
		"0:" op " %0,0,%1\n" \
		"1: sync\n" \
		"2:\n" \