author     Lennert Buytenhek <buytenh@wantstofly.org>    2007-03-31 07:03:20 -0400
committer  Russell King <rmk+kernel@arm.linux.org.uk>    2007-04-01 17:38:36 -0400
commit     398e692fd5cecdd25d311b47bbae69f7bac3a3cb
tree       f1852c28d9f9c47c4763607630ac3db1c91930d6 /include
parent     9a4d93d49d140c196020a1bae339efcf211cac03
[ARM] 4298/1: fix memory barriers for DMA coherent and SMP platforms
This patch:
- Switches mb/rmb/wmb back to being full-blown DMBs on ARM SMP systems,
since mb/rmb/wmb are required to order Normal memory accesses as well.
- Enables the use of DMB and ISB on XSC3 (which is an ARMv5TE ISA core
but conforms to the ARMv6 memory ordering model and supports the
various ARMv6 barriers.)
- Makes DMA coherent platforms (only ixp23xx at the moment) map
mb/rmb/wmb to dmb(), as on DMA coherent platforms, DMA consistent
mappings are done as Normal mappings, which are weakly ordered.
Signed-off-by: Lennert Buytenhek <buytenh@wantstofly.org>
Acked-by: David Howells <dhowells@redhat.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Acked-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Acked-by: Dan Williams <dan.j.williams@intel.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
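As an illustration of the mapping described in the commit message (not part of the patch), here is a minimal user-space sketch, assuming stand-in definitions: dmb() and barrier() are replaced by printing stubs, arch_is_coherent() is faked via an invented FAKE_ARCH_IS_COHERENT define, and only mb()/smp_mb() are shown. The real definitions are in the include/asm-arm/system.h hunks below.

/*
 * Stand-alone sketch of the barrier selection introduced by this patch.
 * All names here are user-space stand-ins, not the kernel definitions.
 * Build examples:
 *   cc -o demo demo.c                          # UP, not DMA coherent
 *   cc -DFAKE_ARCH_IS_COHERENT -o demo demo.c  # UP, DMA coherent (e.g. ixp23xx)
 *   cc -DCONFIG_SMP -o demo demo.c             # SMP
 */
#include <stdio.h>

#define dmb()     printf("dmb (real data memory barrier)\n")
#define barrier() printf("barrier (compiler barrier only)\n")

#ifdef FAKE_ARCH_IS_COHERENT
#define arch_is_coherent() 1   /* stand-in for the kernel's arch_is_coherent() */
#else
#define arch_is_coherent() 0
#endif

#ifndef CONFIG_SMP
/* UP: mb/rmb/wmb need a real DMB only when DMA is coherent (Normal memory). */
#define mb()     do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
#define smp_mb() barrier()
#else
/* SMP: both the device-facing and the CPU-facing barriers must be real DMBs. */
#define mb()     dmb()
#define smp_mb() dmb()
#endif

int main(void)
{
	printf("mb():     "); mb();
	printf("smp_mb(): "); smp_mb();
	return 0;
}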
Diffstat (limited to 'include')
 -rw-r--r--  include/asm-arm/system.h | 34 ++++++++++++++++++----------------
 1 file changed, 18 insertions(+), 16 deletions(-)
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index f06d8a43fdee..69134c7518c1 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -3,6 +3,7 @@
 
 #ifdef __KERNEL__
 
+#include <asm/memory.h>
 
 #define CPU_ARCH_UNKNOWN	0
 #define CPU_ARCH_ARMv3		1
@@ -154,7 +155,7 @@ extern unsigned int user_debug;
 #define vectors_high()	(0)
 #endif
 
-#if __LINUX_ARM_ARCH__ >= 6
+#if defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ >= 6
 #define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
 				    : : "r" (0) : "memory")
 #define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
@@ -168,22 +169,23 @@ extern unsigned int user_debug;
 #define dmb() __asm__ __volatile__ ("" : : : "memory")
 #endif
 
-#define mb()	barrier()
-#define rmb()	barrier()
-#define wmb()	barrier()
-#define read_barrier_depends()	do { } while(0)
-
-#ifdef CONFIG_SMP
-#define smp_mb()	dmb()
-#define smp_rmb()	dmb()
-#define smp_wmb()	dmb()
-#define smp_read_barrier_depends()	read_barrier_depends()
+#ifndef CONFIG_SMP
+#define mb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define rmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define wmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #else
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
-#define smp_read_barrier_depends()	read_barrier_depends()
-#endif /* CONFIG_SMP */
+#define mb()	dmb()
+#define rmb()	dmb()
+#define wmb()	dmb()
+#define smp_mb()	dmb()
+#define smp_rmb()	dmb()
+#define smp_wmb()	dmb()
+#endif
+#define read_barrier_depends()	do { } while(0)
+#define smp_read_barrier_depends()	do { } while(0)
 
 #define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
 #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
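For context, the following hypothetical snippet (an illustration, not from the patch; struct dma_desc, ring_doorbell(), queue_buffer() and the other names are invented, and wmb() is stubbed as a plain compiler barrier so the file builds in user space) sketches the kind of driver pattern that motivates the change: on a DMA-coherent platform the descriptor sits in a weakly ordered Normal mapping, so wmb() must expand to a real dmb() or the device may observe the doorbell write before the descriptor contents.

/* Hypothetical producer pattern; all names are made up for illustration. */
#include <stdint.h>
#include <stdio.h>

struct dma_desc {
	uint32_t addr;
	uint32_t len;
	uint32_t flags;
};

#define DESC_READY 0x1u

/* Stub: after this patch the kernel's wmb() would be a real dmb() here. */
#define wmb() __asm__ __volatile__ ("" : : : "memory")

static void ring_doorbell(volatile uint32_t *doorbell)
{
	*doorbell = 1;		/* would be an MMIO write in a real driver */
}

static void queue_buffer(struct dma_desc *desc, volatile uint32_t *doorbell,
			 uint32_t dma_addr, uint32_t len)
{
	desc->addr  = dma_addr;
	desc->len   = len;
	desc->flags = DESC_READY;

	/*
	 * The descriptor lives in a DMA-consistent (Normal, weakly ordered)
	 * mapping; without a real DMB the doorbell write below could become
	 * visible to the device before the descriptor fields above, which is
	 * why a compiler-only barrier is not enough on such platforms.
	 */
	wmb();

	ring_doorbell(doorbell);
}

int main(void)
{
	static struct dma_desc desc;
	static uint32_t fake_doorbell;

	queue_buffer(&desc, &fake_doorbell, 0x1000, 64);
	printf("desc.flags=0x%x doorbell=%u\n", desc.flags, fake_doorbell);
	return 0;
}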