Diffstat (limited to 'include')
-rw-r--r--  include/asm-i386/atomic.h   | 12 ------------
-rw-r--r--  include/asm-i386/edac.h     | 18 ++++++++++++++++++
-rw-r--r--  include/asm-x86_64/atomic.h | 12 ------------
-rw-r--r--  include/asm-x86_64/edac.h   | 18 ++++++++++++++++++
4 files changed, 36 insertions, 24 deletions
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index e2c00c95a5e1..de649d3aa2d4 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -255,17 +255,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(unsigned long *virt_addr, u32 size)
-{
-	u32 i;
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
 #include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-i386/edac.h b/include/asm-i386/edac.h
new file mode 100644
index 000000000000..3e7dd0ab68ce
--- /dev/null
+++ b/include/asm-i386/edac.h
@@ -0,0 +1,18 @@
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	unsigned long *virt_addr = va;
+	u32 i;
+
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		/* Very carefully read and write to memory atomically
+		 * so we are interrupt, DMA and SMP safe.
+		 */
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
+#endif
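
For context (not part of this commit): a minimal sketch of how an EDAC driver of this era might call the new helper to scrub one page after a corrected-error report. edac_scrub_page() is an illustrative name, and the sketch assumes the 2.6-era two-argument kmap_atomic(page, type) API.

/*
 * Hypothetical caller sketch, not part of this commit.
 * Assumes the 2.6-era kmap_atomic(page, type) interface.
 */
#include <linux/highmem.h>
#include <asm/edac.h>

static void edac_scrub_page(struct page *page)
{
	void *virt = kmap_atomic(page, KM_BOUNCE_READ);

	/* Rewrite every 32-bit word of the page in place; each locked
	 * read-modify-write forces the memory controller to recompute
	 * and store fresh ECC for the word it touches. */
	atomic_scrub(virt, PAGE_SIZE);

	kunmap_atomic(virt, KM_BOUNCE_READ);
}

Taking void * here (rather than the typed pointer the old atomic.h version used) lets callers pass a mapping straight through without a cast; the header casts to its preferred word type internally.
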
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 4048508c4f40..4b5cd553e772 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -426,17 +426,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(u32 *virt_addr, u32 size)
-{
-	u32 i;
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
 #include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-x86_64/edac.h b/include/asm-x86_64/edac.h
new file mode 100644
index 000000000000..cad1cd42b4ee
--- /dev/null
+++ b/include/asm-x86_64/edac.h
@@ -0,0 +1,18 @@
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	unsigned int *virt_addr = va;
+	u32 i;
+
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		/* Very carefully read and write to memory atomically
+		 * so we are interrupt, DMA and SMP safe.
+		 */
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
+#endif
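
The heart of both headers is the lock-prefixed add of zero: the value is unchanged, but the word is atomically read and written back, which rewrites its ECC bits without letting an interrupt, DMA, or another CPU slip between the read and the write. A stand-alone user-space sketch of the same idiom, assuming GCC on x86; it uses a "+m" read-write constraint, which states the in-place update more explicitly than the input-only "m" constraint in the kernel code above.

/* Stand-alone demonstration of the scrub idiom; builds with GCC on x86. */
#include <stdint.h>
#include <stdio.h>

static inline void scrub_word(uint32_t *p)
{
	/* Atomically read, add zero, and write back: the stored value
	 * stays the same, but the word is physically rewritten. */
	__asm__ __volatile__("lock; addl $0, %0" : "+m" (*p));
}

int main(void)
{
	uint32_t buf[16] = { 0xdeadbeef };
	unsigned int i;

	for (i = 0; i < sizeof(buf) / sizeof(buf[0]); i++)
		scrub_word(&buf[i]);

	printf("buf[0] = 0x%x\n", buf[0]);	/* still 0xdeadbeef */
	return 0;
}
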