author		Thomas Gleixner <tglx@linutronix.de>	2007-10-15 17:28:20 -0400
committer	Thomas Gleixner <tglx@inhelltoy.tec.linutronix.de>	2007-10-17 14:17:19 -0400
commit		1f7afb08a595292d946a5f1fd4929c81db7042d2 (patch)
tree		42bc30a41de142a28bde7f17b5a92139fc327a79
parent		327c21bc3d347d545d227103d7cc58039ab8a0be (diff)
x86: unify include/asm/edac_32/64.h
Same file, except for whitespace, comment formatting and:

    32-bit: unsigned long *virt_addr = va;
    64-bit: unsigned int *virt_addr = va;

Both can be safely replaced by:

    u32 i, *virt_addr = va;

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
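For illustration only, a minimal standalone C check of the size argument above (the u32 typedef is a local stand-in for the kernel's): on i386 unsigned long is 4 bytes, on x86_64 unsigned int is 4 bytes, so a u32 pointer walks memory in the same 32-bit strides on both.

#include <stdio.h>
#include <stdint.h>

typedef uint32_t u32;	/* stand-in for the kernel typedef */

int main(void)
{
	/* On i386 sizeof(unsigned long) == 4; on x86_64 sizeof(unsigned int) == 4.
	 * u32 is 4 bytes on both, so the scrub loop's pointer arithmetic steps
	 * through memory identically either way.
	 */
	printf("unsigned long: %zu bytes\n", sizeof(unsigned long));
	printf("unsigned int : %zu bytes\n", sizeof(unsigned int));
	printf("u32          : %zu bytes\n", sizeof(u32));
	return 0;
}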
-rw-r--r--	include/asm-x86/edac.h		| 21
-rw-r--r--	include/asm-x86/edac_32.h	| 18
-rw-r--r--	include/asm-x86/edac_64.h	| 18
3 files changed, 17 insertions(+), 40 deletions(-)
diff --git a/include/asm-x86/edac.h b/include/asm-x86/edac.h
index f8b888e140b0..cf3200a745ad 100644
--- a/include/asm-x86/edac.h
+++ b/include/asm-x86/edac.h
@@ -1,5 +1,18 @@
-#ifdef CONFIG_X86_32
-# include "edac_32.h"
-#else
-# include "edac_64.h"
+#ifndef _ASM_X86_EDAC_H
+#define _ASM_X86_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	u32 i, *virt_addr = va;
+
+	/*
+	 * Very carefully read and write to memory atomically so we
+	 * are interrupt, DMA and SMP safe.
+	 */
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
 #endif
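For illustration only: a standalone user-space sketch (the buffer name and size are made up for the example) that exercises the unified atomic_scrub() on a local array. The lock-prefixed addl of zero changes nothing arithmetically, but it forces an atomic read-modify-write of each 32-bit word, which is how the scrub stays safe with respect to interrupts, DMA and SMP; in-kernel, the EDAC core calls this on a mapped page rather than a stack buffer. x86 only, since it reuses the same inline assembly as the patch.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;	/* stand-in for the kernel typedef */

/* Copy of the unified helper from the patch, usable outside the kernel. */
static inline void atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/* "lock; addl $0" adds nothing, but performs an atomic
	 * read-modify-write of each 32-bit word, so every location
	 * is re-read and rewritten in one indivisible operation.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

int main(void)
{
	u32 buf[16] = { 0xdeadbeef };	/* hypothetical buffer to scrub */

	atomic_scrub(buf, sizeof(buf));
	printf("scrubbed %zu bytes; buf[0] still %#x\n", sizeof(buf), buf[0]);
	return 0;
}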
diff --git a/include/asm-x86/edac_32.h b/include/asm-x86/edac_32.h
deleted file mode 100644
index 3e7dd0ab68ce..000000000000
--- a/include/asm-x86/edac_32.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef ASM_EDAC_H
-#define ASM_EDAC_H
-
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(void *va, u32 size)
-{
-	unsigned long *virt_addr = va;
-	u32 i;
-
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
-#endif
diff --git a/include/asm-x86/edac_64.h b/include/asm-x86/edac_64.h
deleted file mode 100644
index cad1cd42b4ee..000000000000
--- a/include/asm-x86/edac_64.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef ASM_EDAC_H
-#define ASM_EDAC_H
-
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(void *va, u32 size)
-{
-	unsigned int *virt_addr = va;
-	u32 i;
-
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
-#endif