author    Rob Herring <rob.herring@calxeda.com>  2011-12-09 11:58:35 -0500
committer Russell King <rmk+kernel@arm.linux.org.uk>  2011-12-11 03:35:50 -0500
commit    786a767465b12cb4c1a45421b12fbf6bff45b0ea (patch)
tree      782492ca819b0176fca17e1be242f01bdf4afd65 /arch/arm/include/asm/edac.h
parent    8878a539ff19a43cf3729e7562cd528f490246ae (diff)
ARM: 7201/1: add EDAC atomic_scrub function
Add support for architecture specific EDAC atomic_scrub to ARM. Only
ARMv6+ is implemented, as the ldrex/strex instructions are needed.
Supporting EDAC on ARMv5 or earlier is unlikely at this point anyway.

Signed-off-by: Rob Herring <rob.herring@calxeda.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Diffstat (limited to 'arch/arm/include/asm/edac.h')
-rw-r--r--	arch/arm/include/asm/edac.h | 48 ++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 48 insertions(+), 0 deletions(-)
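For context: atomic_scrub() is the per-arch hook the EDAC core invokes for software ECC scrubbing. Rewriting each word of a corrected region makes the memory controller recompute and store fresh ECC check bits. A minimal sketch of a caller follows; scrub_page() is a hypothetical helper for illustration only, not part of this patch or of the EDAC core API.

#include <linux/mm.h>
#include <asm/edac.h>

/* Hypothetical helper (illustration only): scrub one lowmem page
 * after a corrected-error report, so the memory controller
 * regenerates its ECC bits for every word in the page. */
static void scrub_page(struct page *page)
{
	void *va = page_address(page);	/* lowmem pages only */

	if (va)
		atomic_scrub(va, PAGE_SIZE);
}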
diff --git a/arch/arm/include/asm/edac.h b/arch/arm/include/asm/edac.h
new file mode 100644
index 000000000000..0df7a2c1fc3d
--- /dev/null
+++ b/arch/arm/include/asm/edac.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2011 Calxeda, Inc.
+ * Based on PPC version Copyright 2007 MontaVista Software, Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+/*
+ * ECC atomic, DMA, SMP and interrupt safe scrub function.
+ * Implements the per-arch atomic_scrub() that EDAC uses for software
+ * ECC scrubbing.  It reads memory and then writes back the original
+ * value, allowing the hardware to detect and correct memory errors.
+ */
+static inline void atomic_scrub(void *va, u32 size)
+{
+#if __LINUX_ARM_ARCH__ >= 6
+	unsigned int *virt_addr = va;
+	unsigned int temp, temp2;
+	unsigned int i;
+
+	for (i = 0; i < size / sizeof(*virt_addr); i++, virt_addr++) {
+		/* Very carefully read and write to memory atomically
+		 * so we are interrupt, DMA and SMP safe.
+		 */
+		__asm__ __volatile__("\n"
+			"1:	ldrex	%0, [%2]\n"
+			"	strex	%1, %0, [%2]\n"
+			"	teq	%1, #0\n"
+			"	bne	1b\n"
+			: "=&r"(temp), "=&r"(temp2)
+			: "r"(virt_addr)
+			: "cc");
+	}
+#endif
+}
+
+#endif
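How the loop stays atomic: ldrex loads the word and sets the exclusive monitor for that address; strex stores the value back and writes 0 to %1 only if the monitor is still set, i.e. no other observer modified the location in between (the in-code comment notes this covers the interrupt, DMA and SMP cases); teq/bne retries until the store succeeds. A rough userspace analogue of the same read-and-write-back idea, expressed with GCC's __atomic builtins, is sketched below; it is illustrative only, not kernel code.

#include <stdint.h>

/* Re-expresses the ldrex/strex loop as a compare-and-swap loop:
 * atomically write each word's current value back to memory,
 * retrying if another writer got in between (the situation in
 * which strex would fail). */
static inline void scrub_words(uint32_t *p, uint32_t size)
{
	uint32_t i;

	for (i = 0; i < size / sizeof(*p); i++, p++) {
		uint32_t old = __atomic_load_n(p, __ATOMIC_RELAXED);

		/* On failure, 'old' is refreshed to the word's current
		 * value and the next attempt writes that back instead. */
		while (!__atomic_compare_exchange_n(p, &old, old, 0,
						    __ATOMIC_RELAXED,
						    __ATOMIC_RELAXED))
			;
	}
}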