author		Andreas Herrmann <andreas.herrmann3@amd.com>	2009-02-25 05:27:27 -0500
committer	Ingo Molnar <mingo@elte.hu>			2009-02-25 06:19:45 -0500
commit		6d74171bf7315257d276aa35400c5a8d6a993f19 (patch)
tree		58fdd7f83bbba2da139749bd7f0dcb1b91fc03c9 /arch
parent		40823f737e5bd186a1156fb1c28f360260e1e084 (diff)
x86: memtest: introduce array to select memtest patterns
Impact: code cleanup
Signed-off-by: Andreas Herrmann <andreas.herrmann3@amd.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'arch')
-rw-r--r--	arch/x86/mm/memtest.c	65
1 file changed, 23 insertions, 42 deletions
diff --git a/arch/x86/mm/memtest.c b/arch/x86/mm/memtest.c
index 00b8bdc64c3e..827f94044cfc 100644
--- a/arch/x86/mm/memtest.c
+++ b/arch/x86/mm/memtest.c
@@ -9,48 +9,25 @@
 
 #include <asm/e820.h>
 
-#define _MAX_MEM_PATTERNS 4
+static u64 patterns[] __initdata = {
+	0,
+	0xffffffffffffffffULL,
+	0x5555555555555555ULL,
+	0xaaaaaaaaaaaaaaaaULL,
+};
 
 static void __init memtest(unsigned long start_phys, unsigned long size,
-			   unsigned pattern)
+			   u64 pattern)
 {
 	unsigned long i;
-	unsigned long *start;
+	u64 *start;
 	unsigned long start_bad;
 	unsigned long last_bad;
-	unsigned long val;
 	unsigned long start_phys_aligned;
 	unsigned long count;
 	unsigned long incr;
 
-	pattern = pattern % _MAX_MEM_PATTERNS;
-
-	switch (pattern) {
-	case 0:
-		val = 0UL;
-		break;
-	case 1:
-		val = -1UL;
-		break;
-	case 2:
-#ifdef CONFIG_X86_64
-		val = 0x5555555555555555UL;
-#else
-		val = 0x55555555UL;
-#endif
-		break;
-	case 3:
-#ifdef CONFIG_X86_64
-		val = 0xaaaaaaaaaaaaaaaaUL;
-#else
-		val = 0xaaaaaaaaUL;
-#endif
-		break;
-	default:
-		return;
-	}
-
-	incr = sizeof(unsigned long);
+	incr = sizeof(pattern);
 	start_phys_aligned = ALIGN(start_phys, incr);
 	count = (size - (start_phys_aligned - start_phys))/incr;
 	start = __va(start_phys_aligned);
@@ -58,15 +35,16 @@ static void __init memtest(unsigned long start_phys, unsigned long size,
 	last_bad = 0;
 
 	for (i = 0; i < count; i++)
-		start[i] = val;
+		start[i] = pattern;
 	for (i = 0; i < count; i++, start++, start_phys_aligned += incr) {
-		if (*start != val) {
+		if (*start != pattern) {
 			if (start_phys_aligned == last_bad + incr) {
 				last_bad += incr;
 			} else {
 				if (start_bad) {
-					printk(KERN_CONT "\n %016lx bad mem addr %010lx - %010lx reserved",
-						val, start_bad, last_bad + incr);
+					printk(KERN_CONT "\n %016llx bad mem addr %010lx - %010lx reserved",
+						(unsigned long long) pattern,
+						start_bad, last_bad + incr);
 					reserve_early(start_bad, last_bad + incr, "BAD RAM");
 				}
 				start_bad = last_bad = start_phys_aligned;
@@ -74,8 +52,9 @@ static void __init memtest(unsigned long start_phys, unsigned long size,
 		}
 	}
 	if (start_bad) {
-		printk(KERN_CONT "\n %016lx bad mem addr %010lx - %010lx reserved",
-			val, start_bad, last_bad + incr);
+		printk(KERN_CONT "\n %016llx bad mem addr %010lx - %010lx reserved",
+			(unsigned long long) pattern, start_bad,
+			last_bad + incr);
 		reserve_early(start_bad, last_bad + incr, "BAD RAM");
 	}
 }
@@ -95,13 +74,16 @@ early_param("memtest", parse_memtest);
 void __init early_memtest(unsigned long start, unsigned long end)
 {
 	u64 t_start, t_size;
-	unsigned pattern;
+	unsigned int i;
+	u64 pattern;
 
 	if (!memtest_pattern)
 		return;
 
 	printk(KERN_INFO "early_memtest: pattern num %d", memtest_pattern);
-	for (pattern = 0; pattern < memtest_pattern; pattern++) {
+	for (i = 0; i < memtest_pattern; i++) {
+		unsigned int idx = i % ARRAY_SIZE(patterns);
+		pattern = patterns[idx];
 		t_start = start;
 		t_size = 0;
 		while (t_start < end) {
@@ -115,8 +97,7 @@ void __init early_memtest(unsigned long start, unsigned long end)
 
 		printk(KERN_CONT "\n %010llx - %010llx pattern %d",
 		       (unsigned long long)t_start,
-		       (unsigned long long)t_start + t_size,
-		       pattern % _MAX_MEM_PATTERNS);
+		       (unsigned long long)t_start + t_size, idx);
 
 		memtest(t_start, t_size, pattern);
 
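
Editor's note, not part of the commit: after this change, pass i of early_memtest() selects its test value as patterns[i % ARRAY_SIZE(patterns)], so a memtest=N boot parameter larger than the number of patterns simply cycles through the four entries again, which is the behaviour the old code obtained with pattern % _MAX_MEM_PATTERNS. The sketch below is a standalone userspace illustration of that selection loop only (assumptions: printf stands in for printk, memtest_pattern is hardcoded to 6 as if the kernel had been booted with memtest=6, and ARRAY_SIZE is defined locally); it is not kernel code.

/*
 * Standalone illustration of the pattern-selection loop introduced
 * by this patch (userspace sketch, hypothetical values).
 */
#include <stdio.h>
#include <inttypes.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

static const uint64_t patterns[] = {
	0,
	0xffffffffffffffffULL,
	0x5555555555555555ULL,
	0xaaaaaaaaaaaaaaaaULL,
};

int main(void)
{
	unsigned int memtest_pattern = 6;	/* assumed boot value: memtest=6 */
	unsigned int i;

	for (i = 0; i < memtest_pattern; i++) {
		/* same selection as the patched early_memtest() loop */
		unsigned int idx = i % ARRAY_SIZE(patterns);
		uint64_t pattern = patterns[idx];

		printf("pass %u: pattern %u = %016" PRIx64 "\n",
		       i, idx, pattern);
	}
	return 0;
}

One practical consequence of the array-based lookup: adding another test pattern now only means appending an entry to patterns[], instead of extending the old switch statement and its 32-/64-bit #ifdef branches.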