diff options
author | Andreas Herrmann <andreas.herrmann3@amd.com> | 2008-12-16 13:16:34 -0500 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2008-12-16 13:58:04 -0500 |
commit | 29d0887ffd084cde9d6a1286cb82b71701a974dd (patch) | |
tree | ec4f694a3e1ecf8cf8980971a927b745d722d520 /arch | |
parent | 0657d9ebff186dcdb17e582dcb909028775a7707 (diff) |
x86: microcode_amd: replace inline asm by common rdmsr/wrmsr functions
Impact: cleanup
Signed-off-by: Andreas Herrmann <andreas.herrmann3@amd.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'arch')
-rw-r--r-- | arch/x86/include/asm/msr-index.h | 2 | ||||
-rw-r--r-- | arch/x86/kernel/microcode_amd.c | 23 |
2 files changed, 7 insertions, 18 deletions
diff --git a/arch/x86/include/asm/msr-index.h b/arch/x86/include/asm/msr-index.h index e38859d577a1..cb58643947b9 100644 --- a/arch/x86/include/asm/msr-index.h +++ b/arch/x86/include/asm/msr-index.h | |||
@@ -85,7 +85,9 @@ | |||
85 | /* AMD64 MSRs. Not complete. See the architecture manual for a more | 85 | /* AMD64 MSRs. Not complete. See the architecture manual for a more |
86 | complete list. */ | 86 | complete list. */ |
87 | 87 | ||
88 | #define MSR_AMD64_PATCH_LEVEL 0x0000008b | ||
88 | #define MSR_AMD64_NB_CFG 0xc001001f | 89 | #define MSR_AMD64_NB_CFG 0xc001001f |
90 | #define MSR_AMD64_PATCH_LOADER 0xc0010020 | ||
89 | #define MSR_AMD64_IBSFETCHCTL 0xc0011030 | 91 | #define MSR_AMD64_IBSFETCHCTL 0xc0011030 |
90 | #define MSR_AMD64_IBSFETCHLINAD 0xc0011031 | 92 | #define MSR_AMD64_IBSFETCHLINAD 0xc0011031 |
91 | #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032 | 93 | #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032 |
diff --git a/arch/x86/kernel/microcode_amd.c b/arch/x86/kernel/microcode_amd.c index c7f225c7e481..2856955ddab1 100644 --- a/arch/x86/kernel/microcode_amd.c +++ b/arch/x86/kernel/microcode_amd.c | |||
@@ -93,6 +93,7 @@ static struct equiv_cpu_entry *equiv_cpu_table; | |||
93 | static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig) | 93 | static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig) |
94 | { | 94 | { |
95 | struct cpuinfo_x86 *c = &cpu_data(cpu); | 95 | struct cpuinfo_x86 *c = &cpu_data(cpu); |
96 | u32 dummy; | ||
96 | 97 | ||
97 | memset(csig, 0, sizeof(*csig)); | 98 | memset(csig, 0, sizeof(*csig)); |
98 | 99 | ||
@@ -102,9 +103,7 @@ static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig) | |||
102 | return -1; | 103 | return -1; |
103 | } | 104 | } |
104 | 105 | ||
105 | asm volatile("movl %1, %%ecx; rdmsr" | 106 | rdmsr(MSR_AMD64_PATCH_LEVEL, csig->rev, dummy); |
106 | : "=a" (csig->rev) | ||
107 | : "i" (0x0000008B) : "ecx"); | ||
108 | 107 | ||
109 | printk(KERN_INFO "microcode: collect_cpu_info_amd : patch_id=0x%x\n", | 108 | printk(KERN_INFO "microcode: collect_cpu_info_amd : patch_id=0x%x\n", |
110 | csig->rev); | 109 | csig->rev); |
@@ -181,12 +180,10 @@ static int get_matching_microcode(int cpu, void *mc, int rev) | |||
181 | static void apply_microcode_amd(int cpu) | 180 | static void apply_microcode_amd(int cpu) |
182 | { | 181 | { |
183 | unsigned long flags; | 182 | unsigned long flags; |
184 | unsigned int eax, edx; | 183 | u32 rev, dummy; |
185 | unsigned int rev; | ||
186 | int cpu_num = raw_smp_processor_id(); | 184 | int cpu_num = raw_smp_processor_id(); |
187 | struct ucode_cpu_info *uci = ucode_cpu_info + cpu_num; | 185 | struct ucode_cpu_info *uci = ucode_cpu_info + cpu_num; |
188 | struct microcode_amd *mc_amd = uci->mc; | 186 | struct microcode_amd *mc_amd = uci->mc; |
189 | unsigned long addr; | ||
190 | 187 | ||
191 | /* We should bind the task to the CPU */ | 188 | /* We should bind the task to the CPU */ |
192 | BUG_ON(cpu_num != cpu); | 189 | BUG_ON(cpu_num != cpu); |
@@ -195,19 +192,9 @@ static void apply_microcode_amd(int cpu) | |||
195 | return; | 192 | return; |
196 | 193 | ||
197 | spin_lock_irqsave(&microcode_update_lock, flags); | 194 | spin_lock_irqsave(&microcode_update_lock, flags);
198 | 195 | wrmsrl(MSR_AMD64_PATCH_LOADER, &mc_amd->hdr.data_code); | |
199 | addr = (unsigned long)&mc_amd->hdr.data_code; | ||
200 | edx = (unsigned int)(((unsigned long)upper_32_bits(addr))); | ||
201 | eax = (unsigned int)(((unsigned long)lower_32_bits(addr))); | ||
202 | |||
203 | asm volatile("movl %0, %%ecx; wrmsr" : | ||
204 | : "i" (0xc0010020), "a" (eax), "d" (edx) : "ecx"); | ||
205 | |||
206 | /* get patch id after patching */ | 196 | /* get patch id after patching */ |
207 | asm volatile("movl %1, %%ecx; rdmsr" | 197 | rdmsr(MSR_AMD64_PATCH_LEVEL, rev, dummy); |
208 | : "=a" (rev) | ||
209 | : "i" (0x0000008B) : "ecx"); | ||
210 | |||
211 | spin_unlock_irqrestore(&microcode_update_lock, flags); | 198 | spin_unlock_irqrestore(&microcode_update_lock, flags);
212 | 199 | ||
213 | /* check current patch id and patch's id for match */ | 200 | /* check current patch id and patch's id for match */ |