aboutsummaryrefslogtreecommitdiffstats
path: root/include
diff options
context:
space:
mode:
authorGlauber de Oliveira Costa <gcosta@redhat.com>2008-01-30 07:31:07 -0500
committerIngo Molnar <mingo@elte.hu>2008-01-30 07:31:07 -0500
commit56ec1ddcff967e51d98427e4efcbfc90de67efe3 (patch)
tree00d8206d37cfdc929d6d58001eefe244e9af0287 /include
parentc9dcda5ce46c395c5c99003e259e1973dface640 (diff)
x86: make fixups wordsize agnostic
This patch uses the _ASM_ALIGN and _ASM_PTR macros to make the fixups in native_read/write_msr_safe look the same for x86_64 and i386. Besides using these macros, we also have to take the explicit instruction suffixes out. It's okay because all these instructions use registers, and can be sized by them. Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com> Signed-off-by: Ingo Molnar <mingo@elte.hu> Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include')
-rw-r--r--include/asm-x86/msr.h17
1 files changed, 9 insertions, 8 deletions
diff --git a/include/asm-x86/msr.h b/include/asm-x86/msr.h
index cb7222358897..792fde2e8908 100644
--- a/include/asm-x86/msr.h
+++ b/include/asm-x86/msr.h
@@ -33,6 +33,7 @@ static inline unsigned long long native_read_tscp(int *aux)
33#ifdef __KERNEL__ 33#ifdef __KERNEL__
34#ifndef __ASSEMBLY__ 34#ifndef __ASSEMBLY__
35 35
36#include <asm/asm.h>
36#include <asm/errno.h> 37#include <asm/errno.h>
37 38
38static inline unsigned long long native_read_msr(unsigned int msr) 39static inline unsigned long long native_read_msr(unsigned int msr)
@@ -48,14 +49,14 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr,
48{ 49{
49 unsigned long long val; 50 unsigned long long val;
50 51
51 asm volatile("2: rdmsr ; xorl %0,%0\n" 52 asm volatile("2: rdmsr ; xor %0,%0\n"
52 "1:\n\t" 53 "1:\n\t"
53 ".section .fixup,\"ax\"\n\t" 54 ".section .fixup,\"ax\"\n\t"
54 "3: movl %3,%0 ; jmp 1b\n\t" 55 "3: mov %3,%0 ; jmp 1b\n\t"
55 ".previous\n\t" 56 ".previous\n\t"
56 ".section __ex_table,\"a\"\n" 57 ".section __ex_table,\"a\"\n"
57 " .align 4\n\t" 58 _ASM_ALIGN "\n\t"
58 " .long 2b,3b\n\t" 59 _ASM_PTR " 2b,3b\n\t"
59 ".previous" 60 ".previous"
60 : "=r" (*err), "=A" (val) 61 : "=r" (*err), "=A" (val)
61 : "c" (msr), "i" (-EFAULT)); 62 : "c" (msr), "i" (-EFAULT));
@@ -73,14 +74,14 @@ static inline int native_write_msr_safe(unsigned int msr,
73 unsigned low, unsigned high) 74 unsigned low, unsigned high)
74{ 75{
75 int err; 76 int err;
76 asm volatile("2: wrmsr ; xorl %0,%0\n" 77 asm volatile("2: wrmsr ; xor %0,%0\n"
77 "1:\n\t" 78 "1:\n\t"
78 ".section .fixup,\"ax\"\n\t" 79 ".section .fixup,\"ax\"\n\t"
79 "3: movl %4,%0 ; jmp 1b\n\t" 80 "3: mov %4,%0 ; jmp 1b\n\t"
80 ".previous\n\t" 81 ".previous\n\t"
81 ".section __ex_table,\"a\"\n" 82 ".section __ex_table,\"a\"\n"
82 " .align 4\n\t" 83 _ASM_ALIGN "\n\t"
83 " .long 2b,3b\n\t" 84 _ASM_PTR " 2b,3b\n\t"
84 ".previous" 85 ".previous"
85 : "=a" (err) 86 : "=a" (err)
86 : "c" (msr), "0" (low), "d" (high), 87 : "c" (msr), "0" (low), "d" (high),