author	Glauber de Oliveira Costa <gcosta@redhat.com>	2008-01-30 07:31:08 -0500
committer	Ingo Molnar <mingo@elte.hu>	2008-01-30 07:31:08 -0500
commit	d3ca901f94b3299dfe3a814770d751844608419f (patch)
tree	f2679001320446acdddc02a88dafdd4ea5120d1e /include/asm-x86/system.h
parent	a6b4655258efd39b590e519815ed43bb74cd7188 (diff)
x86: unify paravirt parts of system.h
This patch moves the i386 control register manipulation functions,
together with wbinvd and clts, to system.h. They are essentially the
same as their x86_64 counterparts.
With this, the paravirt parts of system.h come for free on x86_64.
[ mingo@elte.hu: reintroduced the cr8 bits - needed for resume images ]
Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
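
For illustration, a minimal sketch of what callers gain from the unified
macros. The fpu_begin()/fpu_end() wrappers below are hypothetical, not part
of this patch; clts(), stts(), read_cr0() and write_cr0() are the interfaces
defined in the diff, resolving either to the native_* inlines or, under
CONFIG_PARAVIRT, to the hooked versions from asm/paravirt.h:

	#include <asm/system.h>	/* clts(), stts(), read_cr0(), write_cr0() */

	/* Hypothetical wrappers, assuming the macros this patch defines.
	 * clts() clears CR0.TS so FPU instructions stop raising #NM;
	 * stts() sets it again via write_cr0(8 | read_cr0()).
	 * The same code now builds on i386 and x86_64, native or paravirt. */
	static inline void fpu_begin(void)
	{
		clts();		/* clear CR0.TS */
	}

	static inline void fpu_end(void)
	{
		stts();		/* set CR0.TS: next FPU insn faults again */
	}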
Diffstat (limited to 'include/asm-x86/system.h')
-rw-r--r--	include/asm-x86/system.h	110
1 file changed, 110 insertions(+), 0 deletions(-)
diff --git a/include/asm-x86/system.h b/include/asm-x86/system.h
index 3740bada097c..01ba1f8e64d1 100644
--- a/include/asm-x86/system.h
+++ b/include/asm-x86/system.h
@@ -3,6 +3,8 @@
 
 #include <asm/asm.h>
 
+#include <linux/kernel.h>
+
 #ifdef CONFIG_X86_32
 # include "system_32.h"
 #else
@@ -38,6 +40,8 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define set_base(ldt, base) _set_base(((char *)&(ldt)) , (base))
 #define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
 
+extern void load_gs_index(unsigned);
+
 /*
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
@@ -72,6 +76,112 @@ static inline unsigned long get_limit(unsigned long segment)
 		:"=r" (__limit):"r" (segment));
 	return __limit+1;
 }
+
+static inline void native_clts(void)
+{
+	asm volatile ("clts");
+}
+
+/*
+ * Volatile isn't enough to prevent the compiler from reordering the
+ * read/write functions for the control registers and messing everything up.
+ * A memory clobber would solve the problem, but would prevent reordering of
+ * all loads stores around it, which can hurt performance. Solution is to
+ * use a variable and mimic reads and writes to it to enforce serialization
+ */
+static unsigned long __force_order;
+
+static inline unsigned long native_read_cr0(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr0(unsigned long val)
+{
+	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr2(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr2(unsigned long val)
+{
+	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr3(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr3(unsigned long val)
+{
+	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr4(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline unsigned long native_read_cr4_safe(void)
+{
+	unsigned long val;
+	/* This could fault if %cr4 does not exist. In x86_64, a cr4 always
+	 * exists, so it will never fail. */
+#ifdef CONFIG_X86_32
+	asm volatile("1: mov %%cr4, %0 \n"
+		     "2: \n"
+		     ".section __ex_table,\"a\" \n"
+		     ".long 1b,2b \n"
+		     ".previous \n"
+		     : "=r" (val), "=m" (__force_order) : "0" (0));
+#else
+	val = native_read_cr4();
+#endif
+	return val;
+}
+
+static inline void native_write_cr4(unsigned long val)
+{
+	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
+}
+
+static inline void native_wbinvd(void)
+{
+	asm volatile("wbinvd": : :"memory");
+}
+#ifdef CONFIG_PARAVIRT
+#include <asm/paravirt.h>
+#else
+#define read_cr0()	(native_read_cr0())
+#define write_cr0(x)	(native_write_cr0(x))
+#define read_cr2()	(native_read_cr2())
+#define write_cr2(x)	(native_write_cr2(x))
+#define read_cr3()	(native_read_cr3())
+#define write_cr3(x)	(native_write_cr3(x))
+#define read_cr4()	(native_read_cr4())
+#define read_cr4_safe()	(native_read_cr4_safe())
+#define write_cr4(x)	(native_write_cr4(x))
+#define wbinvd()	(native_wbinvd())
+
+/* Clear the 'TS' bit */
+#define clts()		(native_clts())
+
+#endif /* CONFIG_PARAVIRT */
+
+#define stts() write_cr0(8 | read_cr0())
+
 #endif /* __KERNEL__ */
 
 static inline void clflush(void *__p)
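
A note on the __force_order idiom above: each read lists the dummy variable
as a "=m" output and each write lists it as an "m" input, so consecutive
control-register accesses are chained through an artificial data dependency
the compiler may not reorder, while unrelated loads and stores remain free to
move (unlike with a full "memory" clobber). A reduced stand-alone sketch of
the same technique, with placeholder asm bodies rather than the kernel's:

	/* Stand-alone illustration, not kernel code: two asm statements
	 * kept in program order via a dummy dependency. */
	static unsigned long __force_order;

	static inline void op_a(void)
	{
		/* claims to write __force_order ("=m" output) */
		asm volatile("nop" : "=m" (__force_order));
	}

	static inline void op_b(void)
	{
		/* claims to read __force_order ("m" input), so it cannot
		 * be hoisted above a preceding op_a() */
		asm volatile("nop" : : "m" (__force_order));
	}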