author		Rusty Russell <rusty@rustcorp.com.au>	2007-05-02 13:27:10 -0400
committer	Andi Kleen <andi@basil.nowhere.org>	2007-05-02 13:27:10 -0400
commit		90a0a06aa81692028864c21f981905fda46b1208 (patch)
tree		516528b328d5288ee057d1eff5491e2ba1b49af1	/include/asm-i386/system.h
parent		52de74dd3994e165ef1b35c33d54655a6400e30c (diff)
[PATCH] i386: rationalize paravirt wrappers
paravirt.c used to implement native versions of all low-level functions.  Far cleaner is to have the native versions exposed in the headers and as inline native_XXX, and if !CONFIG_PARAVIRT, then simply #define XXX native_XXX.

There are several nice side effects:

1) write_dt_entry() now takes the correct "struct Xgt_desc_struct *" not "void *".

2) load_TLS is reintroduced to the for loop, not manually unrolled with a #error in case the bounds ever change.

3) Macros become inlines, with type checking.

4) Access to the native versions is trivial for KVM, lguest, Xen and others who might want it.

Signed-off-by: Jeremy Fitzhardinge <jeremy@xensource.com>
Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
Signed-off-by: Andi Kleen <ak@suse.de>
Cc: Andi Kleen <ak@muc.de>
Cc: Avi Kivity <avi@qumranet.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
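[Editor's sketch] In outline, each #else-guarded raw-asm macro is converted to the following shape. This is a minimal illustrative sketch, not code from the patch itself; example_op/native_example_op are invented names standing in for clts, the cr0-cr4 accessors and wbinvd:

/*
 * Sketch of the native_XXX pattern described above: the native
 * implementation is always visible as a typed inline, and the
 * generic name is either taken over by <asm/paravirt.h> or
 * aliased to the native version by a trivial #define.
 */
static inline unsigned long native_example_op(void)
{
	unsigned long val;
	asm volatile("movl %%cr3,%0" : "=r" (val));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>	/* may route example_op() through a hypervisor hook */
#else
#define example_op()	(native_example_op())
#endif

Because the generic name now expands to a real function call, callers get argument and return-type checking for free, which is side effect 3) above.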
Diffstat (limited to 'include/asm-i386/system.h')
-rw-r--r--	include/asm-i386/system.h	139
1 file changed, 85 insertions(+), 54 deletions(-)
diff --git a/include/asm-i386/system.h b/include/asm-i386/system.h
index a6d20d9a1a30..c3a58c08c495 100644
--- a/include/asm-i386/system.h
+++ b/include/asm-i386/system.h
@@ -88,65 +88,96 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define savesegment(seg, value) \
 	asm volatile("mov %%" #seg ",%0":"=rm" (value))
 
+
+static inline void native_clts(void)
+{
+	asm volatile ("clts");
+}
+
+static inline unsigned long native_read_cr0(void)
+{
+	unsigned long val;
+	asm volatile("movl %%cr0,%0\n\t" :"=r" (val));
+	return val;
+}
+
+static inline void native_write_cr0(unsigned long val)
+{
+	asm volatile("movl %0,%%cr0": :"r" (val));
+}
+
+static inline unsigned long native_read_cr2(void)
+{
+	unsigned long val;
+	asm volatile("movl %%cr2,%0\n\t" :"=r" (val));
+	return val;
+}
+
+static inline void native_write_cr2(unsigned long val)
+{
+	asm volatile("movl %0,%%cr2": :"r" (val));
+}
+
+static inline unsigned long native_read_cr3(void)
+{
+	unsigned long val;
+	asm volatile("movl %%cr3,%0\n\t" :"=r" (val));
+	return val;
+}
+
+static inline void native_write_cr3(unsigned long val)
+{
+	asm volatile("movl %0,%%cr3": :"r" (val));
+}
+
+static inline unsigned long native_read_cr4(void)
+{
+	unsigned long val;
+	asm volatile("movl %%cr4,%0\n\t" :"=r" (val));
+	return val;
+}
+
+static inline unsigned long native_read_cr4_safe(void)
+{
+	unsigned long val;
+	/* This could fault if %cr4 does not exist */
+	asm("1: movl %%cr4, %0 \n"
+	    "2: \n"
+	    ".section __ex_table,\"a\" \n"
+	    ".long 1b,2b \n"
+	    ".previous \n"
+	    : "=r" (val): "0" (0));
+	return val;
+}
+
+static inline void native_write_cr4(unsigned long val)
+{
+	asm volatile("movl %0,%%cr4": :"r" (val));
+}
+
+static inline void native_wbinvd(void)
+{
+	asm volatile("wbinvd": : :"memory");
+}
+
+
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
-#define read_cr0() ({ \
-	unsigned int __dummy; \
-	__asm__ __volatile__( \
-		"movl %%cr0,%0\n\t" \
-		:"=r" (__dummy)); \
-	__dummy; \
-})
-#define write_cr0(x) \
-	__asm__ __volatile__("movl %0,%%cr0": :"r" (x))
-
-#define read_cr2() ({ \
-	unsigned int __dummy; \
-	__asm__ __volatile__( \
-		"movl %%cr2,%0\n\t" \
-		:"=r" (__dummy)); \
-	__dummy; \
-})
-#define write_cr2(x) \
-	__asm__ __volatile__("movl %0,%%cr2": :"r" (x))
-
-#define read_cr3() ({ \
-	unsigned int __dummy; \
-	__asm__ ( \
-		"movl %%cr3,%0\n\t" \
-		:"=r" (__dummy)); \
-	__dummy; \
-})
-#define write_cr3(x) \
-	__asm__ __volatile__("movl %0,%%cr3": :"r" (x))
-
-#define read_cr4() ({ \
-	unsigned int __dummy; \
-	__asm__( \
-		"movl %%cr4,%0\n\t" \
-		:"=r" (__dummy)); \
-	__dummy; \
-})
-#define read_cr4_safe() ({ \
-	unsigned int __dummy; \
-	/* This could fault if %cr4 does not exist */ \
-	__asm__("1: movl %%cr4, %0 \n" \
-		"2: \n" \
-		".section __ex_table,\"a\" \n" \
-		".long 1b,2b \n" \
-		".previous \n" \
-		: "=r" (__dummy): "0" (0)); \
-	__dummy; \
-})
-#define write_cr4(x) \
-	__asm__ __volatile__("movl %0,%%cr4": :"r" (x))
-
-#define wbinvd() \
-	__asm__ __volatile__ ("wbinvd": : :"memory")
-
+#define read_cr0()	(native_read_cr0())
+#define write_cr0(x)	(native_write_cr0(x))
+#define read_cr2()	(native_read_cr2())
+#define write_cr2(x)	(native_write_cr2(x))
+#define read_cr3()	(native_read_cr3())
+#define write_cr3(x)	(native_write_cr3(x))
+#define read_cr4()	(native_read_cr4())
+#define read_cr4_safe()	(native_read_cr4_safe())
+#define write_cr4(x)	(native_write_cr4(x))
+#define wbinvd()	(native_wbinvd())
+
 /* Clear the 'TS' bit */
-#define clts() __asm__ __volatile__ ("clts")
+#define clts()		(native_clts())
+
 #endif /* CONFIG_PARAVIRT */
 
 /* Set the 'TS' bit */
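[Editor's note] read_cr4_safe() above relies on the kernel's __ex_table fixup mechanism rather than a CPU feature check. The following annotated sketch uses the same technique as the patch's native_read_cr4_safe(); the function name read_reg_or_zero is invented for illustration:

/*
 * If the mov faults (e.g. on a CPU without %cr4), the fault handler
 * looks the faulting address (label 1:) up in __ex_table and resumes
 * at the paired fixup address (label 2:), so the function returns the
 * 0 preloaded into the output register by the "0" (0) constraint.
 */
static inline unsigned long read_reg_or_zero(void)
{
	unsigned long val;
	asm("1: movl %%cr4, %0 \n"
	    "2: \n"
	    ".section __ex_table,\"a\" \n"	/* table of (fault, fixup) address pairs */
	    ".long 1b,2b \n"
	    ".previous \n"
	    : "=r" (val) : "0" (0));
	return val;
}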