author	Glauber de Oliveira Costa <gcosta@redhat.com>	2008-01-30 07:31:08 -0500
committer	Ingo Molnar <mingo@elte.hu>	2008-01-30 07:31:08 -0500
commit	d3ca901f94b3299dfe3a814770d751844608419f
tree	f2679001320446acdddc02a88dafdd4ea5120d1e /include/asm-x86
parent	a6b4655258efd39b590e519815ed43bb74cd7188
x86: unify paravirt parts of system.h
This patch moves the i386 control register manipulation functions,
together with wbinvd and clts, into system.h. They are essentially the
same as their x86_64 counterparts. With this, the paravirt parts of
system.h come for free on x86_64.

[ mingo@elte.hu: reintroduced the cr8 bits - needed for resume images ]

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include/asm-x86')
-rw-r--r--	include/asm-x86/system.h	110
-rw-r--r--	include/asm-x86/system_32.h	 94
-rw-r--r--	include/asm-x86/system_64.h	 85
3 files changed, 122 insertions(+), 167 deletions(-)
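
[Editor's note] What the unification buys: after this patch a single set of
native_* accessors backs the control-register API on both 32- and 64-bit,
and CONFIG_PARAVIRT can interpose on all of them through one header. Below
is a minimal user-space sketch of that dispatch pattern. It is illustrative
only: fake_cr0 simulates the register so the sketch runs unprivileged, and
the pv_cpu_ops structure is a hypothetical stand-in for the kernel's real
paravirt machinery.

#include <stdio.h>

/* Sketch only: cr0 is simulated with a variable so this runs in
 * user space; the real accessors use privileged inline asm. */
static unsigned long fake_cr0 = 0x80050033;	/* illustrative value */

static unsigned long native_read_cr0(void)    { return fake_cr0; }
static void native_write_cr0(unsigned long v) { fake_cr0 = v; }

/* Hypothetical stand-in for the kernel's paravirt ops table: a
 * hypervisor would replace these pointers with its own hooks. */
struct pv_cpu_ops {
	unsigned long (*read_cr0)(void);
	void (*write_cr0)(unsigned long);
};

static struct pv_cpu_ops pv_cpu_ops = {
	.read_cr0  = native_read_cr0,
	.write_cr0 = native_write_cr0,
};

/* The unified header resolves the generic names one way or the other: */
#define read_cr0()	(pv_cpu_ops.read_cr0())
#define write_cr0(x)	(pv_cpu_ops.write_cr0(x))
#define stts()		write_cr0(8 | read_cr0())	/* set CR0.TS */

int main(void)
{
	stts();
	printf("cr0 = %#lx (TS=%lu)\n", read_cr0(), (read_cr0() >> 3) & 1);
	return 0;
}
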
diff --git a/include/asm-x86/system.h b/include/asm-x86/system.h
index 3740bada097c..01ba1f8e64d1 100644
--- a/include/asm-x86/system.h
+++ b/include/asm-x86/system.h
@@ -3,6 +3,8 @@
 
 #include <asm/asm.h>
 
+#include <linux/kernel.h>
+
 #ifdef CONFIG_X86_32
 # include "system_32.h"
 #else
@@ -38,6 +40,8 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define set_base(ldt, base) _set_base(((char *)&(ldt)) , (base))
 #define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
 
+extern void load_gs_index(unsigned);
+
 /*
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
@@ -72,6 +76,112 @@ static inline unsigned long get_limit(unsigned long segment)
72 :"=r" (__limit):"r" (segment)); 76 :"=r" (__limit):"r" (segment));
73 return __limit+1; 77 return __limit+1;
74} 78}
79
80static inline void native_clts(void)
81{
82 asm volatile ("clts");
83}
84
85/*
86 * Volatile isn't enough to prevent the compiler from reordering the
87 * read/write functions for the control registers and messing everything up.
88 * A memory clobber would solve the problem, but would prevent reordering of
89 * all loads stores around it, which can hurt performance. Solution is to
90 * use a variable and mimic reads and writes to it to enforce serialization
91 */
92static unsigned long __force_order;
93
94static inline unsigned long native_read_cr0(void)
95{
96 unsigned long val;
97 asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
98 return val;
99}
100
101static inline void native_write_cr0(unsigned long val)
102{
103 asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
104}
105
106static inline unsigned long native_read_cr2(void)
107{
108 unsigned long val;
109 asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
110 return val;
111}
112
113static inline void native_write_cr2(unsigned long val)
114{
115 asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
116}
117
118static inline unsigned long native_read_cr3(void)
119{
120 unsigned long val;
121 asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
122 return val;
123}
124
125static inline void native_write_cr3(unsigned long val)
126{
127 asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
128}
129
130static inline unsigned long native_read_cr4(void)
131{
132 unsigned long val;
133 asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
134 return val;
135}
136
137static inline unsigned long native_read_cr4_safe(void)
138{
139 unsigned long val;
140 /* This could fault if %cr4 does not exist. In x86_64, a cr4 always
141 * exists, so it will never fail. */
142#ifdef CONFIG_X86_32
143 asm volatile("1: mov %%cr4, %0 \n"
144 "2: \n"
145 ".section __ex_table,\"a\" \n"
146 ".long 1b,2b \n"
147 ".previous \n"
148 : "=r" (val), "=m" (__force_order) : "0" (0));
149#else
150 val = native_read_cr4();
151#endif
152 return val;
153}
154
155static inline void native_write_cr4(unsigned long val)
156{
157 asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
158}
159
160static inline void native_wbinvd(void)
161{
162 asm volatile("wbinvd": : :"memory");
163}
164#ifdef CONFIG_PARAVIRT
165#include <asm/paravirt.h>
166#else
167#define read_cr0() (native_read_cr0())
168#define write_cr0(x) (native_write_cr0(x))
169#define read_cr2() (native_read_cr2())
170#define write_cr2(x) (native_write_cr2(x))
171#define read_cr3() (native_read_cr3())
172#define write_cr3(x) (native_write_cr3(x))
173#define read_cr4() (native_read_cr4())
174#define read_cr4_safe() (native_read_cr4_safe())
175#define write_cr4(x) (native_write_cr4(x))
176#define wbinvd() (native_wbinvd())
177
178/* Clear the 'TS' bit */
179#define clts() (native_clts())
180
181#endif/* CONFIG_PARAVIRT */
182
183#define stts() write_cr0(8 | read_cr0())
184
75#endif /* __KERNEL__ */ 185#endif /* __KERNEL__ */
76 186
77static inline void clflush(void *__p) 187static inline void clflush(void *__p)
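
[Editor's note] The __force_order comment above is worth unpacking. As the
new code's own comment says, volatile alone is not trusted to keep the
control-register asm statements ordered among themselves, and a full
"memory" clobber would pessimize every surrounding load and store. Instead,
each accessor pretends to read or write one dummy variable, giving the
compiler an artificial dependency chain. Here is a user-space analogue (x86
GCC/Clang); fake_reg stands in for a control register, and the privileged
mov to/from %crN is replaced by an ordinary move so the sketch compiles and
runs outside ring 0:

static unsigned long fake_reg;
static unsigned long __force_order;

static inline void reg_write(unsigned long val)
{
	/* "m"(__force_order) is a dummy input: this asm appears to
	 * read the variable, so it cannot be hoisted above any asm
	 * that appears to write it. */
	asm volatile("mov %1,%0"
		     : "=m" (fake_reg)
		     : "r" (val), "m" (__force_order));
}

static inline unsigned long reg_read(void)
{
	unsigned long val;
	/* "=m"(__force_order) is a dummy output: later accessors
	 * that list the variable as an input stay ordered after it. */
	asm volatile("mov %2,%0"
		     : "=r" (val), "=m" (__force_order)
		     : "m" (fake_reg));
	return val;
}
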
diff --git a/include/asm-x86/system_32.h b/include/asm-x86/system_32.h
index 8db478984ed1..c05568290add 100644
--- a/include/asm-x86/system_32.h
+++ b/include/asm-x86/system_32.h
@@ -1,7 +1,6 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
 
-#include <linux/kernel.h>
 #include <asm/segment.h>
 #include <asm/cpufeature.h>
 #include <asm/cmpxchg.h>
@@ -34,99 +33,6 @@ extern struct task_struct * FASTCALL(__switch_to(struct task_struct *prev, struc
34 "2" (prev), "d" (next)); \ 33 "2" (prev), "d" (next)); \
35} while (0) 34} while (0)
36 35
37static inline void native_clts(void)
38{
39 asm volatile ("clts");
40}
41
42static inline unsigned long native_read_cr0(void)
43{
44 unsigned long val;
45 asm volatile("movl %%cr0,%0\n\t" :"=r" (val));
46 return val;
47}
48
49static inline void native_write_cr0(unsigned long val)
50{
51 asm volatile("movl %0,%%cr0": :"r" (val));
52}
53
54static inline unsigned long native_read_cr2(void)
55{
56 unsigned long val;
57 asm volatile("movl %%cr2,%0\n\t" :"=r" (val));
58 return val;
59}
60
61static inline void native_write_cr2(unsigned long val)
62{
63 asm volatile("movl %0,%%cr2": :"r" (val));
64}
65
66static inline unsigned long native_read_cr3(void)
67{
68 unsigned long val;
69 asm volatile("movl %%cr3,%0\n\t" :"=r" (val));
70 return val;
71}
72
73static inline void native_write_cr3(unsigned long val)
74{
75 asm volatile("movl %0,%%cr3": :"r" (val));
76}
77
78static inline unsigned long native_read_cr4(void)
79{
80 unsigned long val;
81 asm volatile("movl %%cr4,%0\n\t" :"=r" (val));
82 return val;
83}
84
85static inline unsigned long native_read_cr4_safe(void)
86{
87 unsigned long val;
88 /* This could fault if %cr4 does not exist */
89 asm volatile("1: movl %%cr4, %0 \n"
90 "2: \n"
91 ".section __ex_table,\"a\" \n"
92 ".long 1b,2b \n"
93 ".previous \n"
94 : "=r" (val): "0" (0));
95 return val;
96}
97
98static inline void native_write_cr4(unsigned long val)
99{
100 asm volatile("movl %0,%%cr4": :"r" (val));
101}
102
103static inline void native_wbinvd(void)
104{
105 asm volatile("wbinvd": : :"memory");
106}
107
108#ifdef CONFIG_PARAVIRT
109#include <asm/paravirt.h>
110#else
111#define read_cr0() (native_read_cr0())
112#define write_cr0(x) (native_write_cr0(x))
113#define read_cr2() (native_read_cr2())
114#define write_cr2(x) (native_write_cr2(x))
115#define read_cr3() (native_read_cr3())
116#define write_cr3(x) (native_write_cr3(x))
117#define read_cr4() (native_read_cr4())
118#define read_cr4_safe() (native_read_cr4_safe())
119#define write_cr4(x) (native_write_cr4(x))
120#define wbinvd() (native_wbinvd())
121
122/* Clear the 'TS' bit */
123#define clts() (native_clts())
124
125#endif/* CONFIG_PARAVIRT */
126
127/* Set the 'TS' bit */
128#define stts() write_cr0(8 | read_cr0())
129
130#endif /* __KERNEL__ */ 36#endif /* __KERNEL__ */
131 37
132 38
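
[Editor's note] A small decoder ring for the magic constant in stts(): CR0
bit 3 is the TS (task switched) flag, so the mask 8 is just 1 << 3. Setting
it makes the next FPU instruction raise a device-not-available fault (#NM),
which the kernel exploits for lazy FPU context switching; clts clears the
bit again. A trivial, runnable model of the bit arithmetic:

#include <assert.h>
#include <stdio.h>

#define X86_CR0_TS (1UL << 3)	/* Task Switched flag == the 8 in stts() */

int main(void)
{
	unsigned long cr0 = 0x80050031UL;	/* illustrative CR0 value */

	cr0 |= X86_CR0_TS;	/* stts(): write_cr0(8 | read_cr0()) */
	assert(cr0 & X86_CR0_TS);

	cr0 &= ~X86_CR0_TS;	/* clts clears the same bit in ring 0 */
	assert(!(cr0 & X86_CR0_TS));

	printf("TS flag round-trip ok\n");
	return 0;
}
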
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index 0885caace5d4..14ad6138439b 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -1,7 +1,6 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
 
-#include <linux/kernel.h>
 #include <asm/segment.h>
 #include <asm/cmpxchg.h>
 
@@ -47,78 +46,6 @@
 	     [pda_pcurrent] "i" (offsetof(struct x8664_pda, pcurrent))   \
 	     : "memory", "cc" __EXTRA_CLOBBER)
 
-extern void load_gs_index(unsigned);
-
-/*
- * Clear and set 'TS' bit respectively
- */
-#define clts() __asm__ __volatile__ ("clts")
-
-static inline unsigned long read_cr0(void)
-{
-	unsigned long cr0;
-	asm volatile("movq %%cr0,%0" : "=r" (cr0));
-	return cr0;
-}
-
-static inline void write_cr0(unsigned long val)
-{
-	asm volatile("movq %0,%%cr0" :: "r" (val));
-}
-
-static inline unsigned long read_cr2(void)
-{
-	unsigned long cr2;
-	asm volatile("movq %%cr2,%0" : "=r" (cr2));
-	return cr2;
-}
-
-static inline void write_cr2(unsigned long val)
-{
-	asm volatile("movq %0,%%cr2" :: "r" (val));
-}
-
-static inline unsigned long read_cr3(void)
-{
-	unsigned long cr3;
-	asm volatile("movq %%cr3,%0" : "=r" (cr3));
-	return cr3;
-}
-
-static inline void write_cr3(unsigned long val)
-{
-	asm volatile("movq %0,%%cr3" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr4(void)
-{
-	unsigned long cr4;
-	asm volatile("movq %%cr4,%0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(unsigned long val)
-{
-	asm volatile("movq %0,%%cr4" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr8(void)
-{
-	unsigned long cr8;
-	asm volatile("movq %%cr8,%0" : "=r" (cr8));
-	return cr8;
-}
-
-static inline void write_cr8(unsigned long val)
-{
-	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
-}
-
-#define stts() write_cr0(8 | read_cr0())
-
-#define wbinvd() \
-	__asm__ __volatile__ ("wbinvd": : :"memory")
-
 #endif /* __KERNEL__ */
 
 #ifdef CONFIG_SMP
@@ -148,6 +75,18 @@ static inline void write_cr8(unsigned long val)
 
 #define warn_if_not_ulong(x) do { unsigned long foo; (void) (&(x) == &foo); } while (0)
 
+static inline unsigned long read_cr8(void)
+{
+	unsigned long cr8;
+	asm volatile("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
+
+static inline void write_cr8(unsigned long val)
+{
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
+}
+
 #include <linux/irqflags.h>
 
 #endif
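
[Editor's note] With the accessors unified, consumers no longer need
per-architecture spellings. One classic consumer is the TLB flush, which on
x86 is a CR3 reload. A sketch of how such code reads against the unified
header; it assumes the definitions added above and only makes sense in
ring 0, so it is illustrative rather than directly runnable:

/* Sketch, assuming the unified <asm/system.h> definitions above. */
static inline void flush_tlb_sketch(void)
{
	/* Writing CR3 back to itself flushes non-global TLB entries. */
	write_cr3(read_cr3());
}

static inline void fpu_lazy_switch_sketch(void)
{
	stts();		/* next FPU instruction will trap (#NM) */
}
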