path: root/include/asm-x86/system_64.h
author     Glauber de Oliveira Costa <gcosta@redhat.com>   2008-01-30 07:31:08 -0500
committer  Ingo Molnar <mingo@elte.hu>                     2008-01-30 07:31:08 -0500
commit     d3ca901f94b3299dfe3a814770d751844608419f (patch)
tree       f2679001320446acdddc02a88dafdd4ea5120d1e /include/asm-x86/system_64.h
parent     a6b4655258efd39b590e519815ed43bb74cd7188 (diff)
x86: unify paravirt parts of system.h
This patch moves the i386 control registers manipulation functions, wbinvd, and clts functions to system.h. They are essentially the same as in x86_64. With this, system.h paravirt comes for free in x86_64.

[ mingo@elte.hu: reintroduced the cr8 bits - needed for resume images ]

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
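For context, the unification the commit message describes relies on keeping raw accessors in a shared header and letting a paravirt layer override them. The sketch below is illustrative only: it assumes the unified asm-x86/system.h follows the i386-style convention of native_* helpers, and the names native_read_cr0/native_write_cr0 and the CONFIG_PARAVIRT switch are taken as assumptions rather than quoted from the patch.

/*
 * Illustrative sketch (not quoted from the patch): raw "native_" accessors
 * that either back read_cr0()/write_cr0() directly or get routed through
 * the paravirt ops when CONFIG_PARAVIRT is enabled.
 */
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	/* "mov %%cr0,%0" assembles to the right width on both i386 and x86_64 */
	asm volatile("mov %%cr0,%0" : "=r" (val));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0" : : "r" (val));
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>	/* paravirt layer overrides read_cr0()/write_cr0() */
#else
#define read_cr0()	native_read_cr0()
#define write_cr0(x)	native_write_cr0(x)
#endif

Because the asm is identical on both architectures, sharing these helpers is what lets x86_64 pick up the paravirt plumbing "for free" once the common header is in place.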
Diffstat (limited to 'include/asm-x86/system_64.h')
-rw-r--r--  include/asm-x86/system_64.h  85
1 files changed, 12 insertions, 73 deletions
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index 0885caace5d4..14ad6138439b 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -1,7 +1,6 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
 
-#include <linux/kernel.h>
 #include <asm/segment.h>
 #include <asm/cmpxchg.h>
 
@@ -47,78 +46,6 @@
 	[pda_pcurrent] "i" (offsetof(struct x8664_pda, pcurrent)) \
 	: "memory", "cc" __EXTRA_CLOBBER)
 
-extern void load_gs_index(unsigned);
-
-/*
- * Clear and set 'TS' bit respectively
- */
-#define clts() __asm__ __volatile__ ("clts")
-
-static inline unsigned long read_cr0(void)
-{
-	unsigned long cr0;
-	asm volatile("movq %%cr0,%0" : "=r" (cr0));
-	return cr0;
-}
-
-static inline void write_cr0(unsigned long val)
-{
-	asm volatile("movq %0,%%cr0" :: "r" (val));
-}
-
-static inline unsigned long read_cr2(void)
-{
-	unsigned long cr2;
-	asm volatile("movq %%cr2,%0" : "=r" (cr2));
-	return cr2;
-}
-
-static inline void write_cr2(unsigned long val)
-{
-	asm volatile("movq %0,%%cr2" :: "r" (val));
-}
-
-static inline unsigned long read_cr3(void)
-{
-	unsigned long cr3;
-	asm volatile("movq %%cr3,%0" : "=r" (cr3));
-	return cr3;
-}
-
-static inline void write_cr3(unsigned long val)
-{
-	asm volatile("movq %0,%%cr3" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr4(void)
-{
-	unsigned long cr4;
-	asm volatile("movq %%cr4,%0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(unsigned long val)
-{
-	asm volatile("movq %0,%%cr4" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr8(void)
-{
-	unsigned long cr8;
-	asm volatile("movq %%cr8,%0" : "=r" (cr8));
-	return cr8;
-}
-
-static inline void write_cr8(unsigned long val)
-{
-	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
-}
-
-#define stts() write_cr0(8 | read_cr0())
-
-#define wbinvd() \
-	__asm__ __volatile__ ("wbinvd": : :"memory")
-
 #endif /* __KERNEL__ */
 
 #ifdef CONFIG_SMP
@@ -148,6 +75,18 @@ static inline void write_cr8(unsigned long val)
 
 #define warn_if_not_ulong(x) do { unsigned long foo; (void) (&(x) == &foo); } while (0)
 
+static inline unsigned long read_cr8(void)
+{
+	unsigned long cr8;
+	asm volatile("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
+
+static inline void write_cr8(unsigned long val)
+{
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
+}
+
 #include <linux/irqflags.h>
 
 #endif
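The read_cr8/write_cr8 accessors stay behind in system_64.h because CR8 is the x86_64 task-priority register (TPR), which, per the note in the commit message, must be preserved across suspend/resume. A minimal, hypothetical usage sketch follows; the struct and function names are invented for illustration, not kernel interfaces.

/*
 * Hypothetical illustration only: saving and restoring CR8 (the x86_64
 * task-priority register) around a suspend/resume cycle, in the spirit of
 * the "needed for resume images" note. struct saved_tpr, save_tpr() and
 * restore_tpr() are made-up names.
 */
struct saved_tpr {
	unsigned long cr8;
};

static void save_tpr(struct saved_tpr *ctx)
{
	ctx->cr8 = read_cr8();		/* movq %%cr8,%0 */
}

static void restore_tpr(const struct saved_tpr *ctx)
{
	write_cr8(ctx->cr8);		/* movq %0,%%cr8 */
}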