author     Kirill Korotaev <dev@openvz.org>                       2007-10-17 12:04:33 -0400
committer  Thomas Gleixner <tglx@inhelltoy.tec.linutronix.de>     2007-10-17 14:15:31 -0400
commit     c1217a75ea102d4e69321f210fab60bc47b9a48e (patch)
tree       fffa575916b27bac17dfb751300ed2e1f9107f0c /include/asm-x86
parent     f891dd18c107d582c3ab98da5209c930e16baf78 (diff)
x86: mark read_crX() asm code as volatile
Some gcc versions (at least 4.1.1 from RHEL5 and 4.1.2 from Gentoo were checked)
can generate incorrect code that mixes up read_crX()/write_crX() calls, because
the result of a non-volatile read_crX() is cached and reused.

A small x86_64 test program compiled with -O2 demonstrates this (i686 behaves
the same way); a userspace sketch in that spirit follows:
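The test program from the original commit is not reproduced here. As an
illustration only, a minimal userspace stand-in with the same asm shape can
show the behaviour; fake_cr3, fake_read_cr3() and fake_write_cr3() are
hypothetical helpers that mimic the kernel accessors, not kernel code, and the
miscompilation only appears with an affected gcc:

#include <stdio.h>

static unsigned long fake_cr3;	/* hypothetical stand-in for %cr3 */

static inline unsigned long fake_read_cr3(void)
{
	unsigned long val;
	/* Same shape as the old read_cr3(): output-only asm, no volatile. */
	asm("movq (%1), %0" : "=r" (val) : "r" (&fake_cr3));
	return val;
}

static inline void fake_write_cr3(unsigned long val)
{
	/* Same shape as write_cr3(): volatile asm with a "memory" clobber. */
	asm volatile("movq %0, (%1)" : : "r" (val), "r" (&fake_cr3) : "memory");
}

int main(void)
{
	fake_write_cr3(0x1000);
	unsigned long before = fake_read_cr3();
	fake_write_cr3(0x2000);
	unsigned long after = fake_read_cr3();
	/*
	 * With an affected gcc at -O2 the two identical non-volatile asms may
	 * be CSE'd, so "after" can print 0x1000 even though the value was
	 * rewritten in between.  Marking the read asm volatile prevents this.
	 */
	printf("before=%#lx after=%#lx\n", before, after);
	return 0;
}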
Diffstat (limited to 'include/asm-x86')
-rw-r--r--  include/asm-x86/system_32.h  2
-rw-r--r--  include/asm-x86/system_64.h  8
2 files changed, 5 insertions, 5 deletions
diff --git a/include/asm-x86/system_32.h b/include/asm-x86/system_32.h
index d84e593b7dfc..1d6fb3afa533 100644
--- a/include/asm-x86/system_32.h
+++ b/include/asm-x86/system_32.h
@@ -142,7 +142,7 @@ static inline unsigned long native_read_cr4_safe(void)
 {
 	unsigned long val;
 	/* This could fault if %cr4 does not exist */
-	asm("1: movl %%cr4, %0		\n"
+	asm volatile("1: movl %%cr4, %0		\n"
 		"2:				\n"
 		".section __ex_table,\"a\"	\n"
 		".long 1b,2b			\n"
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index 5022aecc333d..fb4bcf99e665 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -85,7 +85,7 @@ static inline void write_cr0(unsigned long val)
 static inline unsigned long read_cr2(void)
 {
 	unsigned long cr2;
-	asm("movq %%cr2,%0" : "=r" (cr2));
+	asm volatile("movq %%cr2,%0" : "=r" (cr2));
 	return cr2;
 }
 
@@ -97,7 +97,7 @@ static inline void write_cr2(unsigned long val)
 static inline unsigned long read_cr3(void)
 {
 	unsigned long cr3;
-	asm("movq %%cr3,%0" : "=r" (cr3));
+	asm volatile("movq %%cr3,%0" : "=r" (cr3));
 	return cr3;
 }
 
@@ -109,7 +109,7 @@ static inline void write_cr3(unsigned long val)
 static inline unsigned long read_cr4(void)
 {
 	unsigned long cr4;
-	asm("movq %%cr4,%0" : "=r" (cr4));
+	asm volatile("movq %%cr4,%0" : "=r" (cr4));
 	return cr4;
 }
 
@@ -121,7 +121,7 @@ static inline void write_cr4(unsigned long val)
 static inline unsigned long read_cr8(void)
 {
 	unsigned long cr8;
-	asm("movq %%cr8,%0" : "=r" (cr8));
+	asm volatile("movq %%cr8,%0" : "=r" (cr8));
 	return cr8;
 }
 