diff options
author | H. Peter Anvin <hpa@zytor.com> | 2008-01-30 07:30:30 -0500 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2008-01-30 07:30:30 -0500 |
commit | 1d8a1f6b51f6b195dfdcf05821be97edede5664a (patch) | |
tree | 1c14f052b49aa7253135356e7e9716f425f41efc /include | |
parent | 2b8e05b5677d2b4f3cd218ee90a7332715cb262f (diff) |
x86: prepare merger of <asm/alternative_{32,64}.h>
Prepare for merging <asm/alternative_{32,64}.h> by making the 32- and
64-bit versions textually identical. This involves:
- removing arbitrary header inclusion differences
- reorganizing the 32-bit version slightly to match the 64-bit version
- using <asm/asm.h> to unify the assembly code
- renaming struct paravirt_patch to struct paravirt_patch_site in the
64-bit version to match the 32-bit version; there are no references
to struct paravirt_patch elsewhere in the tree.
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include')
-rw-r--r-- | include/asm-x86/alternative_32.h | 131 | ||||
-rw-r--r-- | include/asm-x86/alternative_64.h | 84 |
2 files changed, 112 insertions(+), 103 deletions(-)
diff --git a/include/asm-x86/alternative_32.h b/include/asm-x86/alternative_32.h index bda6c810c0f4..4919b5ee8b9f 100644 --- a/include/asm-x86/alternative_32.h +++ b/include/asm-x86/alternative_32.h | |||
@@ -1,23 +1,63 @@ | |||
1 | #ifndef _I386_ALTERNATIVE_H | 1 | #ifndef _I386_ALTERNATIVE_H |
2 | #define _I386_ALTERNATIVE_H | 2 | #define _I386_ALTERNATIVE_H |
3 | 3 | ||
4 | #include <asm/types.h> | ||
5 | #include <linux/stddef.h> | ||
6 | #include <linux/types.h> | 4 | #include <linux/types.h> |
5 | #include <linux/stddef.h> | ||
6 | #include <asm/asm.h> | ||
7 | |||
8 | /* | ||
9 | * Alternative inline assembly for SMP. | ||
10 | * | ||
11 | * The LOCK_PREFIX macro defined here replaces the LOCK and | ||
12 | * LOCK_PREFIX macros used everywhere in the source tree. | ||
13 | * | ||
14 | * SMP alternatives use the same data structures as the other | ||
15 | * alternatives and the X86_FEATURE_UP flag to indicate the case of a | ||
16 | * UP system running a SMP kernel. The existing apply_alternatives() | ||
17 | * works fine for patching a SMP kernel for UP. | ||
18 | * | ||
19 | * The SMP alternative tables can be kept after boot and contain both | ||
20 | * UP and SMP versions of the instructions to allow switching back to | ||
21 | * SMP at runtime, when hotplugging in a new CPU, which is especially | ||
22 | * useful in virtualized environments. | ||
23 | * | ||
24 | * The very common lock prefix is handled as special case in a | ||
25 | * separate table which is a pure address list without replacement ptr | ||
26 | * and size information. That keeps the table sizes small. | ||
27 | */ | ||
28 | |||
29 | #ifdef CONFIG_SMP | ||
30 | #define LOCK_PREFIX \ | ||
31 | ".section .smp_locks,\"a\"\n" \ | ||
32 | _ASM_ALIGN "\n" \ | ||
33 | _ASM_PTR "661f\n" /* address */ \ | ||
34 | ".previous\n" \ | ||
35 | "661:\n\tlock; " | ||
36 | |||
37 | #else /* ! CONFIG_SMP */ | ||
38 | #define LOCK_PREFIX "" | ||
39 | #endif | ||
40 | |||
41 | /* This must be included *after* the definition of LOCK_PREFIX */ | ||
42 | #include <asm/cpufeature.h> | ||
7 | 43 | ||
8 | struct alt_instr { | 44 | struct alt_instr { |
9 | u8 *instr; /* original instruction */ | 45 | u8 *instr; /* original instruction */ |
10 | u8 *replacement; | 46 | u8 *replacement; |
11 | u8 cpuid; /* cpuid bit set for replacement */ | 47 | u8 cpuid; /* cpuid bit set for replacement */ |
12 | u8 instrlen; /* length of original instruction */ | 48 | u8 instrlen; /* length of original instruction */ |
13 | u8 replacementlen; /* length of new instruction, <= instrlen */ | 49 | u8 replacementlen; /* length of new instruction, <= instrlen */ |
14 | u8 pad; | 50 | u8 pad1; |
51 | #ifdef CONFIG_X86_64 | ||
52 | u32 pad2; | ||
53 | #endif | ||
15 | }; | 54 | }; |
16 | 55 | ||
17 | extern void alternative_instructions(void); | 56 | extern void alternative_instructions(void); |
18 | extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end); | 57 | extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end); |
19 | 58 | ||
20 | struct module; | 59 | struct module; |
60 | |||
21 | #ifdef CONFIG_SMP | 61 | #ifdef CONFIG_SMP |
22 | extern void alternatives_smp_module_add(struct module *mod, char *name, | 62 | extern void alternatives_smp_module_add(struct module *mod, char *name, |
23 | void *locks, void *locks_end, | 63 | void *locks, void *locks_end, |
@@ -45,17 +85,17 @@ static inline void alternatives_smp_switch(int smp) {} | |||
45 | * without volatile and memory clobber. | 85 | * without volatile and memory clobber. |
46 | */ | 86 | */ |
47 | #define alternative(oldinstr, newinstr, feature) \ | 87 | #define alternative(oldinstr, newinstr, feature) \ |
48 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 88 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
49 | ".section .altinstructions,\"a\"\n" \ | 89 | ".section .altinstructions,\"a\"\n" \ |
50 | " .align 4\n" \ | 90 | _ASM_ALIGN "\n" \ |
51 | " .long 661b\n" /* label */ \ | 91 | _ASM_PTR "661b\n" /* label */ \ |
52 | " .long 663f\n" /* new instruction */ \ | 92 | _ASM_PTR "663f\n" /* new instruction */ \ |
53 | " .byte %c0\n" /* feature bit */ \ | 93 | " .byte %c0\n" /* feature bit */ \ |
54 | " .byte 662b-661b\n" /* sourcelen */ \ | 94 | " .byte 662b-661b\n" /* sourcelen */ \ |
55 | " .byte 664f-663f\n" /* replacementlen */ \ | 95 | " .byte 664f-663f\n" /* replacementlen */ \ |
56 | ".previous\n" \ | 96 | ".previous\n" \ |
57 | ".section .altinstr_replacement,\"ax\"\n" \ | 97 | ".section .altinstr_replacement,\"ax\"\n" \ |
58 | "663:\n\t" newinstr "\n664:\n" /* replacement */\ | 98 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
59 | ".previous" :: "i" (feature) : "memory") | 99 | ".previous" :: "i" (feature) : "memory") |
60 | 100 | ||
61 | /* | 101 | /* |
@@ -66,35 +106,35 @@ static inline void alternatives_smp_switch(int smp) {} | |||
66 | * Argument numbers start with 1. | 106 | * Argument numbers start with 1. |
67 | * Best is to use constraints that are fixed size (like (%1) ... "r") | 107 | * Best is to use constraints that are fixed size (like (%1) ... "r") |
68 | * If you use variable sized constraints like "m" or "g" in the | 108 | * If you use variable sized constraints like "m" or "g" in the |
69 | * replacement maake sure to pad to the worst case length. | 109 | * replacement make sure to pad to the worst case length. |
70 | */ | 110 | */ |
71 | #define alternative_input(oldinstr, newinstr, feature, input...) \ | 111 | #define alternative_input(oldinstr, newinstr, feature, input...) \ |
72 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 112 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
73 | ".section .altinstructions,\"a\"\n" \ | 113 | ".section .altinstructions,\"a\"\n" \ |
74 | " .align 4\n" \ | 114 | _ASM_ALIGN "\n" \ |
75 | " .long 661b\n" /* label */ \ | 115 | _ASM_PTR "661b\n" /* label */ \ |
76 | " .long 663f\n" /* new instruction */ \ | 116 | _ASM_PTR "663f\n" /* new instruction */ \ |
77 | " .byte %c0\n" /* feature bit */ \ | 117 | " .byte %c0\n" /* feature bit */ \ |
78 | " .byte 662b-661b\n" /* sourcelen */ \ | 118 | " .byte 662b-661b\n" /* sourcelen */ \ |
79 | " .byte 664f-663f\n" /* replacementlen */ \ | 119 | " .byte 664f-663f\n" /* replacementlen */ \ |
80 | ".previous\n" \ | 120 | ".previous\n" \ |
81 | ".section .altinstr_replacement,\"ax\"\n" \ | 121 | ".section .altinstr_replacement,\"ax\"\n" \ |
82 | "663:\n\t" newinstr "\n664:\n" /* replacement */\ | 122 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
83 | ".previous" :: "i" (feature), ##input) | 123 | ".previous" :: "i" (feature), ##input) |
84 | 124 | ||
85 | /* Like alternative_input, but with a single output argument */ | 125 | /* Like alternative_input, but with a single output argument */ |
86 | #define alternative_io(oldinstr, newinstr, feature, output, input...) \ | 126 | #define alternative_io(oldinstr, newinstr, feature, output, input...) \ |
87 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 127 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
88 | ".section .altinstructions,\"a\"\n" \ | 128 | ".section .altinstructions,\"a\"\n" \ |
89 | " .align 4\n" \ | 129 | _ASM_ALIGN "\n" \ |
90 | " .long 661b\n" /* label */ \ | 130 | _ASM_PTR "661b\n" /* label */ \ |
91 | " .long 663f\n" /* new instruction */ \ | 131 | _ASM_PTR "663f\n" /* new instruction */ \ |
92 | " .byte %c[feat]\n" /* feature bit */ \ | 132 | " .byte %c[feat]\n" /* feature bit */ \ |
93 | " .byte 662b-661b\n" /* sourcelen */ \ | 133 | " .byte 662b-661b\n" /* sourcelen */ \ |
94 | " .byte 664f-663f\n" /* replacementlen */ \ | 134 | " .byte 664f-663f\n" /* replacementlen */ \ |
95 | ".previous\n" \ | 135 | ".previous\n" \ |
96 | ".section .altinstr_replacement,\"ax\"\n" \ | 136 | ".section .altinstr_replacement,\"ax\"\n" \ |
97 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ | 137 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
98 | ".previous" : output : [feat] "i" (feature), ##input) | 138 | ".previous" : output : [feat] "i" (feature), ##input) |
99 | 139 | ||
100 | /* | 140 | /* |
@@ -103,39 +143,6 @@ static inline void alternatives_smp_switch(int smp) {} | |||
103 | */ | 143 | */ |
104 | #define ASM_OUTPUT2(a, b) a, b | 144 | #define ASM_OUTPUT2(a, b) a, b |
105 | 145 | ||
106 | /* | ||
107 | * Alternative inline assembly for SMP. | ||
108 | * | ||
109 | * The LOCK_PREFIX macro defined here replaces the LOCK and | ||
110 | * LOCK_PREFIX macros used everywhere in the source tree. | ||
111 | * | ||
112 | * SMP alternatives use the same data structures as the other | ||
113 | * alternatives and the X86_FEATURE_UP flag to indicate the case of a | ||
114 | * UP system running a SMP kernel. The existing apply_alternatives() | ||
115 | * works fine for patching a SMP kernel for UP. | ||
116 | * | ||
117 | * The SMP alternative tables can be kept after boot and contain both | ||
118 | * UP and SMP versions of the instructions to allow switching back to | ||
119 | * SMP at runtime, when hotplugging in a new CPU, which is especially | ||
120 | * useful in virtualized environments. | ||
121 | * | ||
122 | * The very common lock prefix is handled as special case in a | ||
123 | * separate table which is a pure address list without replacement ptr | ||
124 | * and size information. That keeps the table sizes small. | ||
125 | */ | ||
126 | |||
127 | #ifdef CONFIG_SMP | ||
128 | #define LOCK_PREFIX \ | ||
129 | ".section .smp_locks,\"a\"\n" \ | ||
130 | " .align 4\n" \ | ||
131 | " .long 661f\n" /* address */ \ | ||
132 | ".previous\n" \ | ||
133 | "661:\n\tlock; " | ||
134 | |||
135 | #else /* ! CONFIG_SMP */ | ||
136 | #define LOCK_PREFIX "" | ||
137 | #endif | ||
138 | |||
139 | struct paravirt_patch_site; | 146 | struct paravirt_patch_site; |
140 | #ifdef CONFIG_PARAVIRT | 147 | #ifdef CONFIG_PARAVIRT |
141 | void apply_paravirt(struct paravirt_patch_site *start, | 148 | void apply_paravirt(struct paravirt_patch_site *start, |
diff --git a/include/asm-x86/alternative_64.h b/include/asm-x86/alternative_64.h index ab161e810151..50efcebae33f 100644 --- a/include/asm-x86/alternative_64.h +++ b/include/asm-x86/alternative_64.h | |||
@@ -1,10 +1,9 @@ | |||
1 | #ifndef _X86_64_ALTERNATIVE_H | 1 | #ifndef _X86_64_ALTERNATIVE_H |
2 | #define _X86_64_ALTERNATIVE_H | 2 | #define _X86_64_ALTERNATIVE_H |
3 | 3 | ||
4 | #ifdef __KERNEL__ | ||
5 | |||
6 | #include <linux/types.h> | 4 | #include <linux/types.h> |
7 | #include <linux/stddef.h> | 5 | #include <linux/stddef.h> |
6 | #include <asm/asm.h> | ||
8 | 7 | ||
9 | /* | 8 | /* |
10 | * Alternative inline assembly for SMP. | 9 | * Alternative inline assembly for SMP. |
@@ -30,10 +29,10 @@ | |||
30 | #ifdef CONFIG_SMP | 29 | #ifdef CONFIG_SMP |
31 | #define LOCK_PREFIX \ | 30 | #define LOCK_PREFIX \ |
32 | ".section .smp_locks,\"a\"\n" \ | 31 | ".section .smp_locks,\"a\"\n" \ |
33 | " .align 8\n" \ | 32 | _ASM_ALIGN "\n" \ |
34 | " .quad 661f\n" /* address */ \ | 33 | _ASM_PTR "661f\n" /* address */ \ |
35 | ".previous\n" \ | 34 | ".previous\n" \ |
36 | "661:\n\tlock; " | 35 | "661:\n\tlock; " |
37 | 36 | ||
38 | #else /* ! CONFIG_SMP */ | 37 | #else /* ! CONFIG_SMP */ |
39 | #define LOCK_PREFIX "" | 38 | #define LOCK_PREFIX "" |
@@ -43,12 +42,15 @@ | |||
43 | #include <asm/cpufeature.h> | 42 | #include <asm/cpufeature.h> |
44 | 43 | ||
45 | struct alt_instr { | 44 | struct alt_instr { |
46 | u8 *instr; /* original instruction */ | 45 | u8 *instr; /* original instruction */ |
47 | u8 *replacement; | 46 | u8 *replacement; |
48 | u8 cpuid; /* cpuid bit set for replacement */ | 47 | u8 cpuid; /* cpuid bit set for replacement */ |
49 | u8 instrlen; /* length of original instruction */ | 48 | u8 instrlen; /* length of original instruction */ |
50 | u8 replacementlen; /* length of new instruction, <= instrlen */ | 49 | u8 replacementlen; /* length of new instruction, <= instrlen */ |
51 | u8 pad[5]; | 50 | u8 pad1; |
51 | #ifdef CONFIG_X86_64 | ||
52 | u32 pad2; | ||
53 | #endif | ||
52 | }; | 54 | }; |
53 | 55 | ||
54 | extern void alternative_instructions(void); | 56 | extern void alternative_instructions(void); |
@@ -68,9 +70,7 @@ static inline void alternatives_smp_module_add(struct module *mod, char *name, | |||
68 | void *text, void *text_end) {} | 70 | void *text, void *text_end) {} |
69 | static inline void alternatives_smp_module_del(struct module *mod) {} | 71 | static inline void alternatives_smp_module_del(struct module *mod) {} |
70 | static inline void alternatives_smp_switch(int smp) {} | 72 | static inline void alternatives_smp_switch(int smp) {} |
71 | #endif | 73 | #endif /* CONFIG_SMP */ |
72 | |||
73 | #endif | ||
74 | 74 | ||
75 | /* | 75 | /* |
76 | * Alternative instructions for different CPU types or capabilities. | 76 | * Alternative instructions for different CPU types or capabilities. |
@@ -84,18 +84,18 @@ static inline void alternatives_smp_switch(int smp) {} | |||
84 | * For non barrier like inlines please define new variants | 84 | * For non barrier like inlines please define new variants |
85 | * without volatile and memory clobber. | 85 | * without volatile and memory clobber. |
86 | */ | 86 | */ |
87 | #define alternative(oldinstr, newinstr, feature) \ | 87 | #define alternative(oldinstr, newinstr, feature) \ |
88 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 88 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
89 | ".section .altinstructions,\"a\"\n" \ | 89 | ".section .altinstructions,\"a\"\n" \ |
90 | " .align 8\n" \ | 90 | _ASM_ALIGN "\n" \ |
91 | " .quad 661b\n" /* label */ \ | 91 | _ASM_PTR "661b\n" /* label */ \ |
92 | " .quad 663f\n" /* new instruction */ \ | 92 | _ASM_PTR "663f\n" /* new instruction */ \ |
93 | " .byte %c0\n" /* feature bit */ \ | 93 | " .byte %c0\n" /* feature bit */ \ |
94 | " .byte 662b-661b\n" /* sourcelen */ \ | 94 | " .byte 662b-661b\n" /* sourcelen */ \ |
95 | " .byte 664f-663f\n" /* replacementlen */ \ | 95 | " .byte 664f-663f\n" /* replacementlen */ \ |
96 | ".previous\n" \ | 96 | ".previous\n" \ |
97 | ".section .altinstr_replacement,\"ax\"\n" \ | 97 | ".section .altinstr_replacement,\"ax\"\n" \ |
98 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ | 98 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
99 | ".previous" :: "i" (feature) : "memory") | 99 | ".previous" :: "i" (feature) : "memory") |
100 | 100 | ||
101 | /* | 101 | /* |
@@ -111,30 +111,30 @@ static inline void alternatives_smp_switch(int smp) {} | |||
111 | #define alternative_input(oldinstr, newinstr, feature, input...) \ | 111 | #define alternative_input(oldinstr, newinstr, feature, input...) \ |
112 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 112 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
113 | ".section .altinstructions,\"a\"\n" \ | 113 | ".section .altinstructions,\"a\"\n" \ |
114 | " .align 8\n" \ | 114 | _ASM_ALIGN "\n" \ |
115 | " .quad 661b\n" /* label */ \ | 115 | _ASM_PTR "661b\n" /* label */ \ |
116 | " .quad 663f\n" /* new instruction */ \ | 116 | _ASM_PTR "663f\n" /* new instruction */ \ |
117 | " .byte %c0\n" /* feature bit */ \ | 117 | " .byte %c0\n" /* feature bit */ \ |
118 | " .byte 662b-661b\n" /* sourcelen */ \ | 118 | " .byte 662b-661b\n" /* sourcelen */ \ |
119 | " .byte 664f-663f\n" /* replacementlen */ \ | 119 | " .byte 664f-663f\n" /* replacementlen */ \ |
120 | ".previous\n" \ | 120 | ".previous\n" \ |
121 | ".section .altinstr_replacement,\"ax\"\n" \ | 121 | ".section .altinstr_replacement,\"ax\"\n" \ |
122 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ | 122 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
123 | ".previous" :: "i" (feature), ##input) | 123 | ".previous" :: "i" (feature), ##input) |
124 | 124 | ||
125 | /* Like alternative_input, but with a single output argument */ | 125 | /* Like alternative_input, but with a single output argument */ |
126 | #define alternative_io(oldinstr, newinstr, feature, output, input...) \ | 126 | #define alternative_io(oldinstr, newinstr, feature, output, input...) \ |
127 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ | 127 | asm volatile ("661:\n\t" oldinstr "\n662:\n" \ |
128 | ".section .altinstructions,\"a\"\n" \ | 128 | ".section .altinstructions,\"a\"\n" \ |
129 | " .align 8\n" \ | 129 | _ASM_ALIGN "\n" \ |
130 | " .quad 661b\n" /* label */ \ | 130 | _ASM_PTR "661b\n" /* label */ \ |
131 | " .quad 663f\n" /* new instruction */ \ | 131 | _ASM_PTR "663f\n" /* new instruction */ \ |
132 | " .byte %c[feat]\n" /* feature bit */ \ | 132 | " .byte %c[feat]\n" /* feature bit */ \ |
133 | " .byte 662b-661b\n" /* sourcelen */ \ | 133 | " .byte 662b-661b\n" /* sourcelen */ \ |
134 | " .byte 664f-663f\n" /* replacementlen */ \ | 134 | " .byte 664f-663f\n" /* replacementlen */ \ |
135 | ".previous\n" \ | 135 | ".previous\n" \ |
136 | ".section .altinstr_replacement,\"ax\"\n" \ | 136 | ".section .altinstr_replacement,\"ax\"\n" \ |
137 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ | 137 | "663:\n\t" newinstr "\n664:\n" /* replacement */ \ |
138 | ".previous" : output : [feat] "i" (feature), ##input) | 138 | ".previous" : output : [feat] "i" (feature), ##input) |
139 | 139 | ||
140 | /* | 140 | /* |
@@ -143,15 +143,17 @@ static inline void alternatives_smp_switch(int smp) {} | |||
143 | */ | 143 | */ |
144 | #define ASM_OUTPUT2(a, b) a, b | 144 | #define ASM_OUTPUT2(a, b) a, b |
145 | 145 | ||
146 | struct paravirt_patch; | 146 | struct paravirt_patch_site; |
147 | #ifdef CONFIG_PARAVIRT | 147 | #ifdef CONFIG_PARAVIRT |
148 | void apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end); | 148 | void apply_paravirt(struct paravirt_patch_site *start, |
149 | struct paravirt_patch_site *end); | ||
149 | #else | 150 | #else |
150 | static inline void | 151 | static inline void |
151 | apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end) | 152 | apply_paravirt(struct paravirt_patch_site *start, |
153 | struct paravirt_patch_site *end) | ||
152 | {} | 154 | {} |
153 | #define __parainstructions NULL | 155 | #define __parainstructions NULL |
154 | #define __parainstructions_end NULL | 156 | #define __parainstructions_end NULL |
155 | #endif | 157 | #endif |
156 | 158 | ||
157 | extern void text_poke(void *addr, unsigned char *opcode, int len); | 159 | extern void text_poke(void *addr, unsigned char *opcode, int len); |