Diffstat (limited to 'arch')
 arch/x86/include/asm/alternative.h |  7 ++++---
 arch/x86/include/asm/cpufeature.h  | 14 ++++++++------
 arch/x86/kernel/alternative.c      |  1 +
 arch/x86/kernel/entry_32.S         |  2 +-
 arch/x86/lib/clear_page_64.S       |  2 +-
 arch/x86/lib/copy_page_64.S        |  2 +-
 arch/x86/lib/copy_user_64.S        |  2 +-
 arch/x86/lib/memcpy_64.S           |  2 +-
 arch/x86/lib/memset_64.S           |  2 +-
 9 files changed, 19 insertions(+), 15 deletions(-)
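Taken together, the patch widens the cpufeature index carried by each alternatives record from 8 to 16 bits: u8 becomes u16 in struct alt_instr, every .byte that emits a feature number becomes a .word, the build-time size check moves into a throwaway .discard section, and apply_alternatives() gains a bounds check. The motivation is that X86_FEATURE_* values encode a word and a bit of the x86_capability array as 32*word + bit, so once there are more than eight 32-bit capability words the index no longer fits in a byte. A rough standalone sketch of that encoding (names and the NCAPINTS value are approximations for illustration, not the kernel's exact code):

#include <stdbool.h>
#include <stdint.h>

#define NCAPINTS 9	/* assumed for illustration; anything above 8 words overflows a u8 index */

static uint32_t x86_capability[NCAPINTS];

/* Sketch of how a 16-bit feature index selects a capability word and a bit. */
static bool has_feature(uint16_t feature)
{
	return x86_capability[feature / 32] & (1u << (feature % 32));
}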
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 03b6bb5394a0..bc6abb7bc7ee 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -45,10 +45,9 @@
 struct alt_instr {
 	u8 *instr;		/* original instruction */
 	u8 *replacement;
-	u8  cpuid;		/* cpuid bit set for replacement */
+	u16 cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
 	u8  replacementlen;	/* length of new instruction, <= instrlen */
-	u8  pad1;
 #ifdef CONFIG_X86_64
 	u32 pad2;
 #endif
@@ -86,9 +85,11 @@ static inline int alternatives_text_reserved(void *start, void *end)
       _ASM_ALIGN "\n"						\
       _ASM_PTR "661b\n"			/* label           */	\
       _ASM_PTR "663f\n"			/* new instruction */	\
-      " .byte " __stringify(feature) "\n"	/* feature bit     */	\
+      " .word " __stringify(feature) "\n"	/* feature bit     */	\
       " .byte 662b-661b\n"			/* sourcelen       */	\
       " .byte 664f-663f\n"			/* replacementlen  */	\
+      ".previous\n"						\
+      ".section .discard,\"aw\",@progbits\n"			\
       " .byte 0xff + (664f-663f) - (662b-661b)\n" /* rlen <= slen */	\
       ".previous\n"							\
       ".section .altinstr_replacement, \"ax\"\n"			\
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 0b205b8a4308..781a50b29a49 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -302,7 +302,7 @@ extern const char * const x86_power_flags[32];
  * patch the target code for additional performance.
  *
  */
-static __always_inline __pure bool __static_cpu_has(u8 bit)
+static __always_inline __pure bool __static_cpu_has(u16 bit)
 {
 #if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
 	asm goto("1: jmp %l[t_no]\n"
@@ -311,11 +311,11 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
 		 _ASM_ALIGN "\n"
 		 _ASM_PTR "1b\n"
 		 _ASM_PTR "0\n"		/* no replacement */
-		 " .byte %P0\n"		/* feature bit */
+		 " .word %P0\n"		/* feature bit */
 		 " .byte 2b - 1b\n"	/* source len */
 		 " .byte 0\n"		/* replacement len */
-		 " .byte 0xff + 0 - (2b-1b)\n"	/* padding */
 		 ".previous\n"
+		 /* skipping size check since replacement size = 0 */
 		 : : "i" (bit) : : t_no);
 	return true;
  t_no:
@@ -329,10 +329,12 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
 			     _ASM_ALIGN "\n"
 			     _ASM_PTR "1b\n"
 			     _ASM_PTR "3f\n"
-			     " .byte %P1\n"		/* feature bit */
+			     " .word %P1\n"		/* feature bit */
 			     " .byte 2b - 1b\n"		/* source len */
 			     " .byte 4f - 3f\n"		/* replacement len */
-			     " .byte 0xff + (4f-3f) - (2b-1b)\n" /* padding */
+			     ".previous\n"
+			     ".section .discard,\"aw\",@progbits\n"
+			     " .byte 0xff + (4f-3f) - (2b-1b)\n" /* size check */
 			     ".previous\n"
 			     ".section .altinstr_replacement,\"ax\"\n"
 			     "3: movb $1,%0\n"
@@ -348,7 +350,7 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
 (								\
 	__builtin_constant_p(boot_cpu_has(bit)) ?		\
 		boot_cpu_has(bit) :				\
-	(__builtin_constant_p(bit) && !((bit) & ~0xff)) ?	\
+	__builtin_constant_p(bit) ?				\
 		__static_cpu_has(bit) :				\
 		boot_cpu_has(bit)				\
 )
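On the caller side nothing changes except that static_cpu_has() no longer insists the feature index fit in a byte; it still requires a compile-time constant, and out-of-range values are instead caught by the BUG_ON added to apply_alternatives() below. A hedged sketch of typical use (the function is made up; the feature test mirrors the pattern of the kernel's 32-bit memory-barrier alternative):

/* Hypothetical 32-bit example: the jmp emitted by __static_cpu_has() is
 * patched out at boot on CPUs that set X86_FEATURE_XMM2, so the branch
 * costs nothing at run time on capable hardware. */
static inline void barrier_sketch(void)
{
	if (static_cpu_has(X86_FEATURE_XMM2))
		asm volatile("mfence" ::: "memory");
	else
		asm volatile("lock; addl $0,0(%%esp)" ::: "memory");
}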
diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
index 70237732a6c7..f65ab8b014c4 100644
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -214,6 +214,7 @@ void __init_or_module apply_alternatives(struct alt_instr *start,
 		u8 *instr = a->instr;
 		BUG_ON(a->replacementlen > a->instrlen);
 		BUG_ON(a->instrlen > sizeof(insnbuf));
+		BUG_ON(a->cpuid >= NCAPINTS*32);
 		if (!boot_cpu_has(a->cpuid))
 			continue;
 #ifdef CONFIG_X86_64
diff --git a/arch/x86/kernel/entry_32.S b/arch/x86/kernel/entry_32.S
index 258e93fa2630..227d00920d2f 100644
--- a/arch/x86/kernel/entry_32.S
+++ b/arch/x86/kernel/entry_32.S
@@ -913,7 +913,7 @@ ENTRY(simd_coprocessor_error)
 	.balign 4
 	.long 661b
 	.long 663f
-	.byte X86_FEATURE_XMM
+	.word X86_FEATURE_XMM
 	.byte 662b-661b
 	.byte 664f-663f
 .previous
diff --git a/arch/x86/lib/clear_page_64.S b/arch/x86/lib/clear_page_64.S
index ebeafcce04a9..aa4326bfb24a 100644
--- a/arch/x86/lib/clear_page_64.S
+++ b/arch/x86/lib/clear_page_64.S
@@ -52,7 +52,7 @@ ENDPROC(clear_page)
 	.align 8
 	.quad clear_page
 	.quad 1b
-	.byte X86_FEATURE_REP_GOOD
+	.word X86_FEATURE_REP_GOOD
 	.byte .Lclear_page_end - clear_page
 	.byte 2b - 1b
 	.previous
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 727a5d46d2fc..6fec2d1cebe1 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -113,7 +113,7 @@ ENDPROC(copy_page)
 	.align 8
 	.quad copy_page
 	.quad 1b
-	.byte X86_FEATURE_REP_GOOD
+	.word X86_FEATURE_REP_GOOD
 	.byte .Lcopy_page_end - copy_page
 	.byte 2b - 1b
 	.previous
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 71100c98e337..a460158b5ac5 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -29,7 +29,7 @@
 	.align 8
 	.quad 0b
 	.quad 2b
-	.byte \feature			/* when feature is set */
+	.word \feature			/* when feature is set */
 	.byte 5
 	.byte 5
 	.previous
diff --git a/arch/x86/lib/memcpy_64.S b/arch/x86/lib/memcpy_64.S
index f82e884928af..bcbcd1e0f7d5 100644
--- a/arch/x86/lib/memcpy_64.S
+++ b/arch/x86/lib/memcpy_64.S
@@ -131,7 +131,7 @@ ENDPROC(__memcpy)
 	.align 8
 	.quad memcpy
 	.quad .Lmemcpy_c
-	.byte X86_FEATURE_REP_GOOD
+	.word X86_FEATURE_REP_GOOD
 
 	/*
 	 * Replace only beginning, memcpy is used to apply alternatives,
diff --git a/arch/x86/lib/memset_64.S b/arch/x86/lib/memset_64.S
index e88d3b81644a..09d344269652 100644
--- a/arch/x86/lib/memset_64.S
+++ b/arch/x86/lib/memset_64.S
@@ -121,7 +121,7 @@ ENDPROC(__memset)
 	.align 8
 	.quad memset
 	.quad .Lmemset_c
-	.byte X86_FEATURE_REP_GOOD
+	.word X86_FEATURE_REP_GOOD
 	.byte .Lfinal - memset
 	.byte .Lmemset_e - .Lmemset_c
 	.previous
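The assembly hunks above all touch hand-rolled .altinstructions records, which must mirror struct alt_instr field for field; once cpuid is a u16, every such record has to emit the feature number with .word instead of .byte or the fields after it shift out of place. For reference, a record after this change has roughly the following shape (a standalone sketch with placeholder labels, lengths and a placeholder feature number, not copied from any one file above):

asm(".pushsection .altinstructions, \"a\"\n"
    "	.align 8\n"
    "	.quad 0f\n"		/* u8 *instr       - original code */
    "	.quad 1f\n"		/* u8 *replacement - patched-in code */
    "	.word 0\n"		/* u16 cpuid       - feature number (placeholder) */
    "	.byte 2f-0f\n"		/* u8 instrlen */
    "	.byte 3f-1f\n"		/* u8 replacementlen, must be <= instrlen */
    ".popsection\n"
    ".pushsection .altinstr_replacement, \"ax\"\n"
    "1:	rep movsq\n"		/* stand-in replacement (3 bytes) */
    "3:\n"
    ".popsection\n"
    "0:	.skip 5, 0x90\n"	/* stand-in original: five NOP bytes to be patched */
    "2:\n");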