author		Borislav Petkov <bp@suse.de>	2014-12-27 04:41:52 -0500
committer	Borislav Petkov <bp@suse.de>	2015-02-23 07:44:00 -0500
commit		4332195c5615bf748624094ce4ff6797e475024d (patch)
tree		7caa974cd666a8280882e9ed6d2093723159b569 /arch/x86/lib
parent		db477a3386dee183130916d6bbf21f5828b0b2e2 (diff)
x86/alternatives: Add instruction padding
Up until now we have always paid attention to make sure the length of the new instruction replacing the old one is less than or equal to the length of the old instruction. If the new instruction is longer, at the time it replaces the old instruction it will overwrite the beginning of the next instruction in the kernel image and cause your pants to catch fire.

So instead of having to pay attention, teach the alternatives framework to pad shorter old instructions with NOPs at buildtime - but only in the case when

    len(old instruction(s)) < len(new instruction(s))

and add nothing in the >= case. (In that case we do add_nops() when patching.)

This way the alternatives user shouldn't have to care about instruction sizes and can simply use the macros.

Add asm ALTERNATIVE* flavor macros too, while at it.

Also, we need to save the pad length in a separate struct alt_instr member for NOP optimization, and the way to do that reliably is to carry the pad length instead of trying to detect whether we're looking at single-byte NOPs or at pathological instruction offsets like e9 90 90 90 90, for example, which is a valid instruction.

Thanks to Michael Matz for the great help with toolchain questions.

Signed-off-by: Borislav Petkov <bp@suse.de>
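The build-time padding can be expressed with a GNU as conditional-fill idiom: a relational expression evaluates to -1 when true, so negating its product with the size difference yields that difference exactly when the replacement is longer, and 0 otherwise. Below is a minimal standalone sketch of the idea; the 66x labels and the .altinstr_replacement section name follow the kernel's conventions, but the snippet is an illustration, not the patch's exact macro expansion:

	.text
661:
	xor	%eax,%eax	/* "old" instruction, 2 bytes */
662:
	/* pad with NOPs (0x90) only when len(new) > len(old):
	 * the comparison is -1 when true, 0 when false */
	.skip	-(((665f-664f)-(662b-661b)) > 0) * ((665f-664f)-(662b-661b)),0x90
663:
	.section .altinstr_replacement,"ax"
664:
	mov	$1,%eax		/* "new" instruction, 5 bytes */
665:
	.previous

In this scheme 663b-662b is the pad length carried in the new struct alt_instr member, and 663b-661b is the total length of the patchable region.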
Diffstat (limited to 'arch/x86/lib')
-rw-r--r--	arch/x86/lib/clear_page_64.S	4
-rw-r--r--	arch/x86/lib/copy_page_64.S	2
-rw-r--r--	arch/x86/lib/copy_user_64.S	4
-rw-r--r--	arch/x86/lib/memcpy_64.S	4
-rw-r--r--	arch/x86/lib/memmove_64.S	2
-rw-r--r--	arch/x86/lib/memset_64.S	4
6 files changed, 10 insertions, 10 deletions
diff --git a/arch/x86/lib/clear_page_64.S b/arch/x86/lib/clear_page_64.S
index f2145cfa12a6..38e57faefd71 100644
--- a/arch/x86/lib/clear_page_64.S
+++ b/arch/x86/lib/clear_page_64.S
@@ -67,7 +67,7 @@ ENDPROC(clear_page)
 	.previous
 	.section .altinstructions,"a"
 	altinstruction_entry clear_page,1b,X86_FEATURE_REP_GOOD,\
-			     .Lclear_page_end-clear_page, 2b-1b
+			     .Lclear_page_end-clear_page, 2b-1b, 0
 	altinstruction_entry clear_page,2b,X86_FEATURE_ERMS, \
-			     .Lclear_page_end-clear_page,3b-2b
+			     .Lclear_page_end-clear_page,3b-2b, 0
 	.previous
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 176cca67212b..f1ffdbb07755 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -106,5 +106,5 @@ ENDPROC(copy_page)
 	.previous
 	.section .altinstructions,"a"
 	altinstruction_entry copy_page, 1b, X86_FEATURE_REP_GOOD,	\
-		.Lcopy_page_end-copy_page, 2b-1b
+		.Lcopy_page_end-copy_page, 2b-1b, 0
 	.previous
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 1530ec2c1b12..a9aedd6aa7f7 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -36,8 +36,8 @@
 	.previous

 	.section .altinstructions,"a"
-	altinstruction_entry 0b,2b,\feature1,5,5
-	altinstruction_entry 0b,3b,\feature2,5,5
+	altinstruction_entry 0b,2b,\feature1,5,5,0
+	altinstruction_entry 0b,3b,\feature2,5,5,0
 	.previous
 	.endm

diff --git a/arch/x86/lib/memcpy_64.S b/arch/x86/lib/memcpy_64.S
index 89b53c9968e7..bbfdacc01760 100644
--- a/arch/x86/lib/memcpy_64.S
+++ b/arch/x86/lib/memcpy_64.S
@@ -202,7 +202,7 @@ ENDPROC(__memcpy)
 	 */
 	.section .altinstructions, "a"
 	altinstruction_entry __memcpy,.Lmemcpy_c,X86_FEATURE_REP_GOOD,\
-			     .Lmemcpy_e-.Lmemcpy_c,.Lmemcpy_e-.Lmemcpy_c
+			     .Lmemcpy_e-.Lmemcpy_c,.Lmemcpy_e-.Lmemcpy_c,0
 	altinstruction_entry __memcpy,.Lmemcpy_c_e,X86_FEATURE_ERMS, \
-			     .Lmemcpy_e_e-.Lmemcpy_c_e,.Lmemcpy_e_e-.Lmemcpy_c_e
+			     .Lmemcpy_e_e-.Lmemcpy_c_e,.Lmemcpy_e_e-.Lmemcpy_c_e,0
 	.previous
diff --git a/arch/x86/lib/memmove_64.S b/arch/x86/lib/memmove_64.S
index 9c4b530575da..bbfa6b269ece 100644
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -221,7 +221,7 @@ ENTRY(__memmove)
 	altinstruction_entry .Lmemmove_begin_forward,		\
 		.Lmemmove_begin_forward_efs,X86_FEATURE_ERMS,	\
 		.Lmemmove_end_forward-.Lmemmove_begin_forward,	\
-		.Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
+		.Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs,0
 	.previous
 ENDPROC(__memmove)
 ENDPROC(memmove)
diff --git a/arch/x86/lib/memset_64.S b/arch/x86/lib/memset_64.S
index 6f44935c6a60..f6153c1cdddc 100644
--- a/arch/x86/lib/memset_64.S
+++ b/arch/x86/lib/memset_64.S
@@ -150,7 +150,7 @@ ENDPROC(__memset)
 	 */
 	.section .altinstructions,"a"
 	altinstruction_entry __memset,.Lmemset_c,X86_FEATURE_REP_GOOD,\
-			     .Lfinal-__memset,.Lmemset_e-.Lmemset_c
+			     .Lfinal-__memset,.Lmemset_e-.Lmemset_c,0
 	altinstruction_entry __memset,.Lmemset_c_e,X86_FEATURE_ERMS, \
-			     .Lfinal-__memset,.Lmemset_e_e-.Lmemset_c_e
+			     .Lfinal-__memset,.Lmemset_e_e-.Lmemset_c_e,0
 	.previous
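The common thread in all six hunks is the new trailing operand to altinstruction_entry: a pad-length byte appended to each .altinstructions record. A sketch of what the extended macro plausibly looks like follows; the field layout mirrors struct alt_instr as described in the commit message, though the exact field widths shown here are an assumption:

.macro altinstruction_entry orig alt feature orig_len alt_len pad_len
	.long	\orig - .	/* relative offset of original insn(s) */
	.long	\alt - .	/* relative offset of replacement */
	.word	\feature	/* CPUID feature bit to test at patch time */
	.byte	\orig_len	/* length of original instruction(s) */
	.byte	\alt_len	/* length of replacement */
	.byte	\pad_len	/* NOP padding added at build time */
.endm

All six call sites pass an explicit 0, since no build-time padding is inserted into these hand-written assembly routines.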