author	Martin Schwidefsky <schwidefsky@de.ibm.com>	2018-04-20 10:49:46 -0400
committer	Martin Schwidefsky <schwidefsky@de.ibm.com>	2018-05-07 03:07:32 -0400
commit	6dd85fbb87d1d6b87a3b1f02ca28d7b2abd2e7ba (patch)
tree	595e60d24724231047b429bb4a40a3e908648e22
parent	d66a7355717ec903d455277a550d930ba13df4a8 (diff)
s390: move expoline assembler macros to a header
To be able to use the expoline branches in different assembler files
move the associated macros from entry.S to a new header nospec-insn.h.
While we are at it make the macros a bit nicer to use.

Cc: stable@vger.kernel.org # 4.16
Fixes: f19fbd5ed6 ("s390: introduce execute-trampolines for branches")
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
-rw-r--r--	arch/s390/include/asm/nospec-insn.h	127
-rw-r--r--	arch/s390/kernel/entry.S	105
2 files changed, 151 insertions(+), 81 deletions(-)
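For illustration only (not part of this patch), a consumer of the new header would pull in the macros and use them roughly like this; the function name and body below are made up:

#include <asm/nospec-insn.h>

	GEN_BR_THUNK %r14		# emit/merge the thunk used by BR_EX %r14

ENTRY(example_func)			# hypothetical function, for illustration
	lg	%r14,__SF_GPRS+64(%r15)	# hypothetical: reload the return address
	BR_EX	%r14			# expoline-safe replacement for "br %r14"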
diff --git a/arch/s390/include/asm/nospec-insn.h b/arch/s390/include/asm/nospec-insn.h
new file mode 100644
index 000000000000..440689cbcf51
--- /dev/null
+++ b/arch/s390/include/asm/nospec-insn.h
@@ -0,0 +1,127 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_S390_NOSPEC_ASM_H
+#define _ASM_S390_NOSPEC_ASM_H
+
+#include <asm/dwarf.h>
+
+#ifdef __ASSEMBLY__
+
+#ifdef CONFIG_EXPOLINE
+
+/*
+ * The expoline macros are used to create thunks in the same format
+ * as gcc generates them. The 'comdat' section flag makes sure that
+ * the various thunks are merged into a single copy.
+ */
+	.macro __THUNK_PROLOG_NAME name
+	.pushsection .text.\name,"axG",@progbits,\name,comdat
+	.globl \name
+	.hidden \name
+	.type \name,@function
+\name:
+	CFI_STARTPROC
+	.endm
+
+	.macro __THUNK_EPILOG
+	CFI_ENDPROC
+	.popsection
+	.endm
+
+	.macro __THUNK_PROLOG_BR r1,r2
+	__THUNK_PROLOG_NAME __s390x_indirect_jump_r\r2\()use_r\r1
+	.endm
+
+	.macro __THUNK_BR r1,r2
+	jg	__s390x_indirect_jump_r\r2\()use_r\r1
+	.endm
+
+	.macro __THUNK_BRASL r1,r2,r3
+	brasl	\r1,__s390x_indirect_jump_r\r3\()use_r\r2
+	.endm
+
+	.macro	__DECODE_RR expand,reg,ruse
+	.set __decode_fail,1
+	.irp r1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+	.ifc \reg,%r\r1
+	.irp r2,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+	.ifc \ruse,%r\r2
+	\expand \r1,\r2
+	.set __decode_fail,0
+	.endif
+	.endr
+	.endif
+	.endr
+	.if __decode_fail == 1
+	.error "__DECODE_RR failed"
+	.endif
+	.endm
+
+	.macro	__DECODE_RRR expand,rsave,rtarget,ruse
+	.set __decode_fail,1
+	.irp r1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+	.ifc \rsave,%r\r1
+	.irp r2,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+	.ifc \rtarget,%r\r2
+	.irp r3,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+	.ifc \ruse,%r\r3
+	\expand \r1,\r2,\r3
+	.set __decode_fail,0
+	.endif
+	.endr
+	.endif
+	.endr
+	.endif
+	.endr
+	.if __decode_fail == 1
+	.error "__DECODE_RRR failed"
+	.endif
+	.endm
+
+	.macro __THUNK_EX_BR reg,ruse
+#ifdef CONFIG_HAVE_MARCH_Z10_FEATURES
+	exrl	0,555f
+	j	.
+#else
+	larl	\ruse,555f
+	ex	0,0(\ruse)
+	j	.
+#endif
+555:	br	\reg
+	.endm
+
+	.macro GEN_BR_THUNK reg,ruse=%r1
+	__DECODE_RR __THUNK_PROLOG_BR,\reg,\ruse
+	__THUNK_EX_BR \reg,\ruse
+	__THUNK_EPILOG
+	.endm
+
+	.macro BR_EX reg,ruse=%r1
+557:	__DECODE_RR __THUNK_BR,\reg,\ruse
+	.pushsection .s390_indirect_branches,"a",@progbits
+	.long	557b-.
+	.popsection
+	.endm
+
+	.macro BASR_EX rsave,rtarget,ruse=%r1
+559:	__DECODE_RRR __THUNK_BRASL,\rsave,\rtarget,\ruse
+	.pushsection .s390_indirect_branches,"a",@progbits
+	.long	559b-.
+	.popsection
+	.endm
+
+#else
+	.macro GEN_BR_THUNK reg,ruse=%r1
+	.endm
+
+	.macro BR_EX reg,ruse=%r1
+	br	\reg
+	.endm
+
+	.macro BASR_EX rsave,rtarget,ruse=%r1
+	basr	\rsave,\rtarget
+	.endm
+#endif
+
+#endif /* __ASSEMBLY__ */
+
+#endif /* _ASM_S390_NOSPEC_ASM_H */
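As a reading aid (not part of the patch), here is a sketch of how the macros above compose, assuming CONFIG_EXPOLINE and CONFIG_HAVE_MARCH_Z10_FEATURES are set; on older machine levels __THUNK_EX_BR uses the larl/ex pair instead of exrl:

	# GEN_BR_THUNK %r14,%r11 decodes the registers via __DECODE_RR and emits
	# the comdat thunk __s390x_indirect_jump_r11use_r14:
	.pushsection .text.__s390x_indirect_jump_r11use_r14,"axG",@progbits,__s390x_indirect_jump_r11use_r14,comdat
	.globl __s390x_indirect_jump_r11use_r14
	.hidden __s390x_indirect_jump_r11use_r14
	.type __s390x_indirect_jump_r11use_r14,@function
__s390x_indirect_jump_r11use_r14:
	CFI_STARTPROC
	exrl	0,555f			# execute the "br %r14" out of line
	j	.			# speculation stops in this endless loop
555:	br	%r14
	CFI_ENDPROC
	.popsection

	# BR_EX %r14,%r11 at a call site becomes a branch to that thunk and
	# records its location in .s390_indirect_branches:
557:	jg	__s390x_indirect_jump_r11use_r14
	.pushsection .s390_indirect_branches,"a",@progbits
	.long	557b-.
	.popsection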
diff --git a/arch/s390/kernel/entry.S b/arch/s390/kernel/entry.S
index 3f22f139a041..f03402efab4b 100644
--- a/arch/s390/kernel/entry.S
+++ b/arch/s390/kernel/entry.S
@@ -28,6 +28,7 @@
 #include <asm/setup.h>
 #include <asm/nmi.h>
 #include <asm/export.h>
+#include <asm/nospec-insn.h>
 
 __PT_R0 = __PT_GPRS
 __PT_R1 = __PT_GPRS + 8
@@ -183,67 +184,9 @@ _LPP_OFFSET = __LC_LPP
183 "jnz .+8; .long 0xb2e8d000", 82 184 "jnz .+8; .long 0xb2e8d000", 82
184 .endm 185 .endm
185 186
186#ifdef CONFIG_EXPOLINE 187 GEN_BR_THUNK %r9
187 188 GEN_BR_THUNK %r14
188 .macro GEN_BR_THUNK name,reg,tmp 189 GEN_BR_THUNK %r14,%r11
189 .section .text.\name,"axG",@progbits,\name,comdat
190 .globl \name
191 .hidden \name
192 .type \name,@function
193\name:
194 CFI_STARTPROC
195#ifdef CONFIG_HAVE_MARCH_Z10_FEATURES
196 exrl 0,0f
197#else
198 larl \tmp,0f
199 ex 0,0(\tmp)
200#endif
201 j .
2020: br \reg
203 CFI_ENDPROC
204 .endm
205
206 GEN_BR_THUNK __s390x_indirect_jump_r1use_r9,%r9,%r1
207 GEN_BR_THUNK __s390x_indirect_jump_r1use_r14,%r14,%r1
208 GEN_BR_THUNK __s390x_indirect_jump_r11use_r14,%r14,%r11
209
210 .macro BASR_R14_R9
2110: brasl %r14,__s390x_indirect_jump_r1use_r9
212 .pushsection .s390_indirect_branches,"a",@progbits
213 .long 0b-.
214 .popsection
215 .endm
216
217 .macro BR_R1USE_R14
2180: jg __s390x_indirect_jump_r1use_r14
219 .pushsection .s390_indirect_branches,"a",@progbits
220 .long 0b-.
221 .popsection
222 .endm
223
224 .macro BR_R11USE_R14
2250: jg __s390x_indirect_jump_r11use_r14
226 .pushsection .s390_indirect_branches,"a",@progbits
227 .long 0b-.
228 .popsection
229 .endm
230
231#else /* CONFIG_EXPOLINE */
232
233 .macro BASR_R14_R9
234 basr %r14,%r9
235 .endm
236
237 .macro BR_R1USE_R14
238 br %r14
239 .endm
240
241 .macro BR_R11USE_R14
242 br %r14
243 .endm
244
245#endif /* CONFIG_EXPOLINE */
246
247 190
248 .section .kprobes.text, "ax" 191 .section .kprobes.text, "ax"
249.Ldummy: 192.Ldummy:
@@ -260,7 +203,7 @@ _LPP_OFFSET = __LC_LPP
 ENTRY(__bpon)
 	.globl __bpon
 	BPON
-	BR_R1USE_R14
+	BR_EX	%r14
 
 /*
  * Scheduler resume function, called by switch_to
@@ -284,7 +227,7 @@ ENTRY(__switch_to)
 	mvc	__LC_CURRENT_PID(4,%r0),0(%r3)	# store pid of next
 	lmg	%r6,%r15,__SF_GPRS(%r15)	# load gprs of next task
 	ALTERNATIVE "", ".insn s,0xb2800000,_LPP_OFFSET", 40
-	BR_R1USE_R14
+	BR_EX	%r14
 
 .L__critical_start:
 
@@ -351,7 +294,7 @@ sie_exit:
 	xgr	%r5,%r5
 	lmg	%r6,%r14,__SF_GPRS(%r15)	# restore kernel registers
 	lg	%r2,__SF_SIE_REASON(%r15)	# return exit reason code
-	BR_R1USE_R14
+	BR_EX	%r14
 .Lsie_fault:
 	lghi	%r14,-EFAULT
 	stg	%r14,__SF_SIE_REASON(%r15)	# set exit reason code
@@ -410,7 +353,7 @@ ENTRY(system_call)
 	lgf	%r9,0(%r8,%r10)			# get system call add.
 	TSTMSK	__TI_flags(%r12),_TIF_TRACE
 	jnz	.Lsysc_tracesys
-	BASR_R14_R9				# call sys_xxxx
+	BASR_EX	%r14,%r9			# call sys_xxxx
 	stg	%r2,__PT_R2(%r11)		# store return value
 
 .Lsysc_return:
@@ -595,7 +538,7 @@ ENTRY(system_call)
 	lmg	%r3,%r7,__PT_R3(%r11)
 	stg	%r7,STACK_FRAME_OVERHEAD(%r15)
 	lg	%r2,__PT_ORIG_GPR2(%r11)
-	BASR_R14_R9			# call sys_xxx
+	BASR_EX	%r14,%r9		# call sys_xxx
 	stg	%r2,__PT_R2(%r11)	# store return value
 .Lsysc_tracenogo:
 	TSTMSK	__TI_flags(%r12),_TIF_TRACE
@@ -619,7 +562,7 @@ ENTRY(ret_from_fork)
 	lmg	%r9,%r10,__PT_R9(%r11)	# load gprs
 ENTRY(kernel_thread_starter)
 	la	%r2,0(%r10)
-	BASR_R14_R9
+	BASR_EX	%r14,%r9
 	j	.Lsysc_tracenogo
 
 /*
@@ -701,7 +644,7 @@ ENTRY(pgm_check_handler)
 	je	.Lpgm_return
 	lgf	%r9,0(%r10,%r1)		# load address of handler routine
 	lgr	%r2,%r11		# pass pointer to pt_regs
-	BASR_R14_R9			# branch to interrupt-handler
+	BASR_EX	%r14,%r9		# branch to interrupt-handler
 .Lpgm_return:
 	LOCKDEP_SYS_EXIT
 	tm	__PT_PSW+1(%r11),0x01	# returning to user ?
@@ -1019,7 +962,7 @@ ENTRY(psw_idle)
 	stpt	__TIMER_IDLE_ENTER(%r2)
 .Lpsw_idle_lpsw:
 	lpswe	__SF_EMPTY(%r15)
-	BR_R1USE_R14
+	BR_EX	%r14
 .Lpsw_idle_end:
 
 /*
@@ -1061,7 +1004,7 @@ ENTRY(save_fpu_regs)
 .Lsave_fpu_regs_done:
 	oi	__LC_CPU_FLAGS+7,_CIF_FPU
 .Lsave_fpu_regs_exit:
-	BR_R1USE_R14
+	BR_EX	%r14
 .Lsave_fpu_regs_end:
 EXPORT_SYMBOL(save_fpu_regs)
 
@@ -1107,7 +1050,7 @@ load_fpu_regs:
 .Lload_fpu_regs_done:
 	ni	__LC_CPU_FLAGS+7,255-_CIF_FPU
 .Lload_fpu_regs_exit:
-	BR_R1USE_R14
+	BR_EX	%r14
 .Lload_fpu_regs_end:
 
 .L__critical_end:
@@ -1322,7 +1265,7 @@ cleanup_critical:
 	jl	0f
 	clg	%r9,BASED(.Lcleanup_table+104)	# .Lload_fpu_regs_end
 	jl	.Lcleanup_load_fpu_regs
-0:	BR_R11USE_R14
+0:	BR_EX	%r14
 
 	.align	8
 .Lcleanup_table:
@@ -1358,7 +1301,7 @@ cleanup_critical:
 	ni	__SIE_PROG0C+3(%r9),0xfe	# no longer in SIE
 	lctlg	%c1,%c1,__LC_USER_ASCE		# load primary asce
 	larl	%r9,sie_exit			# skip forward to sie_exit
-	BR_R11USE_R14
+	BR_EX	%r14
 #endif
 
 .Lcleanup_system_call:
@@ -1412,7 +1355,7 @@ cleanup_critical:
 	stg	%r15,56(%r11)		# r15 stack pointer
 	# set new psw address and exit
 	larl	%r9,.Lsysc_do_svc
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 .Lcleanup_system_call_insn:
 	.quad	system_call
 	.quad	.Lsysc_stmg
@@ -1424,7 +1367,7 @@ cleanup_critical:
 
 .Lcleanup_sysc_tif:
 	larl	%r9,.Lsysc_tif
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 
 .Lcleanup_sysc_restore:
 	# check if stpt has been executed
@@ -1441,14 +1384,14 @@ cleanup_critical:
 	mvc	0(64,%r11),__PT_R8(%r9)
 	lmg	%r0,%r7,__PT_R0(%r9)
 1:	lmg	%r8,%r9,__LC_RETURN_PSW
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 .Lcleanup_sysc_restore_insn:
 	.quad	.Lsysc_exit_timer
 	.quad	.Lsysc_done - 4
 
 .Lcleanup_io_tif:
 	larl	%r9,.Lio_tif
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 
 .Lcleanup_io_restore:
 	# check if stpt has been executed
@@ -1462,7 +1405,7 @@ cleanup_critical:
 	mvc	0(64,%r11),__PT_R8(%r9)
 	lmg	%r0,%r7,__PT_R0(%r9)
 1:	lmg	%r8,%r9,__LC_RETURN_PSW
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 .Lcleanup_io_restore_insn:
 	.quad	.Lio_exit_timer
 	.quad	.Lio_done - 4
@@ -1515,17 +1458,17 @@ cleanup_critical:
 	# prepare return psw
 	nihh	%r8,0xfcfd		# clear irq & wait state bits
 	lg	%r9,48(%r11)		# return from psw_idle
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 .Lcleanup_idle_insn:
 	.quad	.Lpsw_idle_lpsw
 
 .Lcleanup_save_fpu_regs:
 	larl	%r9,save_fpu_regs
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 
 .Lcleanup_load_fpu_regs:
 	larl	%r9,load_fpu_regs
-	BR_R11USE_R14
+	BR_EX	%r14,%r11
 
 /*
  * Integer constants