about summary refs log tree commit diff stats
path: root/arch/arm64
diff options
context:
space:
mode:
authorDave P Martin <Dave.Martin@arm.com>2015-01-29 11:24:43 -0500
committerCatalin Marinas <catalin.marinas@arm.com>2015-01-29 12:24:39 -0500
commit6917c857e3ab5bc5e15d2b1ff34dc2443ccf5b0d (patch)
tree1f383325c1b8793a57696eccebd6a34870090ed6 /arch/arm64
parenta1c76574f345342d23836b520ce44674d23bc267 (diff)
arm64: Avoid breakage caused by .altmacro in fpsimd save/restore macros
Alternate macro mode is not a property of a macro definition, but a gas runtime state that alters the way macros are expanded for ever after (until .noaltmacro is seen). This means that subsequent assembly code that calls other macros can break if fpsimdmacros.h is included. Since these instruction sequences are simple (if dull -- but in a good way), this patch solves the problem by simply expanding the .irp loops. The pre-existing fpsimd_{save,restore} macros weren't rolled with .irp anyway and the sequences affected are short, so this change restores consistency at little cost. Signed-off-by: Dave Martin <Dave.Martin@arm.com> Acked-by: Marc Zyngier <marc.zyngier@arm.com> Acked-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Diffstat (limited to 'arch/arm64')
-rw-r--r--arch/arm64/include/asm/fpsimdmacros.h43
1 file changed, 32 insertions, 11 deletions
diff --git a/arch/arm64/include/asm/fpsimdmacros.h b/arch/arm64/include/asm/fpsimdmacros.h
index 007618b8188c..a2daf1293028 100644
--- a/arch/arm64/include/asm/fpsimdmacros.h
+++ b/arch/arm64/include/asm/fpsimdmacros.h
@@ -76,7 +76,6 @@
76 fpsimd_restore_fpcr x\tmpnr, \state 76 fpsimd_restore_fpcr x\tmpnr, \state
77.endm 77.endm
78 78
79.altmacro
80.macro fpsimd_save_partial state, numnr, tmpnr1, tmpnr2 79.macro fpsimd_save_partial state, numnr, tmpnr1, tmpnr2
81 mrs x\tmpnr1, fpsr 80 mrs x\tmpnr1, fpsr
82 str w\numnr, [\state, #8] 81 str w\numnr, [\state, #8]
@@ -86,11 +85,22 @@
86 add \state, \state, x\numnr, lsl #4 85 add \state, \state, x\numnr, lsl #4
87 sub x\tmpnr1, x\tmpnr1, x\numnr, lsl #1 86 sub x\tmpnr1, x\tmpnr1, x\numnr, lsl #1
88 br x\tmpnr1 87 br x\tmpnr1
89 .irp qa, 30, 28, 26, 24, 22, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0 88 stp q30, q31, [\state, #-16 * 30 - 16]
90 .irp qb, %(qa + 1) 89 stp q28, q29, [\state, #-16 * 28 - 16]
91 stp q\qa, q\qb, [\state, # -16 * \qa - 16] 90 stp q26, q27, [\state, #-16 * 26 - 16]
92 .endr 91 stp q24, q25, [\state, #-16 * 24 - 16]
93 .endr 92 stp q22, q23, [\state, #-16 * 22 - 16]
93 stp q20, q21, [\state, #-16 * 20 - 16]
94 stp q18, q19, [\state, #-16 * 18 - 16]
95 stp q16, q17, [\state, #-16 * 16 - 16]
96 stp q14, q15, [\state, #-16 * 14 - 16]
97 stp q12, q13, [\state, #-16 * 12 - 16]
98 stp q10, q11, [\state, #-16 * 10 - 16]
99 stp q8, q9, [\state, #-16 * 8 - 16]
100 stp q6, q7, [\state, #-16 * 6 - 16]
101 stp q4, q5, [\state, #-16 * 4 - 16]
102 stp q2, q3, [\state, #-16 * 2 - 16]
103 stp q0, q1, [\state, #-16 * 0 - 16]
940: 1040:
95.endm 105.endm
96 106
@@ -103,10 +113,21 @@
103 add \state, \state, x\tmpnr2, lsl #4 113 add \state, \state, x\tmpnr2, lsl #4
104 sub x\tmpnr1, x\tmpnr1, x\tmpnr2, lsl #1 114 sub x\tmpnr1, x\tmpnr1, x\tmpnr2, lsl #1
105 br x\tmpnr1 115 br x\tmpnr1
106 .irp qa, 30, 28, 26, 24, 22, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0 116 ldp q30, q31, [\state, #-16 * 30 - 16]
107 .irp qb, %(qa + 1) 117 ldp q28, q29, [\state, #-16 * 28 - 16]
108 ldp q\qa, q\qb, [\state, # -16 * \qa - 16] 118 ldp q26, q27, [\state, #-16 * 26 - 16]
109 .endr 119 ldp q24, q25, [\state, #-16 * 24 - 16]
110 .endr 120 ldp q22, q23, [\state, #-16 * 22 - 16]
121 ldp q20, q21, [\state, #-16 * 20 - 16]
122 ldp q18, q19, [\state, #-16 * 18 - 16]
123 ldp q16, q17, [\state, #-16 * 16 - 16]
124 ldp q14, q15, [\state, #-16 * 14 - 16]
125 ldp q12, q13, [\state, #-16 * 12 - 16]
126 ldp q10, q11, [\state, #-16 * 10 - 16]
127 ldp q8, q9, [\state, #-16 * 8 - 16]
128 ldp q6, q7, [\state, #-16 * 6 - 16]
129 ldp q4, q5, [\state, #-16 * 4 - 16]
130 ldp q2, q3, [\state, #-16 * 2 - 16]
131 ldp q0, q1, [\state, #-16 * 0 - 16]
1110: 1320:
112.endm 133.endm