about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorUlrich Weigand <ulrich.weigand@de.ibm.com>2014-02-14 13:21:03 -0500
committerAnton Blanchard <anton@samba.org>2014-04-22 20:05:24 -0400
commit752a6422fec3c0f5f9d4ac43d92f5dd13e22fde4 (patch)
tree6ef91c1ad3c067345ce45bb6d7730ab9f38c9241
parentb37c10d128a2fa3256d4e67c184177270eac4b86 (diff)
powerpc: Fix unsafe accesses to parameter area in ELFv2
Some of the assembler files in lib/ make use of the fact that in the ELFv1 ABI, the caller guarantees to provide stack space to save the parameter registers r3 ... r10. This guarantee is no longer present in ELFv2 for functions that have no variable argument list and no more than 8 arguments.

Change the affected routines to temporarily store registers in the red zone and/or the top of their own stack frame (in the space provided to save r31 .. r29, which is actually not used in these routines).

In opal_query_takeover, simply always allocate a stack frame; the routine is not performance critical.

Signed-off-by: Ulrich Weigand <ulrich.weigand@de.ibm.com>
Signed-off-by: Anton Blanchard <anton@samba.org>
-rw-r--r--arch/powerpc/lib/copypage_power7.S8
-rw-r--r--arch/powerpc/lib/copyuser_power7.S24
-rw-r--r--arch/powerpc/lib/memcpy_64.S8
-rw-r--r--arch/powerpc/lib/memcpy_power7.S20
-rw-r--r--arch/powerpc/platforms/powernv/opal-takeover.S2
5 files changed, 32 insertions(+), 30 deletions(-)
diff --git a/arch/powerpc/lib/copypage_power7.S b/arch/powerpc/lib/copypage_power7.S
index affc6d308e13..d7dafb3777ac 100644
--- a/arch/powerpc/lib/copypage_power7.S
+++ b/arch/powerpc/lib/copypage_power7.S
@@ -56,15 +56,15 @@ _GLOBAL(copypage_power7)
56 56
57#ifdef CONFIG_ALTIVEC 57#ifdef CONFIG_ALTIVEC
58 mflr r0 58 mflr r0
59 std r3,STK_PARAM(R3)(r1) 59 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
60 std r4,STK_PARAM(R4)(r1) 60 std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
61 std r0,16(r1) 61 std r0,16(r1)
62 stdu r1,-STACKFRAMESIZE(r1) 62 stdu r1,-STACKFRAMESIZE(r1)
63 bl enter_vmx_copy 63 bl enter_vmx_copy
64 cmpwi r3,0 64 cmpwi r3,0
65 ld r0,STACKFRAMESIZE+16(r1) 65 ld r0,STACKFRAMESIZE+16(r1)
66 ld r3,STACKFRAMESIZE+STK_PARAM(R3)(r1) 66 ld r3,STK_REG(R31)(r1)
67 ld r4,STACKFRAMESIZE+STK_PARAM(R4)(r1) 67 ld r4,STK_REG(R30)(r1)
68 mtlr r0 68 mtlr r0
69 69
70 li r0,(PAGE_SIZE/128) 70 li r0,(PAGE_SIZE/128)
diff --git a/arch/powerpc/lib/copyuser_power7.S b/arch/powerpc/lib/copyuser_power7.S
index db0fcbcc1d60..c46c876ac96a 100644
--- a/arch/powerpc/lib/copyuser_power7.S
+++ b/arch/powerpc/lib/copyuser_power7.S
@@ -85,9 +85,9 @@
85.Lexit: 85.Lexit:
86 addi r1,r1,STACKFRAMESIZE 86 addi r1,r1,STACKFRAMESIZE
87.Ldo_err1: 87.Ldo_err1:
88 ld r3,STK_PARAM(R3)(r1) 88 ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
89 ld r4,STK_PARAM(R4)(r1) 89 ld r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
90 ld r5,STK_PARAM(R5)(r1) 90 ld r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
91 b __copy_tofrom_user_base 91 b __copy_tofrom_user_base
92 92
93 93
@@ -96,18 +96,18 @@ _GLOBAL(__copy_tofrom_user_power7)
96 cmpldi r5,16 96 cmpldi r5,16
97 cmpldi cr1,r5,4096 97 cmpldi cr1,r5,4096
98 98
99 std r3,STK_PARAM(R3)(r1) 99 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
100 std r4,STK_PARAM(R4)(r1) 100 std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
101 std r5,STK_PARAM(R5)(r1) 101 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
102 102
103 blt .Lshort_copy 103 blt .Lshort_copy
104 bgt cr1,.Lvmx_copy 104 bgt cr1,.Lvmx_copy
105#else 105#else
106 cmpldi r5,16 106 cmpldi r5,16
107 107
108 std r3,STK_PARAM(R3)(r1) 108 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
109 std r4,STK_PARAM(R4)(r1) 109 std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
110 std r5,STK_PARAM(R5)(r1) 110 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
111 111
112 blt .Lshort_copy 112 blt .Lshort_copy
113#endif 113#endif
@@ -298,9 +298,9 @@ err1; stb r0,0(r3)
298 bl enter_vmx_usercopy 298 bl enter_vmx_usercopy
299 cmpwi cr1,r3,0 299 cmpwi cr1,r3,0
300 ld r0,STACKFRAMESIZE+16(r1) 300 ld r0,STACKFRAMESIZE+16(r1)
301 ld r3,STACKFRAMESIZE+STK_PARAM(R3)(r1) 301 ld r3,STK_REG(R31)(r1)
302 ld r4,STACKFRAMESIZE+STK_PARAM(R4)(r1) 302 ld r4,STK_REG(R30)(r1)
303 ld r5,STACKFRAMESIZE+STK_PARAM(R5)(r1) 303 ld r5,STK_REG(R29)(r1)
304 mtlr r0 304 mtlr r0
305 305
306 /* 306 /*
diff --git a/arch/powerpc/lib/memcpy_64.S b/arch/powerpc/lib/memcpy_64.S
index 01da956a52fb..9d3960c16fde 100644
--- a/arch/powerpc/lib/memcpy_64.S
+++ b/arch/powerpc/lib/memcpy_64.S
@@ -12,7 +12,7 @@
12 .align 7 12 .align 7
13_GLOBAL(memcpy) 13_GLOBAL(memcpy)
14BEGIN_FTR_SECTION 14BEGIN_FTR_SECTION
15 std r3,STK_PARAM(R3)(r1) /* save destination pointer for return value */ 15 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1) /* save destination pointer for return value */
16FTR_SECTION_ELSE 16FTR_SECTION_ELSE
17#ifndef SELFTEST 17#ifndef SELFTEST
18 b memcpy_power7 18 b memcpy_power7
@@ -73,7 +73,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
732: bf cr7*4+3,3f 732: bf cr7*4+3,3f
74 lbz r9,8(r4) 74 lbz r9,8(r4)
75 stb r9,0(r3) 75 stb r9,0(r3)
763: ld r3,STK_PARAM(R3)(r1) /* return dest pointer */ 763: ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1) /* return dest pointer */
77 blr 77 blr
78 78
79.Lsrc_unaligned: 79.Lsrc_unaligned:
@@ -156,7 +156,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
1562: bf cr7*4+3,3f 1562: bf cr7*4+3,3f
157 rotldi r9,r9,8 157 rotldi r9,r9,8
158 stb r9,0(r3) 158 stb r9,0(r3)
1593: ld r3,STK_PARAM(R3)(r1) /* return dest pointer */ 1593: ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1) /* return dest pointer */
160 blr 160 blr
161 161
162.Ldst_unaligned: 162.Ldst_unaligned:
@@ -201,5 +201,5 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
2013: bf cr7*4+3,4f 2013: bf cr7*4+3,4f
202 lbz r0,0(r4) 202 lbz r0,0(r4)
203 stb r0,0(r3) 203 stb r0,0(r3)
2044: ld r3,STK_PARAM(R3)(r1) /* return dest pointer */ 2044: ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1) /* return dest pointer */
205 blr 205 blr
diff --git a/arch/powerpc/lib/memcpy_power7.S b/arch/powerpc/lib/memcpy_power7.S
index 87d8eeccd4b7..2ff5c142f87b 100644
--- a/arch/powerpc/lib/memcpy_power7.S
+++ b/arch/powerpc/lib/memcpy_power7.S
@@ -33,14 +33,14 @@ _GLOBAL(memcpy_power7)
33 cmpldi r5,16 33 cmpldi r5,16
34 cmpldi cr1,r5,4096 34 cmpldi cr1,r5,4096
35 35
36 std r3,STK_PARAM(R1)(r1) 36 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
37 37
38 blt .Lshort_copy 38 blt .Lshort_copy
39 bgt cr1,.Lvmx_copy 39 bgt cr1,.Lvmx_copy
40#else 40#else
41 cmpldi r5,16 41 cmpldi r5,16
42 42
43 std r3,STK_PARAM(R1)(r1) 43 std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
44 44
45 blt .Lshort_copy 45 blt .Lshort_copy
46#endif 46#endif
@@ -216,7 +216,7 @@ _GLOBAL(memcpy_power7)
216 lbz r0,0(r4) 216 lbz r0,0(r4)
217 stb r0,0(r3) 217 stb r0,0(r3)
218 218
21915: ld r3,STK_PARAM(R3)(r1) 21915: ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
220 blr 220 blr
221 221
222.Lunwind_stack_nonvmx_copy: 222.Lunwind_stack_nonvmx_copy:
@@ -226,16 +226,16 @@ _GLOBAL(memcpy_power7)
226#ifdef CONFIG_ALTIVEC 226#ifdef CONFIG_ALTIVEC
227.Lvmx_copy: 227.Lvmx_copy:
228 mflr r0 228 mflr r0
229 std r4,STK_PARAM(R4)(r1) 229 std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
230 std r5,STK_PARAM(R5)(r1) 230 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
231 std r0,16(r1) 231 std r0,16(r1)
232 stdu r1,-STACKFRAMESIZE(r1) 232 stdu r1,-STACKFRAMESIZE(r1)
233 bl enter_vmx_copy 233 bl enter_vmx_copy
234 cmpwi cr1,r3,0 234 cmpwi cr1,r3,0
235 ld r0,STACKFRAMESIZE+16(r1) 235 ld r0,STACKFRAMESIZE+16(r1)
236 ld r3,STACKFRAMESIZE+STK_PARAM(R3)(r1) 236 ld r3,STK_REG(R31)(r1)
237 ld r4,STACKFRAMESIZE+STK_PARAM(R4)(r1) 237 ld r4,STK_REG(R30)(r1)
238 ld r5,STACKFRAMESIZE+STK_PARAM(R5)(r1) 238 ld r5,STK_REG(R29)(r1)
239 mtlr r0 239 mtlr r0
240 240
241 /* 241 /*
@@ -447,7 +447,7 @@ _GLOBAL(memcpy_power7)
447 stb r0,0(r3) 447 stb r0,0(r3)
448 448
44915: addi r1,r1,STACKFRAMESIZE 44915: addi r1,r1,STACKFRAMESIZE
450 ld r3,STK_PARAM(R3)(r1) 450 ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
451 b exit_vmx_copy /* tail call optimise */ 451 b exit_vmx_copy /* tail call optimise */
452 452
453.Lvmx_unaligned_copy: 453.Lvmx_unaligned_copy:
@@ -651,6 +651,6 @@ _GLOBAL(memcpy_power7)
651 stb r0,0(r3) 651 stb r0,0(r3)
652 652
65315: addi r1,r1,STACKFRAMESIZE 65315: addi r1,r1,STACKFRAMESIZE
654 ld r3,STK_PARAM(R3)(r1) 654 ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
655 b exit_vmx_copy /* tail call optimise */ 655 b exit_vmx_copy /* tail call optimise */
656#endif /* CONFiG_ALTIVEC */ 656#endif /* CONFiG_ALTIVEC */
diff --git a/arch/powerpc/platforms/powernv/opal-takeover.S b/arch/powerpc/platforms/powernv/opal-takeover.S
index 3cd262897c27..11a3169ee583 100644
--- a/arch/powerpc/platforms/powernv/opal-takeover.S
+++ b/arch/powerpc/platforms/powernv/opal-takeover.S
@@ -21,11 +21,13 @@
21_GLOBAL(opal_query_takeover) 21_GLOBAL(opal_query_takeover)
22 mfcr r0 22 mfcr r0
23 stw r0,8(r1) 23 stw r0,8(r1)
24 stdu r1,-STACKFRAMESIZE(r1)
24 std r3,STK_PARAM(R3)(r1) 25 std r3,STK_PARAM(R3)(r1)
25 std r4,STK_PARAM(R4)(r1) 26 std r4,STK_PARAM(R4)(r1)
26 li r3,H_HAL_TAKEOVER 27 li r3,H_HAL_TAKEOVER
27 li r4,H_HAL_TAKEOVER_QUERY_MAGIC 28 li r4,H_HAL_TAKEOVER_QUERY_MAGIC
28 HVSC 29 HVSC
30 addi r1,r1,STACKFRAMESIZE
29 ld r10,STK_PARAM(R3)(r1) 31 ld r10,STK_PARAM(R3)(r1)
30 std r4,0(r10) 32 std r4,0(r10)
31 ld r10,STK_PARAM(R4)(r1) 33 ld r10,STK_PARAM(R4)(r1)