author	Mark Nelson <markn@au1.ibm.com>	2009-02-25 08:46:24 -0500
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2009-02-25 22:02:54 -0500
commit	f72b728bf100f276628e378e1fe6c6acd5d09401
tree	edc7e3f0ad62669a188c24d92831565dc5c35bfd
parent	e423b9ecd6aa434ce9ba72a21fdc61079e620e0a
powerpc: Fix 64bit __copy_tofrom_user() regression
This fixes a regression introduced by commit a4e22f02f5b6518c1484faea1f88d81802b9feac ("powerpc: Update 64bit __copy_tofrom_user() using CPU_FTR_UNALIGNED_LD_STD").

The same bug that existed in the 64bit memcpy() also exists here, so fix it here too. The fix is the same as that applied to memcpy(), with the addition of fixes for the exception handling code required for __copy_tofrom_user().

This stops us reading beyond the end of the source region we were told to copy.

Signed-off-by: Mark Nelson <markn@au1.ibm.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
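To see the hazard concretely: before this fix, the tail was handled by loading a full doubleword from the source (the ld at label 23:) and rotating bytes out of the register, so a copy whose source region ended fewer than 8 bytes before an unmapped page could fault on bytes the caller never asked for. The fix loads only what remains, using lwz/lhz/lbz selected by the low bits of the length; the source-unaligned path, which already holds the tail bytes assembled in r9, gets its own inline copy of the old rotldi-and-store sequence at the new label 7: so it no longer falls into the reworked .Ldo_tail. A rough C sketch of the two patterns (illustrative helper names, not kernel code; tail length assumed less than 8):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Buggy pattern: fetch a whole 8-byte word for the tail. The bytes in
 * [src + len, src + 8) were never requested, and if they cross into an
 * unmapped page the load faults even though the copy itself is legal. */
static void tail_copy_overread(char *dst, const char *src, size_t len)
{
	uint64_t w;
	memcpy(&w, src, 8);      /* always reads 8 bytes, whatever len is */
	memcpy(dst, &w, len);    /* only len of them were wanted */
}

/* Fixed pattern, mirroring the patch: read exactly the remaining bytes
 * in naturally sized pieces - 4, then 2, then 1 - the C analogue of the
 * lwz/lhz/lbz sequence keyed off the low bits of the length. */
static void tail_copy_exact(char *dst, const char *src, size_t len)
{
	if (len & 4) { memcpy(dst, src, 4); src += 4; dst += 4; }
	if (len & 2) { memcpy(dst, src, 2); src += 2; dst += 2; }
	if (len & 1) { *dst = *src; }
}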
Diffstat (limited to 'arch')
-rw-r--r--	arch/powerpc/lib/copyuser_64.S	38
1 file changed, 31 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/lib/copyuser_64.S b/arch/powerpc/lib/copyuser_64.S
index 70693a5c12a1..693b14a778fa 100644
--- a/arch/powerpc/lib/copyuser_64.S
+++ b/arch/powerpc/lib/copyuser_64.S
@@ -62,18 +62,19 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 72:	std	r8,8(r3)
 	beq+	3f
 	addi	r3,r3,16
-23:	ld	r9,8(r4)
 .Ldo_tail:
 	bf	cr7*4+1,1f
-	rotldi	r9,r9,32
+23:	lwz	r9,8(r4)
+	addi	r4,r4,4
 73:	stw	r9,0(r3)
 	addi	r3,r3,4
 1:	bf	cr7*4+2,2f
-	rotldi	r9,r9,16
+44:	lhz	r9,8(r4)
+	addi	r4,r4,2
 74:	sth	r9,0(r3)
 	addi	r3,r3,2
 2:	bf	cr7*4+3,3f
-	rotldi	r9,r9,8
+45:	lbz	r9,8(r4)
 75:	stb	r9,0(r3)
 3:	li	r3,0
 	blr
@@ -141,11 +142,24 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 6:	cmpwi	cr1,r5,8
 	addi	r3,r3,32
 	sld	r9,r9,r10
-	ble	cr1,.Ldo_tail
+	ble	cr1,7f
 34:	ld	r0,8(r4)
 	srd	r7,r0,r11
 	or	r9,r7,r9
-	b	.Ldo_tail
+7:
+	bf	cr7*4+1,1f
+	rotldi	r9,r9,32
+94:	stw	r9,0(r3)
+	addi	r3,r3,4
+1:	bf	cr7*4+2,2f
+	rotldi	r9,r9,16
+95:	sth	r9,0(r3)
+	addi	r3,r3,2
+2:	bf	cr7*4+3,3f
+	rotldi	r9,r9,8
+96:	stb	r9,0(r3)
+3:	li	r3,0
+	blr
 
 .Ldst_unaligned:
 	PPC_MTOCRF	0x01,r6		/* put #bytes to 8B bdry into cr7 */
@@ -218,7 +232,6 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 121:
 132:
 	addi	r3,r3,8
-123:
 134:
 135:
 138:
@@ -226,6 +239,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 140:
 141:
 142:
+123:
+144:
+145:
 
 /*
  * here we have had a fault on a load and r3 points to the first
@@ -309,6 +325,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 187:
 188:
 189:
+194:
+195:
+196:
 1:
 	ld	r6,-24(r1)
 	ld	r5,-8(r1)
@@ -329,7 +348,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	.llong	72b,172b
 	.llong	23b,123b
 	.llong	73b,173b
+	.llong	44b,144b
 	.llong	74b,174b
+	.llong	45b,145b
 	.llong	75b,175b
 	.llong	24b,124b
 	.llong	25b,125b
@@ -347,6 +368,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	.llong	79b,179b
 	.llong	80b,180b
 	.llong	34b,134b
+	.llong	94b,194b
+	.llong	95b,195b
+	.llong	96b,196b
 	.llong	35b,135b
 	.llong	81b,181b
 	.llong	36b,136b
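The .llong pairs at the end of the file are the routine's exception table: each entry maps a numbered load/store label that may fault on user memory (23:, 44:, 45:, 94:, 95:, 96:, ...) to the fixup code that works out how many bytes had been copied before the fault (123:, 144:, 145:, ...). Every new user access introduced by the fix therefore needs a matching entry, which is what the last three hunks add; 123: also moves to a different fixup group because the instruction at 23: changed. A simplified model of how such a table is consulted on a fault (illustrative only; the kernel keeps sorted address pairs and searches them in its trap path):

#include <stdint.h>
#include <stddef.h>

/* One entry per faultable instruction: if the access at 'insn' traps,
 * the handler resumes at 'fixup' instead of treating the fault as
 * fatal. This is what each ".llong 44b,144b" style pair encodes. */
struct extable_entry {
	uintptr_t insn;
	uintptr_t fixup;
};

/* Return the fixup address for a faulting instruction, or 0 if the
 * fault happened somewhere with no entry (i.e. a real bug). */
static uintptr_t search_fixup(const struct extable_entry *tbl, size_t n,
			      uintptr_t fault_addr)
{
	for (size_t i = 0; i < n; i++)
		if (tbl[i].insn == fault_addr)
			return tbl[i].fixup;
	return 0;
}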