Diffstat (limited to 'arch/powerpc/kernel/align.c')
-rw-r--r--	arch/powerpc/kernel/align.c	56
1 file changed, 31 insertions(+), 25 deletions(-)
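
Editorial note: this patch converts every __get_user()/__put_user() call in the powerpc alignment-fixup path to the __get_user_inatomic()/__put_user_inatomic() variants. The usual reason for this switch is that the alignment exception handler can be entered from code running with page faults disabled (for instance when a copy_tofrom_user_inatomic() trips an unaligned access); the regular accessors carry might-sleep debugging checks that would warn spuriously in that context, while the _inatomic variants perform the same user access without them. A minimal sketch of the pattern, assuming the powerpc _inatomic accessors; the helper name and scaffolding below are hypothetical and not part of this patch:

	/*
	 * Hedged sketch, not from the patch: the general shape of an
	 * alignment-fixup helper touching user memory byte by byte.
	 * emulate_byte_copy() is a made-up name; __get_user_inatomic()
	 * is the powerpc accessor the patch switches to, which does the
	 * same access as __get_user() minus the might-sleep checks, so
	 * it is safe when the faulting code had page faults disabled.
	 */
	#include <linux/uaccess.h>

	static int emulate_byte_copy(unsigned char *dst,
				     const unsigned char __user *src, int nb)
	{
		int i;

		for (i = 0; i < nb; ++i)
			if (__get_user_inatomic(dst[i], src + i))
				return -EFAULT;	/* caller raises the signal */
		return 1;	/* same success convention as emulate_dcbz() */
	}
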
diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index 4734b5de599d..5c9ff7f5c44e 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -241,7 +241,7 @@ static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
 	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
 		return -EFAULT;
 	for (i = 0; i < size / sizeof(long); ++i)
-		if (__put_user(0, p+i))
+		if (__put_user_inatomic(0, p+i))
 			return -EFAULT;
 	return 1;
 }
@@ -288,7 +288,8 @@ static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
 		} else {
 			unsigned long pc = regs->nip ^ (swiz & 4);
 
-			if (__get_user(instr, (unsigned int __user *)pc))
+			if (__get_user_inatomic(instr,
+						(unsigned int __user *)pc))
 				return -EFAULT;
 			if (swiz == 0 && (flags & SW))
 				instr = cpu_to_le32(instr);
@@ -324,27 +325,31 @@ static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
 		       ((nb0 + 3) / 4) * sizeof(unsigned long));
 
 		for (i = 0; i < nb; ++i, ++p)
-			if (__get_user(REG_BYTE(rptr, i ^ bswiz), SWIZ_PTR(p)))
+			if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
+						SWIZ_PTR(p)))
 				return -EFAULT;
 		if (nb0 > 0) {
 			rptr = &regs->gpr[0];
 			addr += nb;
 			for (i = 0; i < nb0; ++i, ++p)
-				if (__get_user(REG_BYTE(rptr, i ^ bswiz),
-					       SWIZ_PTR(p)))
+				if (__get_user_inatomic(REG_BYTE(rptr,
+								 i ^ bswiz),
+							SWIZ_PTR(p)))
 					return -EFAULT;
 		}
 
 	} else {
 		for (i = 0; i < nb; ++i, ++p)
-			if (__put_user(REG_BYTE(rptr, i ^ bswiz), SWIZ_PTR(p)))
+			if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
+						SWIZ_PTR(p)))
 				return -EFAULT;
 		if (nb0 > 0) {
 			rptr = &regs->gpr[0];
 			addr += nb;
 			for (i = 0; i < nb0; ++i, ++p)
-				if (__put_user(REG_BYTE(rptr, i ^ bswiz),
-					       SWIZ_PTR(p)))
+				if (__put_user_inatomic(REG_BYTE(rptr,
+								 i ^ bswiz),
+							SWIZ_PTR(p)))
 					return -EFAULT;
 		}
 	}
@@ -398,7 +403,8 @@ int fix_alignment(struct pt_regs *regs)
 
 		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
 			pc ^= 4;
-		if (unlikely(__get_user(instr, (unsigned int __user *)pc)))
+		if (unlikely(__get_user_inatomic(instr,
+						 (unsigned int __user *)pc)))
 			return -EFAULT;
 		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
 			instr = cpu_to_le32(instr);
@@ -474,16 +480,16 @@ int fix_alignment(struct pt_regs *regs)
 		p = (unsigned long) addr;
 		switch (nb) {
 		case 8:
-			ret |= __get_user(data.v[0], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[1], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[2], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[3], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[0], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[1], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[2], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[3], SWIZ_PTR(p++));
 		case 4:
-			ret |= __get_user(data.v[4], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[5], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[4], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[5], SWIZ_PTR(p++));
 		case 2:
-			ret |= __get_user(data.v[6], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[7], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[6], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[7], SWIZ_PTR(p++));
 			if (unlikely(ret))
 				return -EFAULT;
 		}
@@ -551,16 +557,16 @@ int fix_alignment(struct pt_regs *regs)
 		p = (unsigned long) addr;
 		switch (nb) {
 		case 8:
-			ret |= __put_user(data.v[0], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[1], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[2], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[3], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[0], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[1], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[2], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[3], SWIZ_PTR(p++));
 		case 4:
-			ret |= __put_user(data.v[4], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[5], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[4], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[5], SWIZ_PTR(p++));
 		case 2:
-			ret |= __put_user(data.v[6], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[7], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[6], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[7], SWIZ_PTR(p++));
 		}
 		if (unlikely(ret))
 			return -EFAULT;
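
Editorial note: the switch (nb) blocks in fix_alignment() above depend on deliberate case fall-through. An 8-byte access executes all eight accessor calls, a 4-byte access only the last four, a 2-byte access the last two, so an nb-byte value always lands in the tail of data.v; individual error results are OR-ed into ret and tested once at the end. A stand-alone sketch of that pattern in plain user-space C (get_byte() is a hypothetical stand-in for __get_user_inatomic(), not kernel API):

	#include <stdio.h>

	/* Stand-in accessor: copies one byte, returns 0 on success.
	 * A real accessor could fail and return nonzero. */
	static int get_byte(unsigned char *dst, const unsigned char *src)
	{
		*dst = *src;
		return 0;
	}

	int main(void)
	{
		unsigned char src[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
		unsigned char v[8] = { 0 };
		const unsigned char *p = src;
		int nb = 4;	/* size in bytes of the emulated access */
		int ret = 0;

		/*
		 * An nb-byte access fills only the last nb entries of
		 * v[], exactly as fix_alignment() fills the tail of
		 * data.v; errors are OR-ed together and checked once.
		 */
		switch (nb) {
		case 8:
			ret |= get_byte(&v[0], p++);
			ret |= get_byte(&v[1], p++);
			ret |= get_byte(&v[2], p++);
			ret |= get_byte(&v[3], p++);
			/* fall through */
		case 4:
			ret |= get_byte(&v[4], p++);
			ret |= get_byte(&v[5], p++);
			/* fall through */
		case 2:
			ret |= get_byte(&v[6], p++);
			ret |= get_byte(&v[7], p++);
		}
		if (ret)
			return 1;
		printf("%d %d\n", v[4], v[7]);	/* "1 4" for nb == 4 */
		return 0;
	}
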