author		Al Viro <viro@zeniv.linux.org.uk>	2017-03-19 16:11:13 -0400
committer	Al Viro <viro@zeniv.linux.org.uk>	2017-03-28 18:23:38 -0400
commit		8cd920f26785ce42ec6fb807d40de67fc2eb41ce (patch)
tree		8fed33da7a042733084eda32eb4d4c6e1663a61a
parent		35f8acd5c660eb9dbf7a8379724821abb823a3d8 (diff)
m32r: get rid of zeroing
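
The raw __copy_user primitive on m32r now leaves the destination alone on a
fault; zero-filling the uncopied tail becomes the caller's job, so the
__copy_user_zeroing asm variant (and its .fixup zero-store loop) can be
removed outright.

A minimal C sketch of the caller-side pattern, assuming only what the diff
below shows (__copy_user returns the number of bytes left uncopied; the
function name copy_from_user_sketch is hypothetical):

	static unsigned long copy_from_user_sketch(void *to,
			const void __user *from, unsigned long n)
	{
		unsigned long ret = n;			/* assume nothing copied */

		if (access_ok(VERIFY_READ, from, n))
			ret = __copy_user(to, from, n);	/* bytes left uncopied */
		if (unlikely(ret))
			memset(to + n - ret, 0, ret);	/* zero only the tail */
		return ret;				/* 0 on full success */
	}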
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
-rw-r--r--	arch/m32r/include/asm/uaccess.h	66
-rw-r--r--	arch/m32r/lib/usercopy.c	9
2 files changed, 6 insertions, 69 deletions
diff --git a/arch/m32r/include/asm/uaccess.h b/arch/m32r/include/asm/uaccess.h
index e47dcc05a0f2..d5c5e68fa2fb 100644
--- a/arch/m32r/include/asm/uaccess.h
+++ b/arch/m32r/include/asm/uaccess.h
@@ -460,77 +460,13 @@ do { \
 		: "r14", "memory");					\
 } while (0)
 
-#define __copy_user_zeroing(to, from, size)				\
-do {									\
-	unsigned long __dst, __src, __c;				\
-	__asm__ __volatile__ (						\
-		"	mv	r14, %0\n"				\
-		"	or	r14, %1\n"				\
-		"	beq	%0, %1, 9f\n"				\
-		"	beqz	%2, 9f\n"				\
-		"	and3	r14, r14, #3\n"				\
-		"	bnez	r14, 2f\n"				\
-		"	and3	%2, %2, #3\n"				\
-		"	beqz	%3, 2f\n"				\
-		"	addi	%0, #-4		; word_copy \n"		\
-		"	.fillinsn\n"					\
-		"0:	ld	r14, @%1+\n"				\
-		"	addi	%3, #-1\n"				\
-		"	.fillinsn\n"					\
-		"1:	st	r14, @+%0\n"				\
-		"	bnez	%3, 0b\n"				\
-		"	beqz	%2, 9f\n"				\
-		"	addi	%0, #4\n"				\
-		"	.fillinsn\n"					\
-		"2:	ldb	r14, @%1	; byte_copy \n"		\
-		"	.fillinsn\n"					\
-		"3:	stb	r14, @%0\n"				\
-		"	addi	%1, #1\n"				\
-		"	addi	%2, #-1\n"				\
-		"	addi	%0, #1\n"				\
-		"	bnez	%2, 2b\n"				\
-		"	.fillinsn\n"					\
-		"9:\n"							\
-		".section .fixup,\"ax\"\n"				\
-		"	.balign 4\n"					\
-		"5:	addi	%3, #1\n"				\
-		"	addi	%1, #-4\n"				\
-		"	.fillinsn\n"					\
-		"6:	slli	%3, #2\n"				\
-		"	add	%2, %3\n"				\
-		"	addi	%0, #4\n"				\
-		"	.fillinsn\n"					\
-		"7:	ldi	r14, #0		; store zero \n"	\
-		"	.fillinsn\n"					\
-		"8:	addi	%2, #-1\n"				\
-		"	stb	r14, @%0	; ACE? \n"		\
-		"	addi	%0, #1\n"				\
-		"	bnez	%2, 8b\n"				\
-		"	seth	r14, #high(9b)\n"			\
-		"	or3	r14, r14, #low(9b)\n"			\
-		"	jmp	r14\n"					\
-		".previous\n"						\
-		".section __ex_table,\"a\"\n"				\
-		"	.balign 4\n"					\
-		"	.long 0b,6b\n"					\
-		"	.long 1b,5b\n"					\
-		"	.long 2b,7b\n"					\
-		"	.long 3b,7b\n"					\
-		".previous\n"						\
-		: "=&r" (__dst), "=&r" (__src), "=&r" (size),		\
-		  "=&r" (__c)						\
-		: "0" (to), "1" (from), "2" (size), "3" (size / 4)	\
-		: "r14", "memory");					\
-} while (0)
-
-
 /* We let the __ versions of copy_from/to_user inline, because they're often
  * used in fast paths and have only a small space overhead.
  */
 static inline unsigned long __generic_copy_from_user_nocheck(void *to,
 			const void __user *from, unsigned long n)
 {
-	__copy_user_zeroing(to, from, n);
+	__copy_user(to, from, n);
 	return n;
 }
 
diff --git a/arch/m32r/lib/usercopy.c b/arch/m32r/lib/usercopy.c
index fd03f2731f20..6aacf5ba0a58 100644
--- a/arch/m32r/lib/usercopy.c
+++ b/arch/m32r/lib/usercopy.c
@@ -23,12 +23,13 @@ __generic_copy_to_user(void __user *to, const void *from, unsigned long n)
 unsigned long
 __generic_copy_from_user(void *to, const void __user *from, unsigned long n)
 {
+	unsigned long ret = n;
 	prefetchw(to);
 	if (access_ok(VERIFY_READ, from, n))
-		__copy_user_zeroing(to,from,n);
-	else
-		memset(to, 0, n);
-	return n;
+		ret = __copy_user(to,from,n);
+	if (unlikely(ret))
+		memset(to + n - ret, 0, ret);
+	return ret;
 }
 
 
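Worked example of the new contract (hypothetical numbers): copying n = 128
bytes that faults after 100 leaves __copy_user returning 28; the caller then
zeroes to[100..127] and returns 28, which preserves copy_from_user's
zero-on-short-read behaviour while doing the zeroing once in C instead of in
the asm fixup path.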