aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAl Viro <viro@zeniv.linux.org.uk>2017-05-02 20:06:33 -0400
committerAl Viro <viro@zeniv.linux.org.uk>2017-07-03 18:44:22 -0400
commit3170d8d226c2053355f3946b4b5ded4c006fe6d4 (patch)
tree91b180fe1a8f216ff0abbf61488c3a280135a63f
parent468138d78510688fb5476f98d23f11ac6a63229a (diff)
kill {__,}{get,put}_user_unaligned()
no users left

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
-rw-r--r--arch/arm/include/asm/uaccess.h7
-rw-r--r--arch/arm64/include/asm/uaccess.h4
-rw-r--r--arch/ia64/include/asm/uaccess.h36
-rw-r--r--arch/m68k/include/asm/uaccess.h7
-rw-r--r--arch/mips/include/asm/uaccess.h277
-rw-r--r--arch/parisc/include/asm/uaccess.h1
-rw-r--r--arch/powerpc/include/asm/uaccess.h3
-rw-r--r--arch/s390/include/asm/uaccess.h3
-rw-r--r--arch/sparc/include/asm/uaccess_64.h1
-rw-r--r--arch/tile/include/asm/uaccess.h1
-rw-r--r--arch/x86/include/asm/uaccess.h3
-rw-r--r--include/asm-generic/uaccess-unaligned.h26
12 files changed, 0 insertions, 369 deletions
diff --git a/arch/arm/include/asm/uaccess.h b/arch/arm/include/asm/uaccess.h
index 2577405d082d..0726091a8964 100644
--- a/arch/arm/include/asm/uaccess.h
+++ b/arch/arm/include/asm/uaccess.h
@@ -17,13 +17,6 @@
17#include <asm/unified.h> 17#include <asm/unified.h>
18#include <asm/compiler.h> 18#include <asm/compiler.h>
19 19
20#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
21#include <asm-generic/uaccess-unaligned.h>
22#else
23#define __get_user_unaligned __get_user
24#define __put_user_unaligned __put_user
25#endif
26
27#include <asm/extable.h> 20#include <asm/extable.h>
28 21
29/* 22/*
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 7b8a04789cef..f2b465e129de 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -254,8 +254,6 @@ do { \
254 (void)0; \ 254 (void)0; \
255}) 255})
256 256
257#define __get_user_unaligned __get_user
258
259#define get_user(x, ptr) \ 257#define get_user(x, ptr) \
260({ \ 258({ \
261 __typeof__(*(ptr)) __user *__p = (ptr); \ 259 __typeof__(*(ptr)) __user *__p = (ptr); \
@@ -320,8 +318,6 @@ do { \
320 (void)0; \ 318 (void)0; \
321}) 319})
322 320
323#define __put_user_unaligned __put_user
324
325#define put_user(x, ptr) \ 321#define put_user(x, ptr) \
326({ \ 322({ \
327 __typeof__(*(ptr)) __user *__p = (ptr); \ 323 __typeof__(*(ptr)) __user *__p = (ptr); \
diff --git a/arch/ia64/include/asm/uaccess.h b/arch/ia64/include/asm/uaccess.h
index 82a7646c4416..a217bcfe6700 100644
--- a/arch/ia64/include/asm/uaccess.h
+++ b/arch/ia64/include/asm/uaccess.h
@@ -87,42 +87,6 @@ static inline int __access_ok(const void __user *p, unsigned long size)
87#define __put_user(x, ptr) __put_user_nocheck((__typeof__(*(ptr))) (x), (ptr), sizeof(*(ptr))) 87#define __put_user(x, ptr) __put_user_nocheck((__typeof__(*(ptr))) (x), (ptr), sizeof(*(ptr)))
88#define __get_user(x, ptr) __get_user_nocheck((x), (ptr), sizeof(*(ptr))) 88#define __get_user(x, ptr) __get_user_nocheck((x), (ptr), sizeof(*(ptr)))
89 89
90extern long __put_user_unaligned_unknown (void);
91
92#define __put_user_unaligned(x, ptr) \
93({ \
94 long __ret; \
95 switch (sizeof(*(ptr))) { \
96 case 1: __ret = __put_user((x), (ptr)); break; \
97 case 2: __ret = (__put_user((x), (u8 __user *)(ptr))) \
98 | (__put_user((x) >> 8, ((u8 __user *)(ptr) + 1))); break; \
99 case 4: __ret = (__put_user((x), (u16 __user *)(ptr))) \
100 | (__put_user((x) >> 16, ((u16 __user *)(ptr) + 1))); break; \
101 case 8: __ret = (__put_user((x), (u32 __user *)(ptr))) \
102 | (__put_user((x) >> 32, ((u32 __user *)(ptr) + 1))); break; \
103 default: __ret = __put_user_unaligned_unknown(); \
104 } \
105 __ret; \
106})
107
108extern long __get_user_unaligned_unknown (void);
109
110#define __get_user_unaligned(x, ptr) \
111({ \
112 long __ret; \
113 switch (sizeof(*(ptr))) { \
114 case 1: __ret = __get_user((x), (ptr)); break; \
115 case 2: __ret = (__get_user((x), (u8 __user *)(ptr))) \
116 | (__get_user((x) >> 8, ((u8 __user *)(ptr) + 1))); break; \
117 case 4: __ret = (__get_user((x), (u16 __user *)(ptr))) \
118 | (__get_user((x) >> 16, ((u16 __user *)(ptr) + 1))); break; \
119 case 8: __ret = (__get_user((x), (u32 __user *)(ptr))) \
120 | (__get_user((x) >> 32, ((u32 __user *)(ptr) + 1))); break; \
121 default: __ret = __get_user_unaligned_unknown(); \
122 } \
123 __ret; \
124})
125
126#ifdef ASM_SUPPORTED 90#ifdef ASM_SUPPORTED
127 struct __large_struct { unsigned long buf[100]; }; 91 struct __large_struct { unsigned long buf[100]; };
128# define __m(x) (*(struct __large_struct __user *)(x)) 92# define __m(x) (*(struct __large_struct __user *)(x))
diff --git a/arch/m68k/include/asm/uaccess.h b/arch/m68k/include/asm/uaccess.h
index 67b3481d6020..63ba18e4c9a2 100644
--- a/arch/m68k/include/asm/uaccess.h
+++ b/arch/m68k/include/asm/uaccess.h
@@ -3,11 +3,4 @@
3#else 3#else
4#include <asm/uaccess_mm.h> 4#include <asm/uaccess_mm.h>
5#endif 5#endif
6
7#include <asm/extable.h> 6#include <asm/extable.h>
8#ifdef CONFIG_CPU_HAS_NO_UNALIGNED
9#include <asm-generic/uaccess-unaligned.h>
10#else
11#define __get_user_unaligned(x, ptr) __get_user((x), (ptr))
12#define __put_user_unaligned(x, ptr) __put_user((x), (ptr))
13#endif
diff --git a/arch/mips/include/asm/uaccess.h b/arch/mips/include/asm/uaccess.h
index 99e629a590a5..c5fc42429ce8 100644
--- a/arch/mips/include/asm/uaccess.h
+++ b/arch/mips/include/asm/uaccess.h
@@ -497,283 +497,6 @@ do { \
497extern void __put_user_unknown(void); 497extern void __put_user_unknown(void);
498 498
499/* 499/*
500 * ul{b,h,w} are macros and there are no equivalent macros for EVA.
501 * EVA unaligned access is handled in the ADE exception handler.
502 */
503#ifndef CONFIG_EVA
504/*
505 * put_user_unaligned: - Write a simple value into user space.
506 * @x: Value to copy to user space.
507 * @ptr: Destination address, in user space.
508 *
509 * Context: User context only. This function may sleep if pagefaults are
510 * enabled.
511 *
512 * This macro copies a single simple value from kernel space to user
513 * space. It supports simple types like char and int, but not larger
514 * data types like structures or arrays.
515 *
516 * @ptr must have pointer-to-simple-variable type, and @x must be assignable
517 * to the result of dereferencing @ptr.
518 *
519 * Returns zero on success, or -EFAULT on error.
520 */
521#define put_user_unaligned(x,ptr) \
522 __put_user_unaligned_check((x),(ptr),sizeof(*(ptr)))
523
524/*
525 * get_user_unaligned: - Get a simple variable from user space.
526 * @x: Variable to store result.
527 * @ptr: Source address, in user space.
528 *
529 * Context: User context only. This function may sleep if pagefaults are
530 * enabled.
531 *
532 * This macro copies a single simple variable from user space to kernel
533 * space. It supports simple types like char and int, but not larger
534 * data types like structures or arrays.
535 *
536 * @ptr must have pointer-to-simple-variable type, and the result of
537 * dereferencing @ptr must be assignable to @x without a cast.
538 *
539 * Returns zero on success, or -EFAULT on error.
540 * On error, the variable @x is set to zero.
541 */
542#define get_user_unaligned(x,ptr) \
543 __get_user_unaligned_check((x),(ptr),sizeof(*(ptr)))
544
545/*
546 * __put_user_unaligned: - Write a simple value into user space, with less checking.
547 * @x: Value to copy to user space.
548 * @ptr: Destination address, in user space.
549 *
550 * Context: User context only. This function may sleep if pagefaults are
551 * enabled.
552 *
553 * This macro copies a single simple value from kernel space to user
554 * space. It supports simple types like char and int, but not larger
555 * data types like structures or arrays.
556 *
557 * @ptr must have pointer-to-simple-variable type, and @x must be assignable
558 * to the result of dereferencing @ptr.
559 *
560 * Caller must check the pointer with access_ok() before calling this
561 * function.
562 *
563 * Returns zero on success, or -EFAULT on error.
564 */
565#define __put_user_unaligned(x,ptr) \
566 __put_user_unaligned_nocheck((x),(ptr),sizeof(*(ptr)))
567
568/*
569 * __get_user_unaligned: - Get a simple variable from user space, with less checking.
570 * @x: Variable to store result.
571 * @ptr: Source address, in user space.
572 *
573 * Context: User context only. This function may sleep if pagefaults are
574 * enabled.
575 *
576 * This macro copies a single simple variable from user space to kernel
577 * space. It supports simple types like char and int, but not larger
578 * data types like structures or arrays.
579 *
580 * @ptr must have pointer-to-simple-variable type, and the result of
581 * dereferencing @ptr must be assignable to @x without a cast.
582 *
583 * Caller must check the pointer with access_ok() before calling this
584 * function.
585 *
586 * Returns zero on success, or -EFAULT on error.
587 * On error, the variable @x is set to zero.
588 */
589#define __get_user_unaligned(x,ptr) \
590 __get_user_unaligned_nocheck((x),(ptr),sizeof(*(ptr)))
591
592/*
593 * Yuck. We need two variants, one for 64bit operation and one
594 * for 32 bit mode and old iron.
595 */
596#ifdef CONFIG_32BIT
597#define __GET_USER_UNALIGNED_DW(val, ptr) \
598 __get_user_unaligned_asm_ll32(val, ptr)
599#endif
600#ifdef CONFIG_64BIT
601#define __GET_USER_UNALIGNED_DW(val, ptr) \
602 __get_user_unaligned_asm(val, "uld", ptr)
603#endif
604
605extern void __get_user_unaligned_unknown(void);
606
607#define __get_user_unaligned_common(val, size, ptr) \
608do { \
609 switch (size) { \
610 case 1: __get_data_asm(val, "lb", ptr); break; \
611 case 2: __get_data_unaligned_asm(val, "ulh", ptr); break; \
612 case 4: __get_data_unaligned_asm(val, "ulw", ptr); break; \
613 case 8: __GET_USER_UNALIGNED_DW(val, ptr); break; \
614 default: __get_user_unaligned_unknown(); break; \
615 } \
616} while (0)
617
618#define __get_user_unaligned_nocheck(x,ptr,size) \
619({ \
620 int __gu_err; \
621 \
622 __get_user_unaligned_common((x), size, ptr); \
623 __gu_err; \
624})
625
626#define __get_user_unaligned_check(x,ptr,size) \
627({ \
628 int __gu_err = -EFAULT; \
629 const __typeof__(*(ptr)) __user * __gu_ptr = (ptr); \
630 \
631 if (likely(access_ok(VERIFY_READ, __gu_ptr, size))) \
632 __get_user_unaligned_common((x), size, __gu_ptr); \
633 \
634 __gu_err; \
635})
636
637#define __get_data_unaligned_asm(val, insn, addr) \
638{ \
639 long __gu_tmp; \
640 \
641 __asm__ __volatile__( \
642 "1: " insn " %1, %3 \n" \
643 "2: \n" \
644 " .insn \n" \
645 " .section .fixup,\"ax\" \n" \
646 "3: li %0, %4 \n" \
647 " move %1, $0 \n" \
648 " j 2b \n" \
649 " .previous \n" \
650 " .section __ex_table,\"a\" \n" \
651 " "__UA_ADDR "\t1b, 3b \n" \
652 " "__UA_ADDR "\t1b + 4, 3b \n" \
653 " .previous \n" \
654 : "=r" (__gu_err), "=r" (__gu_tmp) \
655 : "0" (0), "o" (__m(addr)), "i" (-EFAULT)); \
656 \
657 (val) = (__typeof__(*(addr))) __gu_tmp; \
658}
659
660/*
661 * Get a long long 64 using 32 bit registers.
662 */
663#define __get_user_unaligned_asm_ll32(val, addr) \
664{ \
665 unsigned long long __gu_tmp; \
666 \
667 __asm__ __volatile__( \
668 "1: ulw %1, (%3) \n" \
669 "2: ulw %D1, 4(%3) \n" \
670 " move %0, $0 \n" \
671 "3: \n" \
672 " .insn \n" \
673 " .section .fixup,\"ax\" \n" \
674 "4: li %0, %4 \n" \
675 " move %1, $0 \n" \
676 " move %D1, $0 \n" \
677 " j 3b \n" \
678 " .previous \n" \
679 " .section __ex_table,\"a\" \n" \
680 " " __UA_ADDR " 1b, 4b \n" \
681 " " __UA_ADDR " 1b + 4, 4b \n" \
682 " " __UA_ADDR " 2b, 4b \n" \
683 " " __UA_ADDR " 2b + 4, 4b \n" \
684 " .previous \n" \
685 : "=r" (__gu_err), "=&r" (__gu_tmp) \
686 : "0" (0), "r" (addr), "i" (-EFAULT)); \
687 (val) = (__typeof__(*(addr))) __gu_tmp; \
688}
689
690/*
691 * Yuck. We need two variants, one for 64bit operation and one
692 * for 32 bit mode and old iron.
693 */
694#ifdef CONFIG_32BIT
695#define __PUT_USER_UNALIGNED_DW(ptr) __put_user_unaligned_asm_ll32(ptr)
696#endif
697#ifdef CONFIG_64BIT
698#define __PUT_USER_UNALIGNED_DW(ptr) __put_user_unaligned_asm("usd", ptr)
699#endif
700
701#define __put_user_unaligned_common(ptr, size) \
702do { \
703 switch (size) { \
704 case 1: __put_data_asm("sb", ptr); break; \
705 case 2: __put_user_unaligned_asm("ush", ptr); break; \
706 case 4: __put_user_unaligned_asm("usw", ptr); break; \
707 case 8: __PUT_USER_UNALIGNED_DW(ptr); break; \
708 default: __put_user_unaligned_unknown(); break; \
709} while (0)
710
711#define __put_user_unaligned_nocheck(x,ptr,size) \
712({ \
713 __typeof__(*(ptr)) __pu_val; \
714 int __pu_err = 0; \
715 \
716 __pu_val = (x); \
717 __put_user_unaligned_common(ptr, size); \
718 __pu_err; \
719})
720
721#define __put_user_unaligned_check(x,ptr,size) \
722({ \
723 __typeof__(*(ptr)) __user *__pu_addr = (ptr); \
724 __typeof__(*(ptr)) __pu_val = (x); \
725 int __pu_err = -EFAULT; \
726 \
727 if (likely(access_ok(VERIFY_WRITE, __pu_addr, size))) \
728 __put_user_unaligned_common(__pu_addr, size); \
729 \
730 __pu_err; \
731})
732
733#define __put_user_unaligned_asm(insn, ptr) \
734{ \
735 __asm__ __volatile__( \
736 "1: " insn " %z2, %3 # __put_user_unaligned_asm\n" \
737 "2: \n" \
738 " .insn \n" \
739 " .section .fixup,\"ax\" \n" \
740 "3: li %0, %4 \n" \
741 " j 2b \n" \
742 " .previous \n" \
743 " .section __ex_table,\"a\" \n" \
744 " " __UA_ADDR " 1b, 3b \n" \
745 " .previous \n" \
746 : "=r" (__pu_err) \
747 : "0" (0), "Jr" (__pu_val), "o" (__m(ptr)), \
748 "i" (-EFAULT)); \
749}
750
751#define __put_user_unaligned_asm_ll32(ptr) \
752{ \
753 __asm__ __volatile__( \
754 "1: sw %2, (%3) # __put_user_unaligned_asm_ll32 \n" \
755 "2: sw %D2, 4(%3) \n" \
756 "3: \n" \
757 " .insn \n" \
758 " .section .fixup,\"ax\" \n" \
759 "4: li %0, %4 \n" \
760 " j 3b \n" \
761 " .previous \n" \
762 " .section __ex_table,\"a\" \n" \
763 " " __UA_ADDR " 1b, 4b \n" \
764 " " __UA_ADDR " 1b + 4, 4b \n" \
765 " " __UA_ADDR " 2b, 4b \n" \
766 " " __UA_ADDR " 2b + 4, 4b \n" \
767 " .previous" \
768 : "=r" (__pu_err) \
769 : "0" (0), "r" (__pu_val), "r" (ptr), \
770 "i" (-EFAULT)); \
771}
772
773extern void __put_user_unaligned_unknown(void);
774#endif
775
776/*
777 * We're generating jump to subroutines which will be outside the range of 500 * We're generating jump to subroutines which will be outside the range of
778 * jump instructions 501 * jump instructions
779 */ 502 */
diff --git a/arch/parisc/include/asm/uaccess.h b/arch/parisc/include/asm/uaccess.h
index 6b113f39f30c..d24091e1907a 100644
--- a/arch/parisc/include/asm/uaccess.h
+++ b/arch/parisc/include/asm/uaccess.h
@@ -6,7 +6,6 @@
6 */ 6 */
7#include <asm/page.h> 7#include <asm/page.h>
8#include <asm/cache.h> 8#include <asm/cache.h>
9#include <asm-generic/uaccess-unaligned.h>
10 9
11#include <linux/bug.h> 10#include <linux/bug.h>
12#include <linux/string.h> 11#include <linux/string.h>
diff --git a/arch/powerpc/include/asm/uaccess.h b/arch/powerpc/include/asm/uaccess.h
index 5c0d8a8cdae5..9e7b08bbde5b 100644
--- a/arch/powerpc/include/asm/uaccess.h
+++ b/arch/powerpc/include/asm/uaccess.h
@@ -90,9 +90,6 @@
90#define __put_user_inatomic(x, ptr) \ 90#define __put_user_inatomic(x, ptr) \
91 __put_user_nosleep((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr))) 91 __put_user_nosleep((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
92 92
93#define __get_user_unaligned __get_user
94#define __put_user_unaligned __put_user
95
96extern long __put_user_bad(void); 93extern long __put_user_bad(void);
97 94
98/* 95/*
diff --git a/arch/s390/include/asm/uaccess.h b/arch/s390/include/asm/uaccess.h
index 78f3f093d143..fad9df965ec2 100644
--- a/arch/s390/include/asm/uaccess.h
+++ b/arch/s390/include/asm/uaccess.h
@@ -249,9 +249,6 @@ int __put_user_bad(void) __attribute__((noreturn));
249 249
250int __get_user_bad(void) __attribute__((noreturn)); 250int __get_user_bad(void) __attribute__((noreturn));
251 251
252#define __put_user_unaligned __put_user
253#define __get_user_unaligned __get_user
254
255unsigned long __must_check 252unsigned long __must_check
256raw_copy_in_user(void __user *to, const void __user *from, unsigned long n); 253raw_copy_in_user(void __user *to, const void __user *from, unsigned long n);
257 254
diff --git a/arch/sparc/include/asm/uaccess_64.h b/arch/sparc/include/asm/uaccess_64.h
index 6096d671aa63..b5f976ee7510 100644
--- a/arch/sparc/include/asm/uaccess_64.h
+++ b/arch/sparc/include/asm/uaccess_64.h
@@ -9,7 +9,6 @@
9#include <linux/string.h> 9#include <linux/string.h>
10#include <asm/asi.h> 10#include <asm/asi.h>
11#include <asm/spitfire.h> 11#include <asm/spitfire.h>
12#include <asm-generic/uaccess-unaligned.h>
13#include <asm/extable_64.h> 12#include <asm/extable_64.h>
14 13
15#include <asm/processor.h> 14#include <asm/processor.h>
diff --git a/arch/tile/include/asm/uaccess.h b/arch/tile/include/asm/uaccess.h
index a803f6bb4d92..7f777411d1d6 100644
--- a/arch/tile/include/asm/uaccess.h
+++ b/arch/tile/include/asm/uaccess.h
@@ -19,7 +19,6 @@
19 * User space memory access functions 19 * User space memory access functions
20 */ 20 */
21#include <linux/mm.h> 21#include <linux/mm.h>
22#include <asm-generic/uaccess-unaligned.h>
23#include <asm/processor.h> 22#include <asm/processor.h>
24#include <asm/page.h> 23#include <asm/page.h>
25 24
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 68766b276d9e..fd91722315c8 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -535,9 +535,6 @@ struct __large_struct { unsigned long buf[100]; };
535#define __put_user(x, ptr) \ 535#define __put_user(x, ptr) \
536 __put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr))) 536 __put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
537 537
538#define __get_user_unaligned __get_user
539#define __put_user_unaligned __put_user
540
541/* 538/*
542 * {get|put}_user_try and catch 539 * {get|put}_user_try and catch
543 * 540 *
diff --git a/include/asm-generic/uaccess-unaligned.h b/include/asm-generic/uaccess-unaligned.h
deleted file mode 100644
index 67deb898f0c5..000000000000
--- a/include/asm-generic/uaccess-unaligned.h
+++ /dev/null
@@ -1,26 +0,0 @@
1#ifndef __ASM_GENERIC_UACCESS_UNALIGNED_H
2#define __ASM_GENERIC_UACCESS_UNALIGNED_H
3
4/*
5 * This macro should be used instead of __get_user() when accessing
6 * values at locations that are not known to be aligned.
7 */
8#define __get_user_unaligned(x, ptr) \
9({ \
10 __typeof__ (*(ptr)) __x; \
11 __copy_from_user(&__x, (ptr), sizeof(*(ptr))) ? -EFAULT : 0; \
12 (x) = __x; \
13})
14
15
16/*
17 * This macro should be used instead of __put_user() when accessing
18 * values at locations that are not known to be aligned.
19 */
20#define __put_user_unaligned(x, ptr) \
21({ \
22 __typeof__ (*(ptr)) __x = (x); \
23 __copy_to_user((ptr), &__x, sizeof(*(ptr))) ? -EFAULT : 0; \
24})
25
26#endif /* __ASM_GENERIC_UACCESS_UNALIGNED_H */