author     Victor Kamensky <victor.kamensky@linaro.org>    2014-09-04 01:07:33 -0400
committer  Russell King <rmk+kernel@arm.linux.org.uk>      2014-09-12 12:38:59 -0400
commit     d9981380b49b839ecaffbbe131908a342db68980
tree       bd39bf86b001180366029db52f5a3c7e9e5bd7e3
parent     e918a62a2ba81d10a3cc2c513dc70034c9524a95
ARM: 8137/1: fix get_user BE behavior for target variable with size of 8 bytes
Commit e38361d ('ARM: 8091/2: add get_user() support for 8 byte types') broke the V7 BE get_user call when the target variable is 64 bits wide but '*ptr' is 32 bits or smaller. e38361d changed the type of __r2 from 'register unsigned long' to 'register typeof(x) __r2 asm("r2")', i.e. before the change __r2 was still 32 bits even when the target variable was 64 bits. After e38361d, for a 64 bit target variable __r2 becomes 64 bits and occupies two registers, r2 and r3.

The issue in the BE case is that r3 holds the least significant word of __r2 and r2 holds the most significant word, but __get_user_4 still copies its result into r2 (the most significant word of __r2). The subsequent copy from __r2 into x therefore picks up only garbage from r3.

Special __get_user_64t_(124) functions are introduced. They are similar to the corresponding __get_user_(124) functions, but store their result in r3 (the lsw of a 64 bit __r2 in a BE image). These functions are used by the get_user macro when building BE and the target variable is 64 bits wide. Also, __get_user_lo8 is renamed to __get_user_32t_8 to keep the naming consistent across all cases.

Signed-off-by: Victor Kamensky <victor.kamensky@linaro.org>
Suggested-by: Daniel Thompson <daniel.thompson@linaro.org>
Reviewed-by: Daniel Thompson <daniel.thompson@linaro.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
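For illustration only, a minimal sketch of the call pattern the commit message describes: a 64-bit destination paired with a narrower __user pointer, so that __r2 spans r2/r3 while the 4-byte helper writes only r2. The helper name and variables below are hypothetical, not part of the patch.

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical caller hitting the affected pattern. */
    int read_u32_into_u64(u32 __user *uptr, u64 *out)
    {
            u64 val;                        /* 8-byte destination: __r2 spans r2/r3 */

            if (get_user(val, uptr))        /* 4-byte user load into a 64-bit variable */
                    return -EFAULT;         /* before this fix, on BE the low word of
                                             * val came from whatever was in r3 */
            *out = val;
            return 0;
    }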
-rw-r--r--  arch/arm/include/asm/uaccess.h | 48
-rw-r--r--  arch/arm/lib/getuser.S         | 38
2 files changed, 75 insertions(+), 11 deletions(-)
diff --git a/arch/arm/include/asm/uaccess.h b/arch/arm/include/asm/uaccess.h
index a4cd7af475e9..4767eb9caa78 100644
--- a/arch/arm/include/asm/uaccess.h
+++ b/arch/arm/include/asm/uaccess.h
@@ -107,8 +107,11 @@ static inline void set_fs(mm_segment_t fs)
 extern int __get_user_1(void *);
 extern int __get_user_2(void *);
 extern int __get_user_4(void *);
-extern int __get_user_lo8(void *);
+extern int __get_user_32t_8(void *);
 extern int __get_user_8(void *);
+extern int __get_user_64t_1(void *);
+extern int __get_user_64t_2(void *);
+extern int __get_user_64t_4(void *);
 
 #define __GUP_CLOBBER_1	"lr", "cc"
 #ifdef CONFIG_CPU_USE_DOMAINS
@@ -117,7 +120,7 @@ extern int __get_user_8(void *);
 #define __GUP_CLOBBER_2	"lr", "cc"
 #endif
 #define __GUP_CLOBBER_4	"lr", "cc"
-#define __GUP_CLOBBER_lo8 "lr", "cc"
+#define __GUP_CLOBBER_32t_8 "lr", "cc"
 #define __GUP_CLOBBER_8	"lr", "cc"
 
 #define __get_user_x(__r2,__p,__e,__l,__s)				\
@@ -131,12 +134,30 @@ extern int __get_user_8(void *);
 
 /* narrowing a double-word get into a single 32bit word register: */
 #ifdef __ARMEB__
-#define __get_user_xb(__r2, __p, __e, __l, __s)				\
-	__get_user_x(__r2, __p, __e, __l, lo8)
+#define __get_user_x_32t(__r2, __p, __e, __l, __s)			\
+	__get_user_x(__r2, __p, __e, __l, 32t_8)
 #else
-#define __get_user_xb __get_user_x
+#define __get_user_x_32t __get_user_x
 #endif
 
+/*
+ * storing result into proper least significant word of 64bit target var,
+ * different only for big endian case where 64 bit __r2 lsw is r3:
+ */
+#ifdef __ARMEB__
+#define __get_user_x_64t(__r2, __p, __e, __l, __s)		\
+	   __asm__ __volatile__ (				\
+		__asmeq("%0", "r0") __asmeq("%1", "r2")		\
+		__asmeq("%3", "r1")				\
+		"bl	__get_user_64t_" #__s			\
+		: "=&r" (__e), "=r" (__r2)			\
+		: "0" (__p), "r" (__l)				\
+		: __GUP_CLOBBER_##__s)
+#else
+#define __get_user_x_64t __get_user_x
+#endif
+
+
 #define __get_user_check(x,p)						\
 	({								\
 		unsigned long __limit = current_thread_info()->addr_limit - 1; \
@@ -146,17 +167,26 @@ extern int __get_user_8(void *);
 		register int __e asm("r0");				\
 		switch (sizeof(*(__p))) {				\
 		case 1:							\
-			__get_user_x(__r2, __p, __e, __l, 1);		\
+			if (sizeof((x)) >= 8)				\
+				__get_user_x_64t(__r2, __p, __e, __l, 1); \
+			else						\
+				__get_user_x(__r2, __p, __e, __l, 1);	\
 			break;						\
 		case 2:							\
-			__get_user_x(__r2, __p, __e, __l, 2);		\
+			if (sizeof((x)) >= 8)				\
+				__get_user_x_64t(__r2, __p, __e, __l, 2); \
+			else						\
+				__get_user_x(__r2, __p, __e, __l, 2);	\
 			break;						\
 		case 4:							\
-			__get_user_x(__r2, __p, __e, __l, 4);		\
+			if (sizeof((x)) >= 8)				\
+				__get_user_x_64t(__r2, __p, __e, __l, 4); \
+			else						\
+				__get_user_x(__r2, __p, __e, __l, 4);	\
 			break;						\
 		case 8:							\
 			if (sizeof((x)) < 8)				\
-				__get_user_xb(__r2, __p, __e, __l, 4);	\
+				__get_user_x_32t(__r2, __p, __e, __l, 4); \
 			else						\
 				__get_user_x(__r2, __p, __e, __l, 8);	\
 			break;						\
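As a side note (not part of the patch): the reason r3 is the least significant word can be modelled in plain C. Under the ARM EABI a 64-bit value in registers r2/r3 is laid out as it would be in memory, so on a big-endian build the first 32-bit word is the most significant half. A minimal host-side sketch of that word ordering, assuming it is compiled for a big-endian target (names are illustrative):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            union {
                    uint64_t dw;    /* plays the role of the 64-bit __r2 */
                    uint32_t w[2];  /* w[0] ~ r2, w[1] ~ r3 under the EABI pairing */
            } u = { .dw = 0x1122334455667788ULL };

            /* On a big-endian build: w[0] = 0x11223344 (msw, "r2") and
             * w[1] = 0x55667788 (lsw, "r3") -- which is why a 32-bit
             * result must land in r3, not r2, to reach the low word.
             */
            printf("w[0]=%#x w[1]=%#x\n", u.w[0], u.w[1]);
            return 0;
    }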
diff --git a/arch/arm/lib/getuser.S b/arch/arm/lib/getuser.S
index 938600098b88..8ecfd15c3a02 100644
--- a/arch/arm/lib/getuser.S
+++ b/arch/arm/lib/getuser.S
@@ -80,7 +80,7 @@ ENTRY(__get_user_8)
 ENDPROC(__get_user_8)
 
 #ifdef __ARMEB__
-ENTRY(__get_user_lo8)
+ENTRY(__get_user_32t_8)
 	check_uaccess r0, 8, r1, r2, __get_user_bad
 #ifdef CONFIG_CPU_USE_DOMAINS
 	add	r0, r0, #4
@@ -90,7 +90,37 @@ ENTRY(__get_user_lo8)
 #endif
 	mov	r0, #0
 	ret	lr
-ENDPROC(__get_user_lo8)
+ENDPROC(__get_user_32t_8)
+
+ENTRY(__get_user_64t_1)
+	check_uaccess r0, 1, r1, r2, __get_user_bad8
+8: TUSER(ldrb)	r3, [r0]
+	mov	r0, #0
+	ret	lr
+ENDPROC(__get_user_64t_1)
+
+ENTRY(__get_user_64t_2)
+	check_uaccess r0, 2, r1, r2, __get_user_bad8
+#ifdef CONFIG_CPU_USE_DOMAINS
+rb	.req	ip
+9:	ldrbt	r3, [r0], #1
+10:	ldrbt	rb, [r0], #0
+#else
+rb	.req	r0
+9:	ldrb	r3, [r0]
+10:	ldrb	rb, [r0, #1]
+#endif
+	orr	r3, rb, r3, lsl #8
+	mov	r0, #0
+	ret	lr
+ENDPROC(__get_user_64t_2)
+
+ENTRY(__get_user_64t_4)
+	check_uaccess r0, 4, r1, r2, __get_user_bad8
+11: TUSER(ldr)	r3, [r0]
+	mov	r0, #0
+	ret	lr
+ENDPROC(__get_user_64t_4)
 #endif
 
 __get_user_bad8:
@@ -111,5 +141,9 @@ ENDPROC(__get_user_bad8)
 	.long	6b, __get_user_bad8
 #ifdef __ARMEB__
 	.long	7b, __get_user_bad
+	.long	8b, __get_user_bad8
+	.long	9b, __get_user_bad8
+	.long	10b, __get_user_bad8
+	.long	11b, __get_user_bad8
 #endif
 .popsection
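For readers following the __get_user_64t_2 path: the two byte loads plus "orr r3, rb, r3, lsl #8" assemble a big-endian halfword in r3. A small C model of the same computation (illustrative only, not kernel code; it mirrors the non-CONFIG_CPU_USE_DOMAINS sequence, though the ldrbt variant computes the same value):

    #include <stdint.h>

    /* Byte at the lower address becomes the high byte of the result,
     * i.e. a big-endian 16-bit load built from two byte loads.
     */
    uint16_t be16_from_bytes(const uint8_t *p)
    {
            uint16_t b0 = p[0];     /* 9:  ldrb r3, [r0]     -> bits 15:8 */
            uint16_t b1 = p[1];     /* 10: ldrb rb, [r0, #1] -> bits 7:0  */

            return (uint16_t)((b0 << 8) | b1);  /* orr r3, rb, r3, lsl #8 */
    }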