aboutsummaryrefslogtreecommitdiffstats
path: root/include
diff options
context:
space:
mode:
authorStuart Menefy <stuart.menefy@st.com>2007-11-30 04:16:23 -0500
committerPaul Mundt <lethal@linux-sh.org>2008-01-27 23:18:59 -0500
commit0fb19dcb64ec4bd9934eee26ce66417fe028ffd8 (patch)
treea2bb20e3d3f0703959e800cfcab2ad0eb81bf503 /include
parentfc55888f83c1c0ac09abe4680f9a94fc0662677f (diff)
sh: get_user fixes and nommu consolidation.
When a get_user(to, from++) is called the pointer increment is performed after its first usage, specifically after the __access_ok invocation. This causes a wrong get_user return value, putting a wrong character in the destination variable. This patch solves the problem using a new temporary pointer. Additionally this reworks the use of the register banks, allowing for consolidation between the MMU and nommu implementations. Signed-off-by: Carmelo Amoroso <carmelo.amoroso@st.com> Signed-off-by: Giuseppe Condorelli <giuseppe.condorelli@st.com> Signed-off-by: Stuart Menefy <stuart.menefy@st.com> Signed-off-by: Paul Mundt <lethal@linux-sh.org>
Diffstat (limited to 'include')
-rw-r--r--include/asm-sh/uaccess_32.h253
1 files changed, 98 insertions, 155 deletions
diff --git a/include/asm-sh/uaccess_32.h b/include/asm-sh/uaccess_32.h
index f18a1a5c95c0..59a9f20c2dc7 100644
--- a/include/asm-sh/uaccess_32.h
+++ b/include/asm-sh/uaccess_32.h
@@ -73,37 +73,24 @@ static inline int __access_ok(unsigned long addr, unsigned long size)
73/* 73/*
74 * __access_ok: Check if address with size is OK or not. 74 * __access_ok: Check if address with size is OK or not.
75 * 75 *
76 * We do three checks: 76 * Uhhuh, this needs 33-bit arithmetic. We have a carry..
77 * (1) is it user space?
78 * (2) addr + size --> carry?
79 * (3) addr + size >= 0x80000000 (PAGE_OFFSET)
80 * 77 *
81 * (1) (2) (3) | RESULT 78 * sum := addr + size; carry? --> flag = true;
82 * 0 0 0 | ok 79 * if (sum >= addr_limit) flag = true;
83 * 0 0 1 | ok
84 * 0 1 0 | bad
85 * 0 1 1 | bad
86 * 1 0 0 | ok
87 * 1 0 1 | bad
88 * 1 1 0 | bad
89 * 1 1 1 | bad
90 */ 80 */
91static inline int __access_ok(unsigned long addr, unsigned long size) 81static inline int __access_ok(unsigned long addr, unsigned long size)
92{ 82{
93 unsigned long flag, tmp; 83 unsigned long flag, sum;
94 84
95 __asm__("stc r7_bank, %0\n\t" 85 __asm__("clrt\n\t"
96 "mov.l @(8,%0), %0\n\t" 86 "addc %3, %1\n\t"
97 "clrt\n\t" 87 "movt %0\n\t"
98 "addc %2, %1\n\t" 88 "cmp/hi %4, %1\n\t"
99 "and %1, %0\n\t" 89 "rotcl %0"
100 "rotcl %0\n\t" 90 :"=&r" (flag), "=r" (sum)
101 "rotcl %0\n\t" 91 :"1" (addr), "r" (size),
102 "and #3, %0" 92 "r" (current_thread_info()->addr_limit.seg)
103 : "=&z" (flag), "=r" (tmp) 93 :"t");
104 : "r" (addr), "1" (size)
105 : "t");
106
107 return flag == 0; 94 return flag == 0;
108} 95}
109#endif /* CONFIG_MMU */ 96#endif /* CONFIG_MMU */
@@ -165,135 +152,47 @@ do { \
165#define __get_user_nocheck(x,ptr,size) \ 152#define __get_user_nocheck(x,ptr,size) \
166({ \ 153({ \
167 long __gu_err, __gu_val; \ 154 long __gu_err, __gu_val; \
168 __get_user_size(__gu_val, (ptr), (size), __gu_err); \ 155 __typeof__(*(ptr)) *__pu_addr = (ptr); \
156 __get_user_size(__gu_val, (__pu_addr), (size), __gu_err); \
169 (x) = (__typeof__(*(ptr)))__gu_val; \ 157 (x) = (__typeof__(*(ptr)))__gu_val; \
170 __gu_err; \ 158 __gu_err; \
171}) 159})
172 160
173#ifdef CONFIG_MMU
174#define __get_user_check(x,ptr,size) \
175({ \
176 long __gu_err, __gu_val; \
177 __chk_user_ptr(ptr); \
178 switch (size) { \
179 case 1: \
180 __get_user_1(__gu_val, (ptr), __gu_err); \
181 break; \
182 case 2: \
183 __get_user_2(__gu_val, (ptr), __gu_err); \
184 break; \
185 case 4: \
186 __get_user_4(__gu_val, (ptr), __gu_err); \
187 break; \
188 default: \
189 __get_user_unknown(); \
190 break; \
191 } \
192 \
193 (x) = (__typeof__(*(ptr)))__gu_val; \
194 __gu_err; \
195})
196
197#define __get_user_1(x,addr,err) ({ \
198__asm__("stc r7_bank, %1\n\t" \
199 "mov.l @(8,%1), %1\n\t" \
200 "and %2, %1\n\t" \
201 "cmp/pz %1\n\t" \
202 "bt/s 1f\n\t" \
203 " mov #0, %0\n\t" \
204 "0:\n" \
205 "mov #-14, %0\n\t" \
206 "bra 2f\n\t" \
207 " mov #0, %1\n" \
208 "1:\n\t" \
209 "mov.b @%2, %1\n\t" \
210 "extu.b %1, %1\n" \
211 "2:\n" \
212 ".section __ex_table,\"a\"\n\t" \
213 ".long 1b, 0b\n\t" \
214 ".previous" \
215 : "=&r" (err), "=&r" (x) \
216 : "r" (addr) \
217 : "t"); \
218})
219
220#define __get_user_2(x,addr,err) ({ \
221__asm__("stc r7_bank, %1\n\t" \
222 "mov.l @(8,%1), %1\n\t" \
223 "and %2, %1\n\t" \
224 "cmp/pz %1\n\t" \
225 "bt/s 1f\n\t" \
226 " mov #0, %0\n\t" \
227 "0:\n" \
228 "mov #-14, %0\n\t" \
229 "bra 2f\n\t" \
230 " mov #0, %1\n" \
231 "1:\n\t" \
232 "mov.w @%2, %1\n\t" \
233 "extu.w %1, %1\n" \
234 "2:\n" \
235 ".section __ex_table,\"a\"\n\t" \
236 ".long 1b, 0b\n\t" \
237 ".previous" \
238 : "=&r" (err), "=&r" (x) \
239 : "r" (addr) \
240 : "t"); \
241})
242
243#define __get_user_4(x,addr,err) ({ \
244__asm__("stc r7_bank, %1\n\t" \
245 "mov.l @(8,%1), %1\n\t" \
246 "and %2, %1\n\t" \
247 "cmp/pz %1\n\t" \
248 "bt/s 1f\n\t" \
249 " mov #0, %0\n\t" \
250 "0:\n" \
251 "mov #-14, %0\n\t" \
252 "bra 2f\n\t" \
253 " mov #0, %1\n" \
254 "1:\n\t" \
255 "mov.l @%2, %1\n\t" \
256 "2:\n" \
257 ".section __ex_table,\"a\"\n\t" \
258 ".long 1b, 0b\n\t" \
259 ".previous" \
260 : "=&r" (err), "=&r" (x) \
261 : "r" (addr) \
262 : "t"); \
263})
264#else /* CONFIG_MMU */
265#define __get_user_check(x,ptr,size) \ 161#define __get_user_check(x,ptr,size) \
266({ \ 162({ \
267 long __gu_err, __gu_val; \ 163 long __gu_err, __gu_val; \
268 if (__access_ok((unsigned long)(ptr), (size))) { \ 164 __typeof__(*(ptr)) *__pu_addr = (ptr); \
269 __get_user_size(__gu_val, (ptr), (size), __gu_err); \ 165 __chk_user_ptr(__pu_addr); \
270 (x) = (__typeof__(*(ptr)))__gu_val; \ 166 if (likely(__addr_ok((unsigned long)(__pu_addr)))) { \
271 } else \ 167 __get_user_size(__gu_val, (__pu_addr), (size), __gu_err);\
168 } else { \
272 __gu_err = -EFAULT; \ 169 __gu_err = -EFAULT; \
170 __gu_val = 0; \
171 } \
172 (x) = (__typeof__(*(ptr)))__gu_val; \
273 __gu_err; \ 173 __gu_err; \
274}) 174})
275#endif
276 175
277#define __get_user_asm(x, addr, err, insn) \ 176#define __get_user_asm(x, addr, err, insn) \
278({ \ 177({ \
279__asm__ __volatile__( \ 178__asm__ __volatile__( \
280 "1:\n\t" \ 179 "1:\n\t" \
281 "mov." insn " %2, %1\n\t" \ 180 "mov." insn " %2, %1\n\t" \
282 "mov #0, %0\n" \
283 "2:\n" \ 181 "2:\n" \
284 ".section .fixup,\"ax\"\n" \ 182 ".section .fixup,\"ax\"\n" \
285 "3:\n\t" \ 183 "3:\n\t" \
286 "mov #0, %1\n\t" \ 184 "mov #0, %1\n\t" \
287 "mov.l 4f, %0\n\t" \ 185 "mov.l 4f, %0\n\t" \
288 "jmp @%0\n\t" \ 186 "jmp @%0\n\t" \
289 " mov %3, %0\n" \ 187 " mov %3, %0\n\t" \
188 ".balign 4\n" \
290 "4: .long 2b\n\t" \ 189 "4: .long 2b\n\t" \
291 ".previous\n" \ 190 ".previous\n" \
292 ".section __ex_table,\"a\"\n\t" \ 191 ".section __ex_table,\"a\"\n\t" \
293 ".long 1b, 3b\n\t" \ 192 ".long 1b, 3b\n\t" \
294 ".previous" \ 193 ".previous" \
295 :"=&r" (err), "=&r" (x) \ 194 :"=&r" (err), "=&r" (x) \
296 :"m" (__m(addr)), "i" (-EFAULT)); }) 195 :"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
297 196
298extern void __get_user_unknown(void); 197extern void __get_user_unknown(void);
299 198
@@ -328,11 +227,13 @@ do { \
328 227
329#define __put_user_check(x,ptr,size) \ 228#define __put_user_check(x,ptr,size) \
330({ \ 229({ \
331 long __pu_err = -EFAULT; \ 230 long __pu_err; \
332 __typeof__(*(ptr)) __user *__pu_addr = (ptr); \ 231 __typeof__(*(ptr)) __user *__pu_addr = (ptr); \
333 \ 232 \
334 if (__access_ok((unsigned long)__pu_addr,size)) \ 233 if (likely(__addr_ok((unsigned long)__pu_addr))) \
335 __put_user_size((x),__pu_addr,(size),__pu_err); \ 234 __put_user_size((x),__pu_addr,(size),__pu_err); \
235 else \
236 __pu_err = -EFAULT; \
336 __pu_err; \ 237 __pu_err; \
337}) 238})
338 239
@@ -341,45 +242,43 @@ do { \
341__asm__ __volatile__( \ 242__asm__ __volatile__( \
342 "1:\n\t" \ 243 "1:\n\t" \
343 "mov." insn " %1, %2\n\t" \ 244 "mov." insn " %1, %2\n\t" \
344 "mov #0, %0\n" \
345 "2:\n" \ 245 "2:\n" \
346 ".section .fixup,\"ax\"\n" \ 246 ".section .fixup,\"ax\"\n" \
347 "3:\n\t" \ 247 "3:\n\t" \
348 "nop\n\t" \
349 "mov.l 4f, %0\n\t" \ 248 "mov.l 4f, %0\n\t" \
350 "jmp @%0\n\t" \ 249 "jmp @%0\n\t" \
351 "mov %3, %0\n" \ 250 " mov %3, %0\n\t" \
251 ".balign 4\n" \
352 "4: .long 2b\n\t" \ 252 "4: .long 2b\n\t" \
353 ".previous\n" \ 253 ".previous\n" \
354 ".section __ex_table,\"a\"\n\t" \ 254 ".section __ex_table,\"a\"\n\t" \
355 ".long 1b, 3b\n\t" \ 255 ".long 1b, 3b\n\t" \
356 ".previous" \ 256 ".previous" \
357 :"=&r" (err) \ 257 :"=&r" (err) \
358 :"r" (x), "m" (__m(addr)), "i" (-EFAULT) \ 258 :"r" (x), "m" (__m(addr)), "i" (-EFAULT), "0" (err) \
359 :"memory"); }) 259 :"memory"); })
360 260
361#if defined(__LITTLE_ENDIAN__) 261#if defined(CONFIG_CPU_LITTLE_ENDIAN)
362#define __put_user_u64(val,addr,retval) \ 262#define __put_user_u64(val,addr,retval) \
363({ \ 263({ \
364__asm__ __volatile__( \ 264__asm__ __volatile__( \
365 "1:\n\t" \ 265 "1:\n\t" \
366 "mov.l %R1,%2\n\t" \ 266 "mov.l %R1,%2\n\t" \
367 "mov.l %S1,%T2\n\t" \ 267 "mov.l %S1,%T2\n\t" \
368 "mov #0,%0\n" \
369 "2:\n" \ 268 "2:\n" \
370 ".section .fixup,\"ax\"\n" \ 269 ".section .fixup,\"ax\"\n" \
371 "3:\n\t" \ 270 "3:\n\t" \
372 "nop\n\t" \
373 "mov.l 4f,%0\n\t" \ 271 "mov.l 4f,%0\n\t" \
374 "jmp @%0\n\t" \ 272 "jmp @%0\n\t" \
375 " mov %3,%0\n" \ 273 " mov %3,%0\n\t" \
274 ".balign 4\n" \
376 "4: .long 2b\n\t" \ 275 "4: .long 2b\n\t" \
377 ".previous\n" \ 276 ".previous\n" \
378 ".section __ex_table,\"a\"\n\t" \ 277 ".section __ex_table,\"a\"\n\t" \
379 ".long 1b, 3b\n\t" \ 278 ".long 1b, 3b\n\t" \
380 ".previous" \ 279 ".previous" \
381 : "=r" (retval) \ 280 : "=r" (retval) \
382 : "r" (val), "m" (__m(addr)), "i" (-EFAULT) \ 281 : "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
383 : "memory"); }) 282 : "memory"); })
384#else 283#else
385#define __put_user_u64(val,addr,retval) \ 284#define __put_user_u64(val,addr,retval) \
@@ -388,21 +287,20 @@ __asm__ __volatile__( \
388 "1:\n\t" \ 287 "1:\n\t" \
389 "mov.l %S1,%2\n\t" \ 288 "mov.l %S1,%2\n\t" \
390 "mov.l %R1,%T2\n\t" \ 289 "mov.l %R1,%T2\n\t" \
391 "mov #0,%0\n" \
392 "2:\n" \ 290 "2:\n" \
393 ".section .fixup,\"ax\"\n" \ 291 ".section .fixup,\"ax\"\n" \
394 "3:\n\t" \ 292 "3:\n\t" \
395 "nop\n\t" \
396 "mov.l 4f,%0\n\t" \ 293 "mov.l 4f,%0\n\t" \
397 "jmp @%0\n\t" \ 294 "jmp @%0\n\t" \
398 " mov %3,%0\n" \ 295 " mov %3,%0\n\t" \
296 ".balign 4\n" \
399 "4: .long 2b\n\t" \ 297 "4: .long 2b\n\t" \
400 ".previous\n" \ 298 ".previous\n" \
401 ".section __ex_table,\"a\"\n\t" \ 299 ".section __ex_table,\"a\"\n\t" \
402 ".long 1b, 3b\n\t" \ 300 ".long 1b, 3b\n\t" \
403 ".previous" \ 301 ".previous" \
404 : "=r" (retval) \ 302 : "=r" (retval) \
405 : "r" (val), "m" (__m(addr)), "i" (-EFAULT) \ 303 : "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
406 : "memory"); }) 304 : "memory"); })
407#endif 305#endif
408 306
@@ -463,7 +361,7 @@ static __inline__ int
463__strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __count) 361__strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __count)
464{ 362{
465 __kernel_size_t res; 363 __kernel_size_t res;
466 unsigned long __dummy, _d, _s; 364 unsigned long __dummy, _d, _s, _c;
467 365
468 __asm__ __volatile__( 366 __asm__ __volatile__(
469 "9:\n" 367 "9:\n"
@@ -472,17 +370,17 @@ __strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __coun
472 "bt/s 2f\n" 370 "bt/s 2f\n"
473 "1:\n" 371 "1:\n"
474 "mov.b %1, @%3\n\t" 372 "mov.b %1, @%3\n\t"
475 "dt %7\n\t" 373 "dt %4\n\t"
476 "bf/s 9b\n\t" 374 "bf/s 9b\n\t"
477 " add #1, %3\n\t" 375 " add #1, %3\n\t"
478 "2:\n\t" 376 "2:\n\t"
479 "sub %7, %0\n" 377 "sub %4, %0\n"
480 "3:\n" 378 "3:\n"
481 ".section .fixup,\"ax\"\n" 379 ".section .fixup,\"ax\"\n"
482 "4:\n\t" 380 "4:\n\t"
483 "mov.l 5f, %1\n\t" 381 "mov.l 5f, %1\n\t"
484 "jmp @%1\n\t" 382 "jmp @%1\n\t"
485 " mov %8, %0\n\t" 383 " mov %9, %0\n\t"
486 ".balign 4\n" 384 ".balign 4\n"
487 "5: .long 3b\n" 385 "5: .long 3b\n"
488 ".previous\n" 386 ".previous\n"
@@ -490,14 +388,32 @@ __strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __coun
490 " .balign 4\n" 388 " .balign 4\n"
491 " .long 9b,4b\n" 389 " .long 9b,4b\n"
492 ".previous" 390 ".previous"
493 : "=r" (res), "=&z" (__dummy), "=r" (_s), "=r" (_d) 391 : "=r" (res), "=&z" (__dummy), "=r" (_s), "=r" (_d), "=r"(_c)
494 : "0" (__count), "2" (__src), "3" (__dest), "r" (__count), 392 : "0" (__count), "2" (__src), "3" (__dest), "4" (__count),
495 "i" (-EFAULT) 393 "i" (-EFAULT)
496 : "memory", "t"); 394 : "memory", "t");
497 395
498 return res; 396 return res;
499} 397}
500 398
399/**
400 * strncpy_from_user: - Copy a NUL terminated string from userspace.
401 * @dst: Destination address, in kernel space. This buffer must be at
402 * least @count bytes long.
403 * @src: Source address, in user space.
404 * @count: Maximum number of bytes to copy, including the trailing NUL.
405 *
406 * Copies a NUL-terminated string from userspace to kernel space.
407 *
408 * On success, returns the length of the string (not including the trailing
409 * NUL).
410 *
411 * If access to userspace fails, returns -EFAULT (some data may have been
412 * copied).
413 *
414 * If @count is smaller than the length of the string, copies @count bytes
415 * and returns @count.
416 */
501#define strncpy_from_user(dest,src,count) ({ \ 417#define strncpy_from_user(dest,src,count) ({ \
502unsigned long __sfu_src = (unsigned long) (src); \ 418unsigned long __sfu_src = (unsigned long) (src); \
503int __sfu_count = (int) (count); \ 419int __sfu_count = (int) (count); \
@@ -507,7 +423,8 @@ __sfu_res = __strncpy_from_user((unsigned long) (dest), __sfu_src, __sfu_count);
507} __sfu_res; }) 423} __sfu_res; })
508 424
509/* 425/*
510 * Return the size of a string (including the ending 0!) 426 * Return the size of a string (including the ending 0 even when we have
427 * exceeded the maximum string length).
511 */ 428 */
512static __inline__ long __strnlen_user(const char __user *__s, long __n) 429static __inline__ long __strnlen_user(const char __user *__s, long __n)
513{ 430{
@@ -515,14 +432,13 @@ static __inline__ long __strnlen_user(const char __user *__s, long __n)
515 unsigned long __dummy; 432 unsigned long __dummy;
516 433
517 __asm__ __volatile__( 434 __asm__ __volatile__(
518 "9:\n"
519 "cmp/eq %4, %0\n\t"
520 "bt 2f\n"
521 "1:\t" 435 "1:\t"
522 "mov.b @(%0,%3), %1\n\t" 436 "mov.b @(%0,%3), %1\n\t"
437 "cmp/eq %4, %0\n\t"
438 "bt/s 2f\n\t"
439 " add #1, %0\n\t"
523 "tst %1, %1\n\t" 440 "tst %1, %1\n\t"
524 "bf/s 9b\n\t" 441 "bf 1b\n\t"
525 " add #1, %0\n"
526 "2:\n" 442 "2:\n"
527 ".section .fixup,\"ax\"\n" 443 ".section .fixup,\"ax\"\n"
528 "3:\n\t" 444 "3:\n\t"
@@ -542,6 +458,19 @@ static __inline__ long __strnlen_user(const char __user *__s, long __n)
542 return res; 458 return res;
543} 459}
544 460
461/**
462 * strnlen_user: - Get the size of a string in user space.
463 * @s: The string to measure.
464 * @n: The maximum valid length
465 *
466 * Context: User context only. This function may sleep.
467 *
468 * Get the size of a NUL-terminated string in user space.
469 *
470 * Returns the size of the string INCLUDING the terminating NUL.
471 * On exception, returns 0.
472 * If the string is too long, returns a value greater than @n.
473 */
545static __inline__ long strnlen_user(const char __user *s, long n) 474static __inline__ long strnlen_user(const char __user *s, long n)
546{ 475{
547 if (!__addr_ok(s)) 476 if (!__addr_ok(s))
@@ -550,6 +479,20 @@ static __inline__ long strnlen_user(const char __user *s, long n)
550 return __strnlen_user(s, n); 479 return __strnlen_user(s, n);
551} 480}
552 481
482/**
483 * strlen_user: - Get the size of a string in user space.
484 * @str: The string to measure.
485 *
486 * Context: User context only. This function may sleep.
487 *
488 * Get the size of a NUL-terminated string in user space.
489 *
490 * Returns the size of the string INCLUDING the terminating NUL.
491 * On exception, returns 0.
492 *
493 * If there is a limit on the length of a valid string, you may wish to
494 * consider using strnlen_user() instead.
495 */
553#define strlen_user(str) strnlen_user(str, ~0UL >> 1) 496#define strlen_user(str) strnlen_user(str, ~0UL >> 1)
554 497
555/* 498/*