author     Ralf Baechle <ralf@linux-mips.org>  2006-10-30 21:52:56 -0500
committer  Ralf Baechle <ralf@linux-mips.org>  2008-10-11 11:18:54 -0400
commit     71ec6ccfdc2706e767189d40016fc802305067bb (patch)
tree       8b5f197dc30e1aedb64b3401b535202038d55d84
parent     4bacc68766b11c191ee1567f54103f109c002f4f (diff)
MIPS: Replace use of <asm-generic/uaccess.h> with native implementations.
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
-rw-r--r--  arch/mips/include/asm/uaccess.h | 264
1 file changed, 263 insertions(+), 1 deletion(-)
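
For context: the accessors this patch adds behave like get_user()/put_user() but tolerate user pointers that are not naturally aligned, returning 0 on success and -EFAULT on a faulting access. A minimal usage sketch (illustrative only, not part of the patch; the struct, field and function names are hypothetical):

#include <linux/types.h>
#include <asm/uaccess.h>

/* A packed layout means 'len' may sit at an unaligned user address. */
struct pkt_hdr {
	u16 type;
	u32 len;
} __attribute__((packed));

static int read_pkt_len(struct pkt_hdr __user *hdr, u32 *out)
{
	u32 len;

	/* &hdr->len may be misaligned, so use the unaligned accessor */
	if (get_user_unaligned(len, &hdr->len))
		return -EFAULT;

	*out = len;
	return 0;
}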
diff --git a/arch/mips/include/asm/uaccess.h b/arch/mips/include/asm/uaccess.h
index b895144d577a..09ff5bb17445 100644
--- a/arch/mips/include/asm/uaccess.h
+++ b/arch/mips/include/asm/uaccess.h
@@ -13,7 +13,6 @@
 #include <linux/kernel.h>
 #include <linux/errno.h>
 #include <linux/thread_info.h>
-#include <asm-generic/uaccess.h>
 
 /*
  * The fs value determines whether argument validity checking should be
@@ -374,6 +373,269 @@ do { \
 extern void __put_user_unknown(void);
 
 /*
+ * put_user_unaligned: - Write a simple value into user space.
+ * @x: Value to copy to user space.
+ * @ptr: Destination address, in user space.
+ *
+ * Context: User context only. This function may sleep.
+ *
+ * This macro copies a single simple value from kernel space to user
+ * space. It supports simple types like char and int, but not larger
+ * data types like structures or arrays.
+ *
+ * @ptr must have pointer-to-simple-variable type, and @x must be assignable
+ * to the result of dereferencing @ptr.
+ *
+ * Returns zero on success, or -EFAULT on error.
+ */
+#define put_user_unaligned(x,ptr) \
+	__put_user_unaligned_check((x),(ptr),sizeof(*(ptr)))
+
+/*
+ * get_user_unaligned: - Get a simple variable from user space.
+ * @x: Variable to store result.
+ * @ptr: Source address, in user space.
+ *
+ * Context: User context only. This function may sleep.
+ *
+ * This macro copies a single simple variable from user space to kernel
+ * space. It supports simple types like char and int, but not larger
+ * data types like structures or arrays.
+ *
+ * @ptr must have pointer-to-simple-variable type, and the result of
+ * dereferencing @ptr must be assignable to @x without a cast.
+ *
+ * Returns zero on success, or -EFAULT on error.
+ * On error, the variable @x is set to zero.
+ */
+#define get_user_unaligned(x,ptr) \
+	__get_user_unaligned_check((x),(ptr),sizeof(*(ptr)))
+
+/*
+ * __put_user_unaligned: - Write a simple value into user space, with less checking.
+ * @x: Value to copy to user space.
+ * @ptr: Destination address, in user space.
+ *
+ * Context: User context only. This function may sleep.
+ *
+ * This macro copies a single simple value from kernel space to user
+ * space. It supports simple types like char and int, but not larger
+ * data types like structures or arrays.
+ *
+ * @ptr must have pointer-to-simple-variable type, and @x must be assignable
+ * to the result of dereferencing @ptr.
+ *
+ * Caller must check the pointer with access_ok() before calling this
+ * function.
+ *
+ * Returns zero on success, or -EFAULT on error.
+ */
+#define __put_user_unaligned(x,ptr) \
+	__put_user_unaligned_nocheck((x),(ptr),sizeof(*(ptr)))
+
+/*
+ * __get_user_unaligned: - Get a simple variable from user space, with less checking.
+ * @x: Variable to store result.
+ * @ptr: Source address, in user space.
+ *
+ * Context: User context only. This function may sleep.
+ *
+ * This macro copies a single simple variable from user space to kernel
+ * space. It supports simple types like char and int, but not larger
+ * data types like structures or arrays.
+ *
+ * @ptr must have pointer-to-simple-variable type, and the result of
+ * dereferencing @ptr must be assignable to @x without a cast.
+ *
+ * Caller must check the pointer with access_ok() before calling this
+ * function.
+ *
+ * Returns zero on success, or -EFAULT on error.
+ * On error, the variable @x is set to zero.
+ */
+#define __get_user_unaligned(x,ptr) \
+	__get_user_unaligned_nocheck((x),(ptr),sizeof(*(ptr)))
+
+/*
+ * Yuck. We need two variants, one for 64bit operation and one
+ * for 32 bit mode and old iron.
+ */
+#ifdef CONFIG_32BIT
+#define __GET_USER_UNALIGNED_DW(val, ptr) \
+	__get_user_unaligned_asm_ll32(val, ptr)
+#endif
+#ifdef CONFIG_64BIT
+#define __GET_USER_UNALIGNED_DW(val, ptr) \
+	__get_user_unaligned_asm(val, "uld", ptr)
+#endif
+
+extern void __get_user_unaligned_unknown(void);
+
+#define __get_user_unaligned_common(val, size, ptr) \
+do { \
+	switch (size) { \
+	case 1: __get_user_asm(val, "lb", ptr); break; \
+	case 2: __get_user_unaligned_asm(val, "ulh", ptr); break; \
+	case 4: __get_user_unaligned_asm(val, "ulw", ptr); break; \
+	case 8: __GET_USER_UNALIGNED_DW(val, ptr); break; \
+	default: __get_user_unaligned_unknown(); break; \
+	} \
+} while (0)
+
+#define __get_user_unaligned_nocheck(x,ptr,size) \
+({ \
+	int __gu_err; \
+	\
+	__get_user_unaligned_common((x), size, ptr); \
+	__gu_err; \
+})
+
+#define __get_user_unaligned_check(x,ptr,size) \
+({ \
+	int __gu_err = -EFAULT; \
+	const __typeof__(*(ptr)) __user * __gu_ptr = (ptr); \
+	\
+	if (likely(access_ok(VERIFY_READ, __gu_ptr, size))) \
+		__get_user_unaligned_common((x), size, __gu_ptr); \
+	\
+	__gu_err; \
+})
+
+#define __get_user_unaligned_asm(val, insn, addr) \
+{ \
+	long __gu_tmp; \
+	\
+	__asm__ __volatile__( \
+	"1: " insn " %1, %3 \n" \
+	"2: \n" \
+	" .section .fixup,\"ax\" \n" \
+	"3: li %0, %4 \n" \
+	" j 2b \n" \
+	" .previous \n" \
+	" .section __ex_table,\"a\" \n" \
+	" "__UA_ADDR "\t1b, 3b \n" \
+	" "__UA_ADDR "\t1b + 4, 3b \n" \
+	" .previous \n" \
+	: "=r" (__gu_err), "=r" (__gu_tmp) \
+	: "0" (0), "o" (__m(addr)), "i" (-EFAULT)); \
+	\
+	(val) = (__typeof__(*(addr))) __gu_tmp; \
+}
+
+/*
+ * Get a long long 64 using 32 bit registers.
+ */
+#define __get_user_unaligned_asm_ll32(val, addr) \
+{ \
+	unsigned long long __gu_tmp; \
+	\
+	__asm__ __volatile__( \
+	"1: ulw %1, (%3) \n" \
+	"2: ulw %D1, 4(%3) \n" \
+	" move %0, $0 \n" \
+	"3: .section .fixup,\"ax\" \n" \
+	"4: li %0, %4 \n" \
+	" move %1, $0 \n" \
+	" move %D1, $0 \n" \
+	" j 3b \n" \
+	" .previous \n" \
+	" .section __ex_table,\"a\" \n" \
+	" " __UA_ADDR " 1b, 4b \n" \
+	" " __UA_ADDR " 1b + 4, 4b \n" \
+	" " __UA_ADDR " 2b, 4b \n" \
+	" " __UA_ADDR " 2b + 4, 4b \n" \
+	" .previous \n" \
+	: "=r" (__gu_err), "=&r" (__gu_tmp) \
+	: "0" (0), "r" (addr), "i" (-EFAULT)); \
+	(val) = (__typeof__(*(addr))) __gu_tmp; \
+}
+
+/*
+ * Yuck. We need two variants, one for 64bit operation and one
+ * for 32 bit mode and old iron.
+ */
+#ifdef CONFIG_32BIT
+#define __PUT_USER_UNALIGNED_DW(ptr) __put_user_unaligned_asm_ll32(ptr)
+#endif
+#ifdef CONFIG_64BIT
+#define __PUT_USER_UNALIGNED_DW(ptr) __put_user_unaligned_asm("usd", ptr)
+#endif
+
+#define __put_user_unaligned_nocheck(x,ptr,size) \
+({ \
+	__typeof__(*(ptr)) __pu_val; \
+	int __pu_err = 0; \
+	\
+	__pu_val = (x); \
+	switch (size) { \
+	case 1: __put_user_asm("sb", ptr); break; \
+	case 2: __put_user_unaligned_asm("ush", ptr); break; \
+	case 4: __put_user_unaligned_asm("usw", ptr); break; \
+	case 8: __PUT_USER_UNALIGNED_DW(ptr); break; \
+	default: __put_user_unaligned_unknown(); break; \
+	} \
+	__pu_err; \
+})
+
+#define __put_user_unaligned_check(x,ptr,size) \
+({ \
+	__typeof__(*(ptr)) __user *__pu_addr = (ptr); \
+	__typeof__(*(ptr)) __pu_val = (x); \
+	int __pu_err = -EFAULT; \
+	\
+	if (likely(access_ok(VERIFY_WRITE, __pu_addr, size))) { \
+		switch (size) { \
+		case 1: __put_user_asm("sb", __pu_addr); break; \
+		case 2: __put_user_unaligned_asm("ush", __pu_addr); break; \
+		case 4: __put_user_unaligned_asm("usw", __pu_addr); break; \
+		case 8: __PUT_USER_UNALIGNED_DW(__pu_addr); break; \
+		default: __put_user_unaligned_unknown(); break; \
+		} \
+	} \
+	__pu_err; \
+})
+
+#define __put_user_unaligned_asm(insn, ptr) \
+{ \
+	__asm__ __volatile__( \
+	"1: " insn " %z2, %3 # __put_user_unaligned_asm\n" \
+	"2: \n" \
+	" .section .fixup,\"ax\" \n" \
+	"3: li %0, %4 \n" \
+	" j 2b \n" \
+	" .previous \n" \
+	" .section __ex_table,\"a\" \n" \
+	" " __UA_ADDR " 1b, 3b \n" \
+	" .previous \n" \
+	: "=r" (__pu_err) \
+	: "0" (0), "Jr" (__pu_val), "o" (__m(ptr)), \
+	  "i" (-EFAULT)); \
+}
+
+#define __put_user_unaligned_asm_ll32(ptr) \
+{ \
+	__asm__ __volatile__( \
+	"1: sw %2, (%3) # __put_user_unaligned_asm_ll32 \n" \
+	"2: sw %D2, 4(%3) \n" \
+	"3: \n" \
+	" .section .fixup,\"ax\" \n" \
+	"4: li %0, %4 \n" \
+	" j 3b \n" \
+	" .previous \n" \
+	" .section __ex_table,\"a\" \n" \
+	" " __UA_ADDR " 1b, 4b \n" \
+	" " __UA_ADDR " 1b + 4, 4b \n" \
+	" " __UA_ADDR " 2b, 4b \n" \
+	" " __UA_ADDR " 2b + 4, 4b \n" \
+	" .previous" \
+	: "=r" (__pu_err) \
+	: "0" (0), "r" (__pu_val), "r" (ptr), \
+	  "i" (-EFAULT)); \
+}
+
+extern void __put_user_unaligned_unknown(void);
+
+/*
  * We're generating jump to subroutines which will be outside the range of
  * jump instructions
  */
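
As the kernel-doc comments in the patch state, the double-underscore variants skip the access_ok() range check, so the caller must perform that check itself; the plain put_user_unaligned()/get_user_unaligned() forms do it internally. A short sketch of that calling convention (illustrative only, not part of the patch; the function and variable names are hypothetical, and the three-argument access_ok()/VERIFY_WRITE form is the one used by this kernel generation):

/* dst may carry an address that is not 4-byte aligned, e.g. a pointer
 * into a packed user-space buffer. One access_ok() up front covers
 * both stores, which is what the unchecked variants are for. */
static int fill_reply(u32 __user *dst, u32 a, u32 b)
{
	if (!access_ok(VERIFY_WRITE, dst, 2 * sizeof(u32)))
		return -EFAULT;

	if (__put_user_unaligned(a, dst))
		return -EFAULT;
	if (__put_user_unaligned(b, dst + 1))
		return -EFAULT;

	return 0;
}

The checked forms remain the safe default; the unchecked forms merely let a caller amortise a single access_ok() over several accesses to the same user buffer.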