author      Qiaowei Ren <qiaowei.ren@intel.com>      2013-12-14 01:25:02 -0500
committer   H. Peter Anvin <hpa@linux.intel.com>     2013-12-16 12:07:57 -0500
commit      f09174c501f8bb259788cc36d5a7aa5b2831fb5e (patch)
tree        6c38b937005241368a7bd6a01c58b4f181f2a959 /arch
parent      e7d820a5e549b3eb6c3f9467507566565646a669 (diff)
x86: add user_atomic_cmpxchg_inatomic to uaccess.h
This patch adds user_atomic_cmpxchg_inatomic(), which performs a CMPXCHG
instruction against a user-space address.
This generalizes the already existing futex_atomic_cmpxchg_inatomic()
so it can be used in other contexts. It will be used by the upcoming
support for Intel MPX (Memory Protection Extensions).
[ hpa: replaced #ifdef inside a macro with IS_ENABLED() ]
Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
Link: http://lkml.kernel.org/r/1387002303-6620-1-git-send-email-qiaowei.ren@intel.com
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <a.p.zijlstra@chello.nl>
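
For context, a minimal sketch of how a caller might use the new helper. The
function and variable names below are hypothetical and not part of this patch;
only user_atomic_cmpxchg_inatomic() itself is added here. Like
futex_atomic_cmpxchg_inatomic(), it reads the current value at the user
address into *uval and, if that value equals the expected one, atomically
replaces it; it returns 0 on success and -EFAULT if the user address faults.

/* Hypothetical caller, for illustration only (not from this patch). */
static int update_user_counter(u32 __user *uaddr, u32 expected, u32 desired)
{
	u32 cur;
	int ret;

	/* 0 on success; -EFAULT if the user address could not be accessed. */
	ret = user_atomic_cmpxchg_inatomic(&cur, uaddr, expected, desired);
	if (ret)
		return ret;
	if (cur != expected)
		return -EAGAIN;	/* another task changed the value first */
	return 0;
}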
Diffstat (limited to 'arch')
-rw-r--r--   arch/x86/include/asm/uaccess.h | 92
1 file changed, 92 insertions(+), 0 deletions(-)
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 8ec57c07b125..48ff83855268 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -525,6 +525,98 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
+extern void __cmpxchg_wrong_size(void)
+	__compiletime_error("Bad argument size for cmpxchg");
+
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)	\
+({									\
+	int __ret = 0;							\
+	__typeof__(ptr) __uval = (uval);				\
+	__typeof__(*(ptr)) __old = (old);				\
+	__typeof__(*(ptr)) __new = (new);				\
+	switch (size) {							\
+	case 1:								\
+	{								\
+		asm volatile("\t" ASM_STAC "\n"				\
+			"1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"		\
+			"2:\t" ASM_CLAC "\n"				\
+			"\t.section .fixup, \"ax\"\n"			\
+			"3:\tmov %3, %0\n"				\
+			"\tjmp 2b\n"					\
+			"\t.previous\n"					\
+			_ASM_EXTABLE(1b, 3b)				\
+			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
+			: "i" (-EFAULT), "q" (__new), "1" (__old)	\
+			: "memory"					\
+		);							\
+		break;							\
+	}								\
+	case 2:								\
+	{								\
+		asm volatile("\t" ASM_STAC "\n"				\
+			"1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"		\
+			"2:\t" ASM_CLAC "\n"				\
+			"\t.section .fixup, \"ax\"\n"			\
+			"3:\tmov %3, %0\n"				\
+			"\tjmp 2b\n"					\
+			"\t.previous\n"					\
+			_ASM_EXTABLE(1b, 3b)				\
+			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
+			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
+			: "memory"					\
+		);							\
+		break;							\
+	}								\
+	case 4:								\
+	{								\
+		asm volatile("\t" ASM_STAC "\n"				\
+			"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"		\
+			"2:\t" ASM_CLAC "\n"				\
+			"\t.section .fixup, \"ax\"\n"			\
+			"3:\tmov %3, %0\n"				\
+			"\tjmp 2b\n"					\
+			"\t.previous\n"					\
+			_ASM_EXTABLE(1b, 3b)				\
+			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
+			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
+			: "memory"					\
+		);							\
+		break;							\
+	}								\
+	case 8:								\
+	{								\
+		if (!IS_ENABLED(CONFIG_X86_64))				\
+			__cmpxchg_wrong_size();				\
+									\
+		asm volatile("\t" ASM_STAC "\n"				\
+			"1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"		\
+			"2:\t" ASM_CLAC "\n"				\
+			"\t.section .fixup, \"ax\"\n"			\
+			"3:\tmov %3, %0\n"				\
+			"\tjmp 2b\n"					\
+			"\t.previous\n"					\
+			_ASM_EXTABLE(1b, 3b)				\
+			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
+			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
+			: "memory"					\
+		);							\
+		break;							\
+	}								\
+	default:							\
+		__cmpxchg_wrong_size();					\
+	}								\
+	*__uval = __old;						\
+	__ret;								\
+})
+
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)		\
+({									\
+	access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ?		\
+		__user_atomic_cmpxchg_inatomic((uval), (ptr),		\
+				(old), (new), sizeof(*(ptr))) :		\
+		-EFAULT;						\
+})
+
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */
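
Two details of the implementation are worth noting. First,
__cmpxchg_wrong_size() is declared but never defined: since size is always a
compile-time constant (sizeof(*(ptr))), the switch is folded at compile time,
and any surviving call to the __compiletime_error()-annotated stub becomes a
hard build error rather than a late link failure. Second, per hpa's note, the
8-byte case is guarded with IS_ENABLED(CONFIG_X86_64) instead of an #ifdef,
because preprocessor conditionals cannot appear inside a macro body; on 32-bit
builds the guard turns any 8-byte use into a compile error, and when the case
is unreachable the cmpxchgq asm is discarded as dead code before it reaches
the assembler. Below is a stand-alone sketch of the same error-stub pattern;
the names here are illustrative (not from the patch), and the trick relies on
optimization being enabled, as it always is for kernel builds.

/* Illustrative reimplementation of the compile-time size check. */
extern void __bad_size(void)
	__attribute__((error("bad argument size")));	/* gcc attribute behind __compiletime_error() */

#define size_checked(ptr)					\
({								\
	switch (sizeof(*(ptr))) {				\
	case 1: case 2: case 4: case 8:				\
		break;						\
	default:						\
		/* Folded away for valid sizes; a hard		\
		 * compile error for anything else. */		\
		__bad_size();					\
	}							\
})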