about summary refs log tree commit diff stats
path: root/arch/x86/include/asm/xsave.h
diff options
context:
space:
mode:
author: H. Peter Anvin <hpa@linux.intel.com> 2012-09-21 15:43:12 -0400
committer: H. Peter Anvin <hpa@linux.intel.com> 2012-09-21 15:45:27 -0400
commit63bcff2a307b9bcc712a8251eb27df8b2e117967 (patch)
tree 0a23a378d2c967edf63e9bb2b5df2288bf30859c /arch/x86/include/asm/xsave.h
parent a052858fabb376b695f2c125633daa6728e0f284 (diff)
x86, smap: Add STAC and CLAC instructions to control user space access
When Supervisor Mode Access Prevention (SMAP) is enabled, access to userspace from the kernel is controlled by the AC flag. To make the performance of manipulating that flag acceptable, there are two new instructions, STAC and CLAC, to set and clear it.

This patch adds those instructions, via alternative(), when the SMAP feature is enabled. It also adds X86_EFLAGS_AC unconditionally to the SYSCALL entry mask; there is simply no reason to make that one conditional.

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Link: http://lkml.kernel.org/r/1348256595-29119-9-git-send-email-hpa@linux.intel.com
Diffstat (limited to 'arch/x86/include/asm/xsave.h')
-rw-r--r-- arch/x86/include/asm/xsave.h | 10
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/arch/x86/include/asm/xsave.h b/arch/x86/include/asm/xsave.h
index 8a1b6f9b594a..2a923bd54341 100644
--- a/arch/x86/include/asm/xsave.h
+++ b/arch/x86/include/asm/xsave.h
@@ -74,8 +74,9 @@ static inline int xsave_user(struct xsave_struct __user *buf)
74 if (unlikely(err)) 74 if (unlikely(err))
75 return -EFAULT; 75 return -EFAULT;
76 76
77 __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x27\n" 77 __asm__ __volatile__(ASM_STAC "\n"
78 "2:\n" 78 "1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
79 "2: " ASM_CLAC "\n"
79 ".section .fixup,\"ax\"\n" 80 ".section .fixup,\"ax\"\n"
80 "3: movl $-1,%[err]\n" 81 "3: movl $-1,%[err]\n"
81 " jmp 2b\n" 82 " jmp 2b\n"
@@ -97,8 +98,9 @@ static inline int xrestore_user(struct xsave_struct __user *buf, u64 mask)
97 u32 lmask = mask; 98 u32 lmask = mask;
98 u32 hmask = mask >> 32; 99 u32 hmask = mask >> 32;
99 100
100 __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n" 101 __asm__ __volatile__(ASM_STAC "\n"
101 "2:\n" 102 "1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
103 "2: " ASM_CLAC "\n"
102 ".section .fixup,\"ax\"\n" 104 ".section .fixup,\"ax\"\n"
103 "3: movl $-1,%[err]\n" 105 "3: movl $-1,%[err]\n"
104 " jmp 2b\n" 106 " jmp 2b\n"