author     H. Peter Anvin <hpa@linux.intel.com>    2012-09-21 15:43:12 -0400
committer  H. Peter Anvin <hpa@linux.intel.com>    2012-09-21 15:45:27 -0400
commit     63bcff2a307b9bcc712a8251eb27df8b2e117967 (patch)
tree       0a23a378d2c967edf63e9bb2b5df2288bf30859c /arch/x86/include/asm
parent     a052858fabb376b695f2c125633daa6728e0f284 (diff)
x86, smap: Add STAC and CLAC instructions to control user space access
When Supervisor Mode Access Prevention (SMAP) is enabled, access to
userspace from the kernel is controlled by the AC flag.  To make the
performance of manipulating that flag acceptable, there are two new
instructions, STAC and CLAC, to set and clear it.

This patch adds those instructions, via alternative(), when the SMAP
feature is enabled.  It also adds X86_EFLAGS_AC unconditionally to the
SYSCALL entry mask; there is simply no reason to make that one
conditional.

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Link: http://lkml.kernel.org/r/1348256595-29119-9-git-send-email-hpa@linux.intel.com
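For illustration only (not part of the commit): a minimal sketch of the access pattern this patch establishes, using the stac()/clac() helpers from <asm/smap.h>. The helper name read_user_word_sketch() and the direct dereference are hypothetical; the real uaccess macros changed below additionally route faults through the .fixup/exception-table machinery visible in the hunks.

#include <asm/smap.h>   /* stac(), clac() */

/* Hypothetical helper, illustrating the STAC/CLAC bracket only. */
static inline unsigned long read_user_word_sketch(const unsigned long __user *uaddr)
{
        unsigned long val;

        stac();         /* set EFLAGS.AC: user-space access permitted      */
        val = *(const unsigned long __force *)uaddr;   /* the user access  */
        clac();         /* clear EFLAGS.AC: user-space access faults again */
        return val;
}

When X86_FEATURE_SMAP is not present, alternative() leaves the default 3-byte NOPs in place (see the smap.h hunk below), so the bracket costs essentially nothing on hardware without SMAP.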
Diffstat (limited to 'arch/x86/include/asm')
-rw-r--r--   arch/x86/include/asm/fpu-internal.h   10
-rw-r--r--   arch/x86/include/asm/futex.h          19
-rw-r--r--   arch/x86/include/asm/smap.h            4
-rw-r--r--   arch/x86/include/asm/uaccess.h        31
-rw-r--r--   arch/x86/include/asm/xsave.h          10
5 files changed, 46 insertions, 28 deletions
diff --git a/arch/x86/include/asm/fpu-internal.h b/arch/x86/include/asm/fpu-internal.h
index 75f4c6d6a331..0fe13583a028 100644
--- a/arch/x86/include/asm/fpu-internal.h
+++ b/arch/x86/include/asm/fpu-internal.h
@@ -126,8 +126,9 @@ static inline int fxsave_user(struct i387_fxsave_struct __user *fx)
 
         /* See comment in fxsave() below. */
 #ifdef CONFIG_AS_FXSAVEQ
-        asm volatile("1: fxsaveq %[fx]\n\t"
-                     "2:\n"
+        asm volatile(ASM_STAC "\n"
+                     "1: fxsaveq %[fx]\n\t"
+                     "2: " ASM_CLAC "\n"
                      ".section .fixup,\"ax\"\n"
                      "3: movl $-1,%[err]\n"
                      " jmp 2b\n"
@@ -136,8 +137,9 @@ static inline int fxsave_user(struct i387_fxsave_struct __user *fx)
                      : [err] "=r" (err), [fx] "=m" (*fx)
                      : "0" (0));
 #else
-        asm volatile("1: rex64/fxsave (%[fx])\n\t"
-                     "2:\n"
+        asm volatile(ASM_STAC "\n"
+                     "1: rex64/fxsave (%[fx])\n\t"
+                     "2: " ASM_CLAC "\n"
                      ".section .fixup,\"ax\"\n"
                      "3: movl $-1,%[err]\n"
                      " jmp 2b\n"
diff --git a/arch/x86/include/asm/futex.h b/arch/x86/include/asm/futex.h
index 71ecbcba1a4e..f373046e63ec 100644
--- a/arch/x86/include/asm/futex.h
+++ b/arch/x86/include/asm/futex.h
@@ -9,10 +9,13 @@
 #include <asm/asm.h>
 #include <asm/errno.h>
 #include <asm/processor.h>
+#include <asm/smap.h>
 
 #define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)     \
-        asm volatile("1:\t" insn "\n"                            \
-                     "2:\t.section .fixup,\"ax\"\n"              \
+        asm volatile("\t" ASM_STAC "\n"                          \
+                     "1:\t" insn "\n"                            \
+                     "2:\t" ASM_CLAC "\n"                        \
+                     "\t.section .fixup,\"ax\"\n"                \
                      "3:\tmov\t%3, %1\n"                         \
                      "\tjmp\t2b\n"                               \
                      "\t.previous\n"                             \
@@ -21,12 +24,14 @@
21 : "i" (-EFAULT), "0" (oparg), "1" (0)) 24 : "i" (-EFAULT), "0" (oparg), "1" (0))
22 25
23#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \ 26#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
24 asm volatile("1:\tmovl %2, %0\n" \ 27 asm volatile("\t" ASM_STAC "\n" \
28 "1:\tmovl %2, %0\n" \
25 "\tmovl\t%0, %3\n" \ 29 "\tmovl\t%0, %3\n" \
26 "\t" insn "\n" \ 30 "\t" insn "\n" \
27 "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \ 31 "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
28 "\tjnz\t1b\n" \ 32 "\tjnz\t1b\n" \
29 "3:\t.section .fixup,\"ax\"\n" \ 33 "3:\t" ASM_CLAC "\n" \
34 "\t.section .fixup,\"ax\"\n" \
30 "4:\tmov\t%5, %1\n" \ 35 "4:\tmov\t%5, %1\n" \
31 "\tjmp\t3b\n" \ 36 "\tjmp\t3b\n" \
32 "\t.previous\n" \ 37 "\t.previous\n" \
@@ -122,8 +127,10 @@ static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
         if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                 return -EFAULT;
 
-        asm volatile("1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
-                     "2:\t.section .fixup, \"ax\"\n"
+        asm volatile("\t" ASM_STAC "\n"
+                     "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
+                     "2:\t" ASM_CLAC "\n"
+                     "\t.section .fixup, \"ax\"\n"
                      "3:\tmov %3, %0\n"
                      "\tjmp 2b\n"
                      "\t.previous\n"
diff --git a/arch/x86/include/asm/smap.h b/arch/x86/include/asm/smap.h
index 3989c2492eb5..8d3120f4e270 100644
--- a/arch/x86/include/asm/smap.h
+++ b/arch/x86/include/asm/smap.h
@@ -58,13 +58,13 @@
 
 #ifdef CONFIG_X86_SMAP
 
-static inline void clac(void)
+static __always_inline void clac(void)
 {
         /* Note: a barrier is implicit in alternative() */
         alternative(ASM_NOP3, __stringify(__ASM_CLAC), X86_FEATURE_SMAP);
 }
 
-static inline void stac(void)
+static __always_inline void stac(void)
 {
         /* Note: a barrier is implicit in alternative() */
         alternative(ASM_NOP3, __stringify(__ASM_STAC), X86_FEATURE_SMAP);
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 2c7df3d184f2..b92ece13c238 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -9,6 +9,7 @@
 #include <linux/string.h>
 #include <asm/asm.h>
 #include <asm/page.h>
+#include <asm/smap.h>
 
 #define VERIFY_READ 0
 #define VERIFY_WRITE 1
@@ -192,9 +193,10 @@ extern int __get_user_bad(void);
 
 #ifdef CONFIG_X86_32
 #define __put_user_asm_u64(x, addr, err, errret)                 \
-        asm volatile("1: movl %%eax,0(%2)\n"                     \
+        asm volatile(ASM_STAC "\n"                               \
+                     "1: movl %%eax,0(%2)\n"                     \
                      "2: movl %%edx,4(%2)\n"                     \
-                     "3:\n"                                      \
+                     "3: " ASM_CLAC "\n"                         \
                      ".section .fixup,\"ax\"\n"                  \
                      "4: movl %3,%0\n"                           \
                      " jmp 3b\n"                                 \
@@ -205,9 +207,10 @@ extern int __get_user_bad(void);
205 : "A" (x), "r" (addr), "i" (errret), "0" (err)) 207 : "A" (x), "r" (addr), "i" (errret), "0" (err))
206 208
207#define __put_user_asm_ex_u64(x, addr) \ 209#define __put_user_asm_ex_u64(x, addr) \
208 asm volatile("1: movl %%eax,0(%1)\n" \ 210 asm volatile(ASM_STAC "\n" \
211 "1: movl %%eax,0(%1)\n" \
209 "2: movl %%edx,4(%1)\n" \ 212 "2: movl %%edx,4(%1)\n" \
210 "3:\n" \ 213 "3: " ASM_CLAC "\n" \
211 _ASM_EXTABLE_EX(1b, 2b) \ 214 _ASM_EXTABLE_EX(1b, 2b) \
212 _ASM_EXTABLE_EX(2b, 3b) \ 215 _ASM_EXTABLE_EX(2b, 3b) \
213 : : "A" (x), "r" (addr)) 216 : : "A" (x), "r" (addr))
@@ -379,8 +382,9 @@ do { \
 } while (0)
 
 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errret) \
-        asm volatile("1: mov"itype" %2,%"rtype"1\n"              \
-                     "2:\n"                                      \
+        asm volatile(ASM_STAC "\n"                               \
+                     "1: mov"itype" %2,%"rtype"1\n"              \
+                     "2: " ASM_CLAC "\n"                         \
                      ".section .fixup,\"ax\"\n"                  \
                      "3: mov %3,%0\n"                            \
                      " xor"itype" %"rtype"1,%"rtype"1\n"         \
@@ -412,8 +416,9 @@ do { \
 } while (0)
 
 #define __get_user_asm_ex(x, addr, itype, rtype, ltype)          \
-        asm volatile("1: mov"itype" %1,%"rtype"0\n"              \
-                     "2:\n"                                      \
+        asm volatile(ASM_STAC "\n"                               \
+                     "1: mov"itype" %1,%"rtype"0\n"              \
+                     "2: " ASM_CLAC "\n"                         \
                      _ASM_EXTABLE_EX(1b, 2b)                     \
                      : ltype(x) : "m" (__m(addr)))
 
@@ -443,8 +448,9 @@ struct __large_struct { unsigned long buf[100]; };
  * aliasing issues.
  */
 #define __put_user_asm(x, addr, err, itype, rtype, ltype, errret) \
-        asm volatile("1: mov"itype" %"rtype"1,%2\n"              \
-                     "2:\n"                                      \
+        asm volatile(ASM_STAC "\n"                               \
+                     "1: mov"itype" %"rtype"1,%2\n"              \
+                     "2: " ASM_CLAC "\n"                         \
                      ".section .fixup,\"ax\"\n"                  \
                      "3: mov %3,%0\n"                            \
                      " jmp 2b\n"                                 \
@@ -454,8 +460,9 @@ struct __large_struct { unsigned long buf[100]; };
                      : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err))
 
 #define __put_user_asm_ex(x, addr, itype, rtype, ltype)          \
-        asm volatile("1: mov"itype" %"rtype"0,%1\n"              \
-                     "2:\n"                                      \
+        asm volatile(ASM_STAC "\n"                               \
+                     "1: mov"itype" %"rtype"0,%1\n"              \
+                     "2: " ASM_CLAC "\n"                         \
                      _ASM_EXTABLE_EX(1b, 2b)                     \
                      : : ltype(x), "m" (__m(addr)))
 
diff --git a/arch/x86/include/asm/xsave.h b/arch/x86/include/asm/xsave.h
index 8a1b6f9b594a..2a923bd54341 100644
--- a/arch/x86/include/asm/xsave.h
+++ b/arch/x86/include/asm/xsave.h
@@ -74,8 +74,9 @@ static inline int xsave_user(struct xsave_struct __user *buf)
         if (unlikely(err))
                 return -EFAULT;
 
-        __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
-                             "2:\n"
+        __asm__ __volatile__(ASM_STAC "\n"
+                             "1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
+                             "2: " ASM_CLAC "\n"
                              ".section .fixup,\"ax\"\n"
                              "3: movl $-1,%[err]\n"
                              " jmp 2b\n"
@@ -97,8 +98,9 @@ static inline int xrestore_user(struct xsave_struct __user *buf, u64 mask)
         u32 lmask = mask;
         u32 hmask = mask >> 32;
 
-        __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
-                             "2:\n"
+        __asm__ __volatile__(ASM_STAC "\n"
+                             "1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
+                             "2: " ASM_CLAC "\n"
                              ".section .fixup,\"ax\"\n"
                              "3: movl $-1,%[err]\n"
                              " jmp 2b\n"