about summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
Diffstat (limited to 'include')
-rw-r--r--include/asm-x86/futex.h138
-rw-r--r--include/asm-x86/futex_32.h133
-rw-r--r--include/asm-x86/futex_64.h127
3 files changed, 134 insertions, 264 deletions
diff --git a/include/asm-x86/futex.h b/include/asm-x86/futex.h
index 1f4610e0c613..62828d63f1b1 100644
--- a/include/asm-x86/futex.h
+++ b/include/asm-x86/futex.h
@@ -1,5 +1,135 @@
1#ifdef CONFIG_X86_32 1#ifndef _ASM_X86_FUTEX_H
2# include "futex_32.h" 2#define _ASM_X86_FUTEX_H
3#else 3
4# include "futex_64.h" 4#ifdef __KERNEL__
5
6#include <linux/futex.h>
7
8#include <asm/asm.h>
9#include <asm/errno.h>
10#include <asm/processor.h>
11#include <asm/system.h>
12#include <asm/uaccess.h>
13
14#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
15 __asm__ __volatile( \
16"1: " insn "\n" \
17"2: .section .fixup,\"ax\"\n \
183: mov %3, %1\n \
19 jmp 2b\n \
20 .previous\n \
21 .section __ex_table,\"a\"\n \
22 .align 8\n" \
23 _ASM_PTR "1b,3b\n \
24 .previous" \
25 : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
26 : "i" (-EFAULT), "0" (oparg), "1" (0))
27
/*
 * __futex_atomic_op2 - read the user word, apply INSN (e.g. "orl",
 * "andl", "xorl") to a scratch copy, and commit it with a locked
 * cmpxchgl, retrying from 1: until no other writer intervened.
 *
 * NOTE: expands "=&r" (tem) — the invoking scope must declare a local
 * int named 'tem' as the scratch register.  Faults at 1: or 2: are
 * routed through the __ex_table entries (1b->4b, 2b->4b); the fixup at
 * 4: stores -EFAULT (%5) into ret (%1).  ret is preloaded with 0.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile(					\
"1:	movl	%2, %0\n \
	movl	%0, %3\n"					\
	insn "\n"						\
"2:	" LOCK_PREFIX "cmpxchgl %3, %2\n \
	jnz	1b\n \
3:	.section .fixup,\"ax\"\n \
4:	mov	%5, %1\n \
	jmp	3b\n \
	.previous\n \
	.section __ex_table,\"a\"\n \
	.align	8\n"						\
	_ASM_PTR "1b,4b,2b,4b\n \
	.previous"						\
	: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),		\
	  "=&r" (tem)						\
	: "r" (oparg), "i" (-EFAULT), "1" (0))
46
/*
 * futex_atomic_op_inuser - execute the futex operation encoded in
 * encoded_op on the user-space word at uaddr (FUTEX_WAKE_OP helper).
 *
 * Returns the result of the encoded comparison against the word's
 * previous value (non-zero when it holds), -EFAULT on a faulting user
 * pointer, or -ENOSYS for an unsupported op/cmp code.
 */
static inline int
futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;	/* operation: bits 31..28 */
	int cmp = (encoded_op >> 24) & 15;	/* comparison: bits 27..24 */
	int oparg = (encoded_op << 8) >> 20;	/* sign-extended 12-bit operand */
	int cmparg = (encoded_op << 20) >> 20;	/* sign-extended 12-bit cmp arg */
	int oldval = 0, ret, tem;		/* 'tem' used by __futex_atomic_op2 */

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;		/* operand encodes a shift count */

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
	/* Real i386 machines can only support FUTEX_OP_SET */
	if (op != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
		return -ENOSYS;
#endif

	/* Faults below are fixed up via __ex_table, not handled as page faults. */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		/* Evaluate the requested comparison against the old value. */
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
106
/*
 * futex_atomic_cmpxchg_inatomic - atomically compare-and-exchange the
 * user word at uaddr: if *uaddr == oldval, store newval.
 *
 * Returns the value the word actually held before the operation
 * (equal to oldval on success), or -EFAULT if the access faulted
 * (routed via the __ex_table entry 1b -> 3b).
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	__asm__ __volatile__(
		"1:	" LOCK_PREFIX "cmpxchgl %3, %1	\n"

		"2:	.section .fixup, \"ax\"	\n"
		"3:	mov     %2, %0	\n"
		"	jmp     2b	\n"
		"	.previous	\n"

		"	.section __ex_table, \"a\"	\n"
		"	.align  8	\n"
		_ASM_PTR " 1b,3b	\n"
		"	.previous	\n"

		: "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "0" (oldval)
		: "memory"
	);

	return oldval;
}
133
134#endif
5#endif 135#endif
diff --git a/include/asm-x86/futex_32.h b/include/asm-x86/futex_32.h
deleted file mode 100644
index c80013e19c21..000000000000
--- a/include/asm-x86/futex_32.h
+++ /dev/null
@@ -1,133 +0,0 @@
1#ifndef _ASM_FUTEX_H
2#define _ASM_FUTEX_H
3
4#ifdef __KERNEL__
5
6#include <linux/futex.h>
7
8#include <asm/asm.h>
9#include <asm/errno.h>
10#include <asm/system.h>
11#include <asm/processor.h>
12#include <asm/uaccess.h>
13
/*
 * __futex_atomic_op1 - run a single atomic read-modify-write instruction
 * on the user word at uaddr.  A fault at 1: is caught via the __ex_table
 * entry (1b -> 3b): the fixup stores -EFAULT (%3) into ret (%1) and
 * resumes at 2:.  ret is preloaded with 0, oldval with oparg.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (					\
"1:	" insn "\n"					\
"2:	.section .fixup,\"ax\"\n\
3:	mov	%3, %1\n\
	jmp	2b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n"					\
	_ASM_PTR "1b,3b\n \
	.previous"					\
	: "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
	: "i" (-EFAULT), "0" (oparg), "1" (0))
27
/*
 * __futex_atomic_op2 - load the user word, apply INSN to a scratch
 * copy, and commit with a locked cmpxchgl, retrying from 1: on
 * contention.  NOTE: expands "=&r" (tem) — the invoking scope must
 * declare a local int named 'tem'.  Faults at 1: or 2: reach the fixup
 * at 4: via __ex_table (1b->4b, 2b->4b), which stores -EFAULT in ret.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (					\
"1:	movl	%2, %0\n\
	movl	%0, %3\n"				\
	insn "\n"					\
"2:	" LOCK_PREFIX "cmpxchgl %3, %2\n\
	jnz	1b\n\
3:	.section .fixup,\"ax\"\n\
4:	mov	%5, %1\n\
	jmp	3b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n"					\
	_ASM_PTR "1b,4b,2b,4b\n \
	.previous"					\
	: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),	\
	  "=&r" (tem)					\
	: "r" (oparg), "i" (-EFAULT), "1" (0))
46
47static inline int
48futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
49{
50 int op = (encoded_op >> 28) & 7;
51 int cmp = (encoded_op >> 24) & 15;
52 int oparg = (encoded_op << 8) >> 20;
53 int cmparg = (encoded_op << 20) >> 20;
54 int oldval = 0, ret, tem;
55 if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
56 oparg = 1 << oparg;
57
58 if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
59 return -EFAULT;
60
61#ifndef CONFIG_X86_BSWAP
62 if (op == FUTEX_OP_SET && boot_cpu_data.x86 == 3)
63 return -ENOSYS;
64#endif
65
66 pagefault_disable();
67
68 switch (op) {
69 case FUTEX_OP_SET:
70 __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
71 break;
72 case FUTEX_OP_ADD:
73 __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
74 uaddr, oparg);
75 break;
76 case FUTEX_OP_OR:
77 __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
78 break;
79 case FUTEX_OP_ANDN:
80 __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
81 break;
82 case FUTEX_OP_XOR:
83 __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
84 break;
85 default:
86 ret = -ENOSYS;
87 }
88
89 pagefault_enable();
90
91 if (!ret) {
92 switch (cmp) {
93 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
94 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
95 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
96 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
97 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
98 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
99 default: ret = -ENOSYS;
100 }
101 }
102 return ret;
103}
104
/*
 * futex_atomic_cmpxchg_inatomic - atomically compare-and-exchange the
 * user word at uaddr: if *uaddr == oldval, store newval.  Returns the
 * word's previous value (== oldval on success), or -EFAULT on a fault
 * (routed via the __ex_table entry 1b -> 3b).
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	__asm__ __volatile__(
		"1:	" LOCK_PREFIX "cmpxchgl %3, %1	\n"

		"2:	.section .fixup, \"ax\"	\n"
		"3:	mov     %2, %0	\n"
		"	jmp     2b	\n"
		"	.previous	\n"

		"	.section __ex_table, \"a\"	\n"
		"	.align  8	\n"
		_ASM_PTR " 1b,3b	\n"
		"	.previous	\n"

		: "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "0" (oldval)
		: "memory"
	);

	return oldval;
}
131
132#endif
133#endif
diff --git a/include/asm-x86/futex_64.h b/include/asm-x86/futex_64.h
deleted file mode 100644
index 02964d225d18..000000000000
--- a/include/asm-x86/futex_64.h
+++ /dev/null
@@ -1,127 +0,0 @@
1#ifndef _ASM_FUTEX_H
2#define _ASM_FUTEX_H
3
4#ifdef __KERNEL__
5
6#include <linux/futex.h>
7
8#include <asm/asm.h>
9#include <asm/errno.h>
10#include <asm/system.h>
11#include <asm/uaccess.h>
12
/*
 * __futex_atomic_op1 - run a single atomic read-modify-write instruction
 * on the user word at uaddr.  A fault at 1: is caught via the __ex_table
 * entry (1b -> 3b): the fixup stores -EFAULT (%3) into ret (%1) and
 * resumes at 2:.  ret is preloaded with 0, oldval with oparg.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (					\
"1:	" insn "\n"					\
"2:	.section .fixup,\"ax\"\n\
3:	mov	%3, %1\n\
	jmp	2b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n"					\
	_ASM_PTR "1b,3b\n \
	.previous"					\
	: "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
	: "i" (-EFAULT), "0" (oparg), "1" (0))
26
/*
 * __futex_atomic_op2 - load the user word, apply INSN to a scratch
 * copy, and commit with a locked cmpxchgl, retrying from 1: on
 * contention.  NOTE: expands "=&r" (tem) — the invoking scope must
 * declare a local int named 'tem'.  Faults at 1: or 2: reach the fixup
 * at 4: via __ex_table (1b->4b, 2b->4b), which stores -EFAULT in ret.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (					\
"1:	movl	%2, %0\n\
	movl	%0, %3\n"				\
	insn "\n"					\
"2:	" LOCK_PREFIX "cmpxchgl %3, %2\n\
	jnz	1b\n\
3:	.section .fixup,\"ax\"\n\
4:	mov	%5, %1\n\
	jmp	3b\n\
	.previous\n\
	.section __ex_table,\"a\"\n\
	.align	8\n"					\
	_ASM_PTR "1b,4b,2b,4b\n \
	.previous"					\
	: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),	\
	  "=&r" (tem)					\
	: "r" (oparg), "i" (-EFAULT), "1" (0))
45
/*
 * futex_atomic_op_inuser - execute the futex operation encoded in
 * encoded_op on the user-space word at uaddr (FUTEX_WAKE_OP helper).
 * 64-bit variant: no i386 capability check is needed here.
 *
 * Returns the result of the encoded comparison against the word's
 * previous value (non-zero when it holds), -EFAULT on a faulting user
 * pointer, or -ENOSYS for an unsupported op/cmp code.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;	/* operation: bits 31..28 */
	int cmp = (encoded_op >> 24) & 15;	/* comparison: bits 27..24 */
	int oparg = (encoded_op << 8) >> 20;	/* sign-extended 12-bit operand */
	int cmparg = (encoded_op << 20) >> 20;	/* sign-extended 12-bit cmp arg */
	int oldval = 0, ret, tem;		/* 'tem' used by __futex_atomic_op2 */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;		/* operand encodes a shift count */

	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	/* Faults below are fixed up via __ex_table, not handled as page faults. */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		/* Evaluate the requested comparison against the old value. */
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
98
/*
 * futex_atomic_cmpxchg_inatomic - atomically compare-and-exchange the
 * user word at uaddr: if *uaddr == oldval, store newval.  Returns the
 * word's previous value (== oldval on success), or -EFAULT on a fault
 * (routed via the __ex_table entry 1b -> 3b).
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	__asm__ __volatile__(
		"1:	" LOCK_PREFIX "cmpxchgl %3, %1	\n"

		"2:	.section .fixup, \"ax\"	\n"
		"3:	mov     %2, %0	\n"
		"	jmp     2b	\n"
		"	.previous	\n"

		"	.section __ex_table, \"a\"	\n"
		"	.align  8	\n"
		_ASM_PTR " 1b,3b	\n"
		"	.previous	\n"

		: "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "0" (oldval)
		: "memory"
	);

	return oldval;
}
125
126#endif
127#endif