author	Mikael Pettersson <mikpe@it.uu.se>	2008-08-20 04:36:07 -0400
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2008-09-01 07:06:26 -0400
commit	e589ed23dd27b890900eb7514f0a9e297d1e02b5
tree	f7f97ea17983bfc41d85d86e94fb580267f991cc /arch/arm/include/asm/futex.h
parent	3305a60795442a22fe8e9f5fb93a6f1f8dea6bb2
[ARM] 5218/1: arm: improved futex support
Linux/ARM currently doesn't support robust or PI futexes.

The problem is that the kernel wants to perform certain ops
(cmpxchg, set, add, or, andn, xor) atomically on user-space
addresses, and ARM's futex.h doesn't support that.

This patch adds that support, but only for uniprocessor machines.
For UP it's enough to disable preemption to ensure mutual exclusion
with other software agents (futexes don't need to care about other
hardware agents, fortunately).

This patch is based on one posted by Khem Raj on 2007-08-01
<http://marc.info/?l=linux-arm-kernel&m=118599407413016&w=2>.
(That patch is included in the -RT kernel patches.)

My changes since that version include:

* corrected implementation of FUTEX_OP_ANDN (must complement oparg)
* added missing memory clobber to futex_atomic_cmpxchg_inatomic()
* removed spinlock because it's unnecessary for UP and insufficient
  for SMP, instead the code is restricted to UP and relies on the
  fact that pagefault_disable() also disables preemption
* coding style cleanups

Tested on ARMv5 XScales with the glibc-2.6 nptl test suite.

Tested-by: Bruce Ashfield <bruce.ashfield@windriver.com>
Signed-off-by: Mikael Pettersson <mikpe@it.uu.se>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
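For reference, the operation futex_atomic_op_inuser() must apply arrives
packed into a single 32-bit encoded_op word: the opcode in the top nibble
(with FUTEX_OP_OPARG_SHIFT as a flag bit), the comparison in the next
nibble, then two signed 12-bit arguments. The stand-alone sketch below is
hypothetical, not part of the patch: it mirrors the decode in the patch's
futex_atomic_op_inuser() but applies the op to a plain int rather than a
user-space address, so none of the atomicity machinery is needed. The
constants match <linux/futex.h>, and it makes visible why FUTEX_OP_ANDN
must pass the complement of oparg.

#include <stdio.h>

#define FUTEX_OP_SET		0	/* *uaddr = oparg */
#define FUTEX_OP_ADD		1	/* *uaddr += oparg */
#define FUTEX_OP_OR		2	/* *uaddr |= oparg */
#define FUTEX_OP_ANDN		3	/* *uaddr &= ~oparg */
#define FUTEX_OP_XOR		4	/* *uaddr ^= oparg */
#define FUTEX_OP_OPARG_SHIFT	8	/* use (1 << oparg) as the operand */

static int apply_futex_op(int encoded_op, int *uval)
{
	int op = (encoded_op >> 28) & 7;	/* opcode, minus the shift flag */
	int oparg = (encoded_op << 8) >> 20;	/* signed 12-bit operand, bits 12..23 */
	int oldval = *uval;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	switch (op) {
	case FUTEX_OP_SET:	*uval = oparg;			break;
	case FUTEX_OP_ADD:	*uval = oldval + oparg;		break;
	case FUTEX_OP_OR:	*uval = oldval | oparg;		break;
	case FUTEX_OP_ANDN:	*uval = oldval & ~oparg;	break;	/* the complement this patch fixes */
	case FUTEX_OP_XOR:	*uval = oldval ^ oparg;		break;
	}
	return oldval;
}

int main(void)
{
	int v = 0x0f;

	/* ANDN with oparg = 0x3 clears bits 0-1: 0x0f & ~0x3 == 0x0c */
	apply_futex_op((FUTEX_OP_ANDN << 28) | (0x3 << 12), &v);
	printf("%#x\n", v);	/* prints 0xc */
	return 0;
}

Without the complement, ANDN would compute oldval & oparg and leave
v == 0x3 here, which is the bug the earlier version of this patch had.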
Diffstat (limited to 'arch/arm/include/asm/futex.h')
-rw-r--r--	arch/arm/include/asm/futex.h	124
1 file changed, 121 insertions(+), 3 deletions(-)
diff --git a/arch/arm/include/asm/futex.h b/arch/arm/include/asm/futex.h
index 6a332a9f099c..9ee743b95de8 100644
--- a/arch/arm/include/asm/futex.h
+++ b/arch/arm/include/asm/futex.h
@@ -1,6 +1,124 @@
-#ifndef _ASM_FUTEX_H
-#define _ASM_FUTEX_H
+#ifndef _ASM_ARM_FUTEX_H
+#define _ASM_ARM_FUTEX_H
+
+#ifdef __KERNEL__
+
+#ifdef CONFIG_SMP
 
 #include <asm-generic/futex.h>
 
-#endif
+#else /* !SMP, we can work around lack of atomic ops by disabling preemption */
+
+#include <linux/futex.h>
+#include <linux/preempt.h>
+#include <linux/uaccess.h>
+#include <asm/errno.h>
+
+#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
+	__asm__ __volatile__(					\
+	"1:	ldrt	%1, [%2]\n"				\
+	"	" insn "\n"					\
+	"2:	strt	%0, [%2]\n"				\
+	"	mov	%0, #0\n"				\
+	"3:\n"							\
+	"	.section __ex_table,\"a\"\n"			\
+	"	.align	3\n"					\
+	"	.long	1b, 4f, 2b, 4f\n"			\
+	"	.previous\n"					\
+	"	.section .fixup,\"ax\"\n"			\
+	"4:	mov	%0, %4\n"				\
+	"	b	3b\n"					\
+	"	.previous"					\
+	: "=&r" (ret), "=&r" (oldval)				\
+	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
+	: "cc", "memory")
+
+static inline int
+futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
+{
+	int op = (encoded_op >> 28) & 7;
+	int cmp = (encoded_op >> 24) & 15;
+	int oparg = (encoded_op << 8) >> 20;
+	int cmparg = (encoded_op << 20) >> 20;
+	int oldval = 0, ret;
+
+	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+		oparg = 1 << oparg;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	pagefault_disable();	/* implies preempt_disable() */
+
+	switch (op) {
+	case FUTEX_OP_SET:
+		__futex_atomic_op("mov	%0, %3", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ADD:
+		__futex_atomic_op("add	%0, %1, %3", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_OR:
+		__futex_atomic_op("orr	%0, %1, %3", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ANDN:
+		__futex_atomic_op("and	%0, %1, %3", ret, oldval, uaddr, ~oparg);
+		break;
+	case FUTEX_OP_XOR:
+		__futex_atomic_op("eor	%0, %1, %3", ret, oldval, uaddr, oparg);
+		break;
+	default:
+		ret = -ENOSYS;
+	}
+
+	pagefault_enable();	/* subsumes preempt_enable() */
+
+	if (!ret) {
+		switch (cmp) {
+		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
+		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
+		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
+		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
+		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
+		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+		default: ret = -ENOSYS;
+		}
+	}
+	return ret;
+}
+
+static inline int
+futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+{
+	int val;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	pagefault_disable();	/* implies preempt_disable() */
+
+	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
+	"1:	ldrt	%0, [%3]\n"
+	"	teq	%0, %1\n"
+	"2:	streqt	%2, [%3]\n"
+	"3:\n"
+	"	.section __ex_table,\"a\"\n"
+	"	.align	3\n"
+	"	.long	1b, 4f, 2b, 4f\n"
+	"	.previous\n"
+	"	.section .fixup,\"ax\"\n"
+	"4:	mov	%0, %4\n"
+	"	b	3b\n"
+	"	.previous"
+	: "=&r" (val)
+	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
+	: "cc", "memory");
+
+	pagefault_enable();	/* subsumes preempt_enable() */
+
+	return val;
+}
+
+#endif /* !SMP */
+
+#endif /* __KERNEL__ */
+#endif /* _ASM_ARM_FUTEX_H */
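Stripped of the exception-table plumbing, futex_atomic_cmpxchg_inatomic()
above is just a load, a compare, and a conditional store, made atomic on
UP purely by holding preemption off for the duration. The C-level model
below is an illustration of that reasoning, not kernel code:
up_cmpxchg_model() is a hypothetical name, and it deliberately omits what
the inline asm actually provides, namely the ldrt/strt user-mode accesses
and the fixup that turns a faulting access into -EFAULT. It assumes kernel
context, where <linux/uaccess.h> supplies pagefault_disable()/enable().

#include <linux/uaccess.h>

static inline int up_cmpxchg_model(int *uaddr, int oldval, int newval)
{
	int val;

	pagefault_disable();		/* implies preempt_disable() on UP */
	val = *uaddr;			/* "ldrt" in the real code */
	if (val == oldval)
		*uaddr = newval;	/* "streqt" in the real code */
	pagefault_enable();		/* subsumes preempt_enable() */

	return val;			/* val == oldval means the swap happened */
}

Since no other thread can be scheduled between the load and the store, no
software agent can observe or modify *uaddr mid-sequence; that is exactly
why this approach is sufficient for UP but, as the commit message notes,
insufficient for SMP, where another CPU is a hardware agent that preemption
cannot exclude.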