author     Will Deacon <will.deacon@arm.com>	2011-04-28 13:43:01 -0400
committer  Russell King <rmk+kernel@arm.linux.org.uk>	2011-05-12 05:11:44 -0400
commit     c1b0db56604b4ccc55a325104b14093aeedeb829 (patch)
tree       3c7a8df020dc52fa87ce013cfad42df3dea5f312 /arch/arm/include/asm/futex.h
parent     af3e4fd37a18f2e5a00175bc96061541d1364a3b (diff)
ARM: 6889/1: futex: add SMP futex support when !CPU_USE_DOMAINS
This patch uses the load/store exclusive instructions to add SMP futex
support for ARM. Since the ARM architecture does not provide instructions
for unprivileged exclusive memory accesses, we can only provide SMP futexes
when CPU domain support is disabled.

Cc: Nicolas Pitre <nico@fluxnic.net>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Diffstat (limited to 'arch/arm/include/asm/futex.h')
-rw-r--r--  arch/arm/include/asm/futex.h  137
1 file changed, 90 insertions, 47 deletions
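Before the diff itself, a minimal stand-alone sketch of the load/store-exclusive
compare-and-swap pattern the patch relies on may help. This is not kernel code:
the function name cas_u32 and the main() harness are invented for illustration,
the kernel's memory barriers and user-access fault fixup are omitted, and it
assumes a user-space build for ARMv6 or later (ldrex/strex do not exist on
earlier cores).

/*
 * Illustrative sketch only: a single-word compare-and-swap built from
 * ldrex/strex, mirroring the retry loop used by the patch below.
 */
#include <stdint.h>
#include <stdio.h>

static inline int cas_u32(uint32_t *addr, uint32_t oldval, uint32_t newval)
{
	uint32_t cur, fail;

	__asm__ __volatile__(
	"1:	ldrex	%0, [%2]\n"		/* cur = *addr, mark exclusive   */
	"	teq	%0, %3\n"		/* does it match oldval?         */
	"	ite	eq\n"			/* Thumb-2 IT block for the pair */
	"	strexeq	%1, %4, [%2]\n"		/* match: try to store newval    */
	"	movne	%1, #0\n"		/* mismatch: give up, no retry   */
	"	teq	%1, #0\n"
	"	bne	1b\n"			/* store lost exclusivity: retry */
	: "=&r" (cur), "=&r" (fail)
	: "r" (addr), "r" (oldval), "r" (newval)
	: "cc", "memory");

	return cur == oldval;			/* nonzero if the swap happened  */
}

int main(void)
{
	uint32_t lock = 0;

	printf("first CAS 0->1: %d\n", cas_u32(&lock, 0, 1));	/* succeeds */
	printf("second CAS 0->1: %d\n", cas_u32(&lock, 0, 1));	/* fails    */
	printf("lock = %u\n", lock);
	return 0;
}

The kernel's SMP __futex_atomic_op and futex_atomic_cmpxchg_inatomic in the
diff follow the same shape, with smp_mb() barriers around the loop and an
exception-table entry (__futex_atomic_ex_table) so a faulting user access
returns -EFAULT instead of oopsing.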
diff --git a/arch/arm/include/asm/futex.h b/arch/arm/include/asm/futex.h
index 199a6b6de7f4..8c73900da9ed 100644
--- a/arch/arm/include/asm/futex.h
+++ b/arch/arm/include/asm/futex.h
@@ -3,16 +3,74 @@
 
 #ifdef __KERNEL__
 
+#if defined(CONFIG_CPU_USE_DOMAINS) && defined(CONFIG_SMP)
+/* ARM doesn't provide unprivileged exclusive memory accessors */
+#include <asm-generic/futex.h>
+#else
+
+#include <linux/futex.h>
+#include <linux/uaccess.h>
+#include <asm/errno.h>
+
+#define __futex_atomic_ex_table(err_reg)			\
+	"3:\n"							\
+	"	.pushsection __ex_table,\"a\"\n"		\
+	"	.align	3\n"					\
+	"	.long	1b, 4f, 2b, 4f\n"			\
+	"	.popsection\n"					\
+	"	.pushsection .fixup,\"ax\"\n"			\
+	"4:	mov	%0, " err_reg "\n"			\
+	"	b	3b\n"					\
+	"	.popsection"
+
 #ifdef CONFIG_SMP
 
-#include <asm-generic/futex.h>
+#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
+	smp_mb();						\
+	__asm__ __volatile__(					\
+	"1:	ldrex	%1, [%2]\n"				\
+	"	" insn "\n"					\
+	"2:	strex	%1, %0, [%2]\n"				\
+	"	teq	%1, #0\n"				\
+	"	bne	1b\n"					\
+	"	mov	%0, #0\n"				\
+	__futex_atomic_ex_table("%4")				\
+	: "=&r" (ret), "=&r" (oldval)				\
+	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
+	: "cc", "memory")
+
+static inline int
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+			      u32 oldval, u32 newval)
+{
+	int ret;
+	u32 val;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+		return -EFAULT;
+
+	smp_mb();
+	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
+	"1:	ldrex	%1, [%4]\n"
+	"	teq	%1, %2\n"
+	"	ite	eq	@ explicit IT needed for the 2b label\n"
+	"2:	strexeq	%0, %3, [%4]\n"
+	"	movne	%0, #0\n"
+	"	teq	%0, #0\n"
+	"	bne	1b\n"
+	__futex_atomic_ex_table("%5")
+	: "=&r" (ret), "=&r" (val)
+	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
+	: "cc", "memory");
+	smp_mb();
+
+	*uval = val;
+	return ret;
+}
 
 #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
 
-#include <linux/futex.h>
 #include <linux/preempt.h>
-#include <linux/uaccess.h>
-#include <asm/errno.h>
 #include <asm/domain.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
@@ -21,20 +79,38 @@
21 " " insn "\n" \ 79 " " insn "\n" \
22 "2: " T(str) " %0, [%2]\n" \ 80 "2: " T(str) " %0, [%2]\n" \
23 " mov %0, #0\n" \ 81 " mov %0, #0\n" \
24 "3:\n" \ 82 __futex_atomic_ex_table("%4") \
25 " .pushsection __ex_table,\"a\"\n" \
26 " .align 3\n" \
27 " .long 1b, 4f, 2b, 4f\n" \
28 " .popsection\n" \
29 " .pushsection .fixup,\"ax\"\n" \
30 "4: mov %0, %4\n" \
31 " b 3b\n" \
32 " .popsection" \
33 : "=&r" (ret), "=&r" (oldval) \ 83 : "=&r" (ret), "=&r" (oldval) \
34 : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \ 84 : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
35 : "cc", "memory") 85 : "cc", "memory")
36 86
37static inline int 87static inline int
88futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
89 u32 oldval, u32 newval)
90{
91 int ret = 0;
92 u32 val;
93
94 if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
95 return -EFAULT;
96
97 __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
98 "1: " T(ldr) " %1, [%4]\n"
99 " teq %1, %2\n"
100 " it eq @ explicit IT needed for the 2b label\n"
101 "2: " T(streq) " %3, [%4]\n"
102 __futex_atomic_ex_table("%5")
103 : "+r" (ret), "=&r" (val)
104 : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
105 : "cc", "memory");
106
107 *uval = val;
108 return ret;
109}
110
111#endif /* !SMP */
112
113static inline int
38futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr) 114futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
39{ 115{
40 int op = (encoded_op >> 28) & 7; 116 int op = (encoded_op >> 28) & 7;
@@ -87,39 +163,6 @@ futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
 	return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
-			      u32 oldval, u32 newval)
-{
-	int ret = 0;
-	u32 val;
-
-	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
-		return -EFAULT;
-
-	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
-	"1:	" T(ldr) "	%1, [%4]\n"
-	"	teq	%1, %2\n"
-	"	it	eq	@ explicit IT needed for the 2b label\n"
-	"2:	" T(streq) "	%3, [%4]\n"
-	"3:\n"
-	"	.pushsection __ex_table,\"a\"\n"
-	"	.align	3\n"
-	"	.long	1b, 4f, 2b, 4f\n"
-	"	.popsection\n"
-	"	.pushsection .fixup,\"ax\"\n"
-	"4:	mov	%0, %5\n"
-	"	b	3b\n"
-	"	.popsection"
-	: "+r" (ret), "=&r" (val)
-	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
-	: "cc", "memory");
-
-	*uval = val;
-	return ret;
-}
-
-#endif /* !SMP */
-
+#endif /* !(CPU_USE_DOMAINS && SMP) */
 #endif /* __KERNEL__ */
 #endif /* _ASM_ARM_FUTEX_H */