author		Ivan Kokshaysky <ink@jurassic.park.msu.ru>	2009-04-30 18:08:48 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2009-05-02 18:36:10 -0400
commit		77b4cf5cb0c9d6010a18030a0e0a8d2aaf6b43ec
tree		b9b0b9f25e4495499ae5a35f1beaca7a5b240201
parent		08a42e86bce511b45ca3eee40f51674b02a777d1
alpha: futex implementation
Signed-off-by: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: Al Viro <viro@ZenIV.linux.org.uk>
Cc: Richard Henderson <rth@twiddle.net>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
-rw-r--r--	arch/alpha/include/asm/barrier.h	|   2
-rw-r--r--	arch/alpha/include/asm/futex.h		| 118
2 files changed, 116 insertions(+), 4 deletions(-)
diff --git a/arch/alpha/include/asm/barrier.h b/arch/alpha/include/asm/barrier.h
index ac78eba909bc..ce8860a0b32d 100644
--- a/arch/alpha/include/asm/barrier.h
+++ b/arch/alpha/include/asm/barrier.h
@@ -16,11 +16,13 @@ __asm__ __volatile__("wmb": : :"memory")
 __asm__ __volatile__("mb": : :"memory")
 
 #ifdef CONFIG_SMP
+#define __ASM_SMP_MB	"\tmb\n"
 #define smp_mb()	mb()
 #define smp_rmb()	rmb()
 #define smp_wmb()	wmb()
 #define smp_read_barrier_depends()	read_barrier_depends()
 #else
+#define __ASM_SMP_MB
 #define smp_mb()	barrier()
 #define smp_rmb()	barrier()
 #define smp_wmb()	barrier()
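For context, a minimal standalone sketch (illustration only, not part of the patch) of what the new __ASM_SMP_MB macro is for: it is a bare assembler string fragment, so ordinary C string-literal concatenation splices a leading "mb" into the futex asm templates below on CONFIG_SMP kernels, and nothing at all on uniprocessor builds.

	/* Sketch only: mirrors the CONFIG_SMP switch added to barrier.h above. */
	#ifdef CONFIG_SMP
	#define __ASM_SMP_MB	"\tmb\n"	/* leading memory barrier on SMP */
	#else
	#define __ASM_SMP_MB			/* expands to nothing on UP */
	#endif

	/*
	 * Adjacent string literals concatenate at compile time, so a template
	 * such as
	 *
	 *	__asm__ __volatile__(__ASM_SMP_MB "1:	ldl_l	%0,0(%2)\n" ...);
	 *
	 * begins with an mb instruction only when CONFIG_SMP is set; UP kernels
	 * pay no runtime cost for the barrier.
	 */
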
diff --git a/arch/alpha/include/asm/futex.h b/arch/alpha/include/asm/futex.h
index 6a332a9f099c..945de222ab91 100644
--- a/arch/alpha/include/asm/futex.h
+++ b/arch/alpha/include/asm/futex.h
@@ -1,6 +1,116 @@
-#ifndef _ASM_FUTEX_H
-#define _ASM_FUTEX_H
+#ifndef _ASM_ALPHA_FUTEX_H
+#define _ASM_ALPHA_FUTEX_H
 
-#include <asm-generic/futex.h>
+#ifdef __KERNEL__
 
-#endif
+#include <linux/futex.h>
+#include <linux/uaccess.h>
+#include <asm/errno.h>
+#include <asm/barrier.h>
+
+#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
+	__asm__ __volatile__(					\
+		__ASM_SMP_MB					\
+	"1:	ldl_l	%0,0(%2)\n"				\
+		insn						\
+	"2:	stl_c	%1,0(%2)\n"				\
+	"	beq	%1,4f\n"				\
+	"	mov	$31,%1\n"				\
+	"3:	.subsection 2\n"				\
+	"4:	br	1b\n"					\
+	"	.previous\n"					\
+	"	.section __ex_table,\"a\"\n"			\
+	"	.long	1b-.\n"					\
+	"	lda	$31,3b-1b(%1)\n"			\
+	"	.long	2b-.\n"					\
+	"	lda	$31,3b-2b(%1)\n"			\
+	"	.previous\n"					\
+	:	"=&r" (oldval), "=&r"(ret)			\
+	:	"r" (uaddr), "r"(oparg)				\
+	:	"memory")
+
+static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
+{
+	int op = (encoded_op >> 28) & 7;
+	int cmp = (encoded_op >> 24) & 15;
+	int oparg = (encoded_op << 8) >> 20;
+	int cmparg = (encoded_op << 20) >> 20;
+	int oldval = 0, ret;
+	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+		oparg = 1 << oparg;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	pagefault_disable();
+
+	switch (op) {
+	case FUTEX_OP_SET:
+		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ADD:
+		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_OR:
+		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ANDN:
+		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
+		break;
+	case FUTEX_OP_XOR:
+		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
+		break;
+	default:
+		ret = -ENOSYS;
+	}
+
+	pagefault_enable();
+
+	if (!ret) {
+		switch (cmp) {
+		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
+		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
+		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
+		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
+		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
+		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+		default: ret = -ENOSYS;
+		}
+	}
+	return ret;
+}
+
+static inline int
+futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+{
+	int prev, cmp;
+
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	__asm__ __volatile__ (
+		__ASM_SMP_MB
+	"1:	ldl_l	%0,0(%2)\n"
+	"	cmpeq	%0,%3,%1\n"
+	"	beq	%1,3f\n"
+	"	mov	%4,%1\n"
+	"2:	stl_c	%1,0(%2)\n"
+	"	beq	%1,4f\n"
+	"3:	.subsection 2\n"
+	"4:	br	1b\n"
+	"	.previous\n"
+	"	.section __ex_table,\"a\"\n"
+	"	.long	1b-.\n"
+	"	lda	$31,3b-1b(%0)\n"
+	"	.long	2b-.\n"
+	"	lda	$31,3b-2b(%0)\n"
+	"	.previous\n"
+	:	"=&r"(prev), "=&r"(cmp)
+	:	"r"(uaddr), "r"((long)oldval), "r"(newval)
+	:	"memory");
+
+	return prev;
+}
+
+#endif /* __KERNEL__ */
+#endif /* _ASM_ALPHA_FUTEX_H */
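
For reference, a minimal userspace sketch (a hypothetical test program, not part of the patch) of how the shifts in futex_atomic_op_inuser() above recover the operation, the comparison, and the two sign-extended 12-bit arguments from the packed encoded_op word; the field layout and constant values are those of the FUTEX_OP() packing in <linux/futex.h>, and the shift trick relies on two's-complement arithmetic right shifts just as the kernel code does.

	/* Hypothetical demo, not kernel code: decode an encoded_op word the
	 * same way futex_atomic_op_inuser() does. */
	#include <stdio.h>

	int main(void)
	{
		/* Example: FUTEX_OP_ADD (1), oparg = 1, FUTEX_OP_CMP_GT (5),
		 * cmparg = 0, packed as op<<28 | cmp<<24 | oparg<<12 | cmparg. */
		int encoded_op = (1 << 28) | (5 << 24) | (1 << 12) | 0;

		int op     = (encoded_op >> 28) & 7;	/* arithmetic op	 */
		int cmp    = (encoded_op >> 24) & 15;	/* comparison		 */
		int oparg  = (encoded_op << 8) >> 20;	/* sign-extended 12 bits */
		int cmparg = (encoded_op << 20) >> 20;	/* sign-extended 12 bits */

		printf("op=%d cmp=%d oparg=%d cmparg=%d\n", op, cmp, oparg, cmparg);
		return 0;	/* prints: op=1 cmp=5 oparg=1 cmparg=0 */
	}

futex_atomic_cmpxchg_inatomic() likewise hands the previous value of *uaddr back to the generic futex code, which compares it against the expected oldval to decide whether the exchange actually took place.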