author     Gerald Schaefer <geraldsc@de.ibm.com>        2006-09-20 09:59:42 -0400
committer  Martin Schwidefsky <schwidefsky@de.ibm.com>  2006-09-20 09:59:42 -0400
commit     d02765d1af743567398eb6d523dea0ba5e5e7e8e (patch)
tree       9a39c21d9924a8d81ce85254cd3d013dbe50d23e /include/asm-s390/futex.h
parent     6837a8c352efcc5efc70424e9bfd94ff9bfa9a47 (diff)
[S390] Make user-copy operations run-time configurable.
Introduces a struct uaccess_ops which allows setting user-copy
operations at run-time.
Signed-off-by: Gerald Schaefer <geraldsc@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
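
Note for readers of this page: the diff below only shows the futex call sites, not the ops structure itself. As a rough sketch of the mechanism the commit message describes, the run-time table could look like the following. The struct tag and the two futex hook signatures are inferred from the calls visible in this diff; the copy_from_user/copy_to_user members, the #define of __user, and the standalone include are illustrative assumptions for the sketch, not code from this patch.

/* Sketch only: shape inferred from the commit message and the calls in
 * this diff; members other than the two futex hooks are assumptions. */
#include <stddef.h>

#ifndef __user
#define __user		/* sparse annotation; empty outside the kernel */
#endif

struct uaccess_ops {
	size_t (*copy_from_user)(size_t n, const void __user *from, void *to);	/* assumed */
	size_t (*copy_to_user)(size_t n, void __user *to, const void *from);	/* assumed */
	int (*futex_atomic_op)(int op, int __user *uaddr, int oparg, int *old);
	int (*futex_atomic_cmpxchg)(int __user *uaddr, int oldval, int newval);
};

extern struct uaccess_ops uaccess;	/* one global instance, selected at run time */

Because uaccess would be a single global table of function pointers, switching the user-copy implementation becomes a plain structure assignment at boot rather than a compile-time choice.
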
Diffstat (limited to 'include/asm-s390/futex.h')
 -rw-r--r--  include/asm-s390/futex.h | 87
 1 file changed, 7 insertions, 80 deletions
diff --git a/include/asm-s390/futex.h b/include/asm-s390/futex.h
index ffedf14f89f6..5e261e1de671 100644
--- a/include/asm-s390/futex.h
+++ b/include/asm-s390/futex.h
@@ -7,75 +7,21 @@
 #include <asm/errno.h>
 #include <asm/uaccess.h>
 
-#ifndef __s390x__
-#define __futex_atomic_fixup \
-	".section __ex_table,\"a\"\n" \
-	"   .align 4\n" \
-	"   .long 0b,4b,2b,4b,3b,4b\n" \
-	".previous"
-#else /* __s390x__ */
-#define __futex_atomic_fixup \
-	".section __ex_table,\"a\"\n" \
-	"   .align 8\n" \
-	"   .quad 0b,4b,2b,4b,3b,4b\n" \
-	".previous"
-#endif /* __s390x__ */
-
-#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
-	asm volatile("   sacf 256\n" \
-		     "0: l   %1,0(%6)\n" \
-		     "1: " insn \
-		     "2: cs  %1,%2,0(%6)\n" \
-		     "3: jl  1b\n" \
-		     "   lhi %0,0\n" \
-		     "4: sacf 0\n" \
-		     __futex_atomic_fixup \
-		     : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
-		       "=m" (*uaddr) \
-		     : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
-		       "m" (*uaddr) : "cc" );
-
 static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
 {
 	int op = (encoded_op >> 28) & 7;
 	int cmp = (encoded_op >> 24) & 15;
 	int oparg = (encoded_op << 8) >> 20;
 	int cmparg = (encoded_op << 20) >> 20;
-	int oldval = 0, newval, ret;
+	int oldval, ret;
+
 	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
 		oparg = 1 << oparg;
 
 	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
 		return -EFAULT;
 
-	inc_preempt_count();
-
-	switch (op) {
-	case FUTEX_OP_SET:
-		__futex_atomic_op("lr %2,%5\n",
-				  ret, oldval, newval, uaddr, oparg);
-		break;
-	case FUTEX_OP_ADD:
-		__futex_atomic_op("lr %2,%1\nar %2,%5\n",
-				  ret, oldval, newval, uaddr, oparg);
-		break;
-	case FUTEX_OP_OR:
-		__futex_atomic_op("lr %2,%1\nor %2,%5\n",
-				  ret, oldval, newval, uaddr, oparg);
-		break;
-	case FUTEX_OP_ANDN:
-		__futex_atomic_op("lr %2,%1\nnr %2,%5\n",
-				  ret, oldval, newval, uaddr, oparg);
-		break;
-	case FUTEX_OP_XOR:
-		__futex_atomic_op("lr %2,%1\nxr %2,%5\n",
-				  ret, oldval, newval, uaddr, oparg);
-		break;
-	default:
-		ret = -ENOSYS;
-	}
-
-	dec_preempt_count();
+	ret = uaccess.futex_atomic_op(op, uaddr, oparg, &oldval);
 
 	if (!ret) {
 		switch (cmp) {
@@ -91,32 +37,13 @@ static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
 	return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr,
+						int oldval, int newval)
 {
-	int ret;
-
 	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
 		return -EFAULT;
-	asm volatile("   sacf 256\n"
-		     "   cs  %1,%4,0(%5)\n"
-		     "0: lr  %0,%1\n"
-		     "1: sacf 0\n"
-#ifndef __s390x__
-		     ".section __ex_table,\"a\"\n"
-		     "   .align 4\n"
-		     "   .long 0b,1b\n"
-		     ".previous"
-#else /* __s390x__ */
-		     ".section __ex_table,\"a\"\n"
-		     "   .align 8\n"
-		     "   .quad 0b,1b\n"
-		     ".previous"
-#endif /* __s390x__ */
-		     : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
-		     : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
-		     : "cc", "memory" );
-	return oldval;
+
+	return uaccess.futex_atomic_cmpxchg(uaddr, oldval, newval);
 }
 
 #endif /* __KERNEL__ */
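
The net effect of the patch above is that the futex inlines no longer embed their own inline assembly and exception-table fixups; they call through whatever implementation was installed in the global uaccess table. A minimal user-space sketch of that dispatch pattern follows; every name in it is illustrative and none of it is taken from the kernel tree.

/* User-space sketch of the dispatch pattern: a global ops table is filled
 * in once at start-up, and every later call goes through it. */
#include <stdio.h>

struct futex_ops {
	int (*atomic_op)(int op, int *uaddr, int oparg, int *old);
};

static int op_set(int op, int *uaddr, int oparg, int *old)
{
	(void)op;		/* only a FUTEX_OP_SET-like operation is modelled */
	*old = *uaddr;		/* pretend-atomic: fine for a single-threaded demo */
	*uaddr = oparg;
	return 0;
}

static struct futex_ops ops;	/* the run-time configurable table */

int main(void)
{
	int word = 7, old;

	ops.atomic_op = op_set;			/* "boot-time" selection */
	ops.atomic_op(0, &word, 42, &old);	/* callers never name the backend */
	printf("old=%d new=%d\n", old, word);	/* prints: old=7 new=42 */
	return 0;
}

In the kernel, the table would be filled in once during architecture setup, so the helpers in futex.h stay implementation-agnostic.
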