Diffstat (limited to 'include/asm-s390')
-rw-r--r--	include/asm-s390/cache.h	2
-rw-r--r--	include/asm-s390/futex.h	123
2 files changed, 121 insertions(+), 4 deletions(-)
diff --git a/include/asm-s390/cache.h b/include/asm-s390/cache.h
index e20cdd9074db..cdf431b061bb 100644
--- a/include/asm-s390/cache.h
+++ b/include/asm-s390/cache.h
@@ -16,4 +16,6 @@
 
 #define ARCH_KMALLOC_MINALIGN	8
 
+#define __read_mostly __attribute__((__section__(".data.read_mostly")))
+
 #endif
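The only functional change to cache.h is the new __read_mostly annotation, which places a variable in the .data.read_mostly section so that data written rarely but read frequently stays off cache lines that are written often. A minimal, hypothetical usage sketch (kernel context assumed; the variable name is made up for illustration):

/*
 * Hypothetical kernel-code sketch: a value initialised once at boot and
 * then only read on hot paths.  With the annotation added above it is
 * emitted into the .data.read_mostly section rather than ordinary .data.
 */
#include <linux/cache.h>	/* pulls in asm/cache.h and __read_mostly */

static unsigned long boot_option __read_mostly;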
diff --git a/include/asm-s390/futex.h b/include/asm-s390/futex.h
index 6a332a9f099c..40c25e166a9b 100644
--- a/include/asm-s390/futex.h
+++ b/include/asm-s390/futex.h
@@ -1,6 +1,121 @@
-#ifndef _ASM_FUTEX_H
-#define _ASM_FUTEX_H
+#ifndef _ASM_S390_FUTEX_H
+#define _ASM_S390_FUTEX_H
 
-#include <asm-generic/futex.h>
+#ifdef __KERNEL__
 
-#endif
+#include <linux/futex.h>
+#include <asm/errno.h>
+#include <asm/uaccess.h>
+
+#ifndef __s390x__
+#define __futex_atomic_fixup \
+		     ".section __ex_table,\"a\"\n" \
+		     "   .align 4\n" \
+		     "   .long  0b,2b,1b,2b\n" \
+		     ".previous"
+#else /* __s390x__ */
+#define __futex_atomic_fixup \
+		     ".section __ex_table,\"a\"\n" \
+		     "   .align 8\n" \
+		     "   .quad  0b,2b,1b,2b\n" \
+		     ".previous"
+#endif /* __s390x__ */
+
+#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
+	asm volatile("   l   %1,0(%6)\n" \
+		     "0: " insn \
+		     "   cs  %1,%2,0(%6)\n" \
+		     "1: jl  0b\n" \
+		     "   lhi %0,0\n" \
+		     "2:\n" \
+		     __futex_atomic_fixup \
+		     : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
+		       "=m" (*uaddr) \
+		     : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
+		       "m" (*uaddr) : "cc" );
+
+static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
+{
+	int op = (encoded_op >> 28) & 7;
+	int cmp = (encoded_op >> 24) & 15;
+	int oparg = (encoded_op << 8) >> 20;
+	int cmparg = (encoded_op << 20) >> 20;
+	int oldval = 0, newval, ret;
+	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+		oparg = 1 << oparg;
+
+	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+
+	inc_preempt_count();
+
+	switch (op) {
+	case FUTEX_OP_SET:
+		__futex_atomic_op("lr %2,%5\n",
+				  ret, oldval, newval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ADD:
+		__futex_atomic_op("lr %2,%1\nar %2,%5\n",
+				  ret, oldval, newval, uaddr, oparg);
+		break;
+	case FUTEX_OP_OR:
+		__futex_atomic_op("lr %2,%1\nor %2,%5\n",
+				  ret, oldval, newval, uaddr, oparg);
+		break;
+	case FUTEX_OP_ANDN:
+		__futex_atomic_op("lr %2,%1\nnr %2,%5\n",
+				  ret, oldval, newval, uaddr, oparg);
+		break;
+	case FUTEX_OP_XOR:
+		__futex_atomic_op("lr %2,%1\nxr %2,%5\n",
+				  ret, oldval, newval, uaddr, oparg);
+		break;
+	default:
+		ret = -ENOSYS;
+	}
+
+	dec_preempt_count();
+
+	if (!ret) {
+		switch (cmp) {
+		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
+		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
+		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
+		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
+		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
+		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+		default: ret = -ENOSYS;
+		}
+	}
+	return ret;
+}
+
+static inline int
+futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+{
+	int ret;
+
+	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
+		return -EFAULT;
+	asm volatile("   cs   %1,%4,0(%5)\n"
+		     "0: lr   %0,%1\n"
+		     "1:\n"
+#ifndef __s390x__
+		     ".section __ex_table,\"a\"\n"
+		     "   .align 4\n"
+		     "   .long  0b,1b\n"
+		     ".previous"
+#else /* __s390x__ */
+		     ".section __ex_table,\"a\"\n"
+		     "   .align 8\n"
+		     "   .quad  0b,1b\n"
+		     ".previous"
+#endif /* __s390x__ */
+		     : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
+		     : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
+		     : "cc", "memory" );
+	return oldval;
+}
+
+#endif /* __KERNEL__ */
+#endif /* _ASM_S390_FUTEX_H */
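As an aside, the bit layout that futex_atomic_op_inuser() unpacks from encoded_op (operation in bits 28-30, comparison in bits 24-27, and two sign-extended 12-bit arguments below that) and the final comparison against the old value can be checked with a small user-space model. This is only a sketch of the decode and compare steps visible in the diff above, not kernel code; it deliberately omits the atomic compare-and-swap loop, and the FUTEX_OP_CMP_* values are assumed to match <linux/futex.h>.

/*
 * Stand-alone model of the encoded_op decoding and the comparison step in
 * futex_atomic_op_inuser() above.  Illustration only: the atomic update
 * (the "cs" loop) and the user-space access checks are not modelled.
 */
#include <stdio.h>

enum {					/* assumed to match <linux/futex.h> */
	FUTEX_OP_CMP_EQ = 0,
	FUTEX_OP_CMP_NE = 1,
	FUTEX_OP_CMP_LT = 2,
	FUTEX_OP_CMP_GE = 3,
	FUTEX_OP_CMP_LE = 4,
	FUTEX_OP_CMP_GT = 5,
};

/* Decode the fields the same way the kernel code does. */
static void decode(int encoded_op, int *op, int *cmp, int *oparg, int *cmparg)
{
	*op = (encoded_op >> 28) & 7;		/* bits 28..30: operation */
	*cmp = (encoded_op >> 24) & 15;		/* bits 24..27: comparison */
	*oparg = (encoded_op << 8) >> 20;	/* bits 12..23, sign-extended */
	*cmparg = (encoded_op << 20) >> 20;	/* bits 0..11, sign-extended */
}

/* The comparison applied to the value read before the atomic operation. */
static int compare(int cmp, int oldval, int cmparg)
{
	switch (cmp) {
	case FUTEX_OP_CMP_EQ: return oldval == cmparg;
	case FUTEX_OP_CMP_NE: return oldval != cmparg;
	case FUTEX_OP_CMP_LT: return oldval < cmparg;
	case FUTEX_OP_CMP_GE: return oldval >= cmparg;
	case FUTEX_OP_CMP_LE: return oldval <= cmparg;
	case FUTEX_OP_CMP_GT: return oldval > cmparg;
	default: return -1;			/* -ENOSYS in the kernel */
	}
}

int main(void)
{
	/* Hypothetical encoding: op 1 in bits 28..30, CMP_GT, oparg 1, cmparg 0. */
	int encoded_op = (1 << 28) | (FUTEX_OP_CMP_GT << 24) | (1 << 12) | 0;
	int op, cmp, oparg, cmparg;

	decode(encoded_op, &op, &cmp, &oparg, &cmparg);
	printf("op=%d cmp=%d oparg=%d cmparg=%d -> wake if %d\n",
	       op, cmp, oparg, cmparg, compare(cmp, /* oldval */ 1, cmparg));
	return 0;
}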