Diffstat (limited to 'arch/arc/include')
-rw-r--r--  arch/arc/include/asm/Kbuild          |  1
-rw-r--r--  arch/arc/include/asm/bitops.h        | 35
-rw-r--r--  arch/arc/include/asm/futex.h         | 48
-rw-r--r--  arch/arc/include/asm/mm-arch-hooks.h | 15
-rw-r--r--  arch/arc/include/asm/ptrace.h        |  2
5 files changed, 53 insertions(+), 48 deletions(-)
diff --git a/arch/arc/include/asm/Kbuild b/arch/arc/include/asm/Kbuild
index 1a80cc91a03b..7611b10a2d23 100644
--- a/arch/arc/include/asm/Kbuild
+++ b/arch/arc/include/asm/Kbuild
@@ -22,6 +22,7 @@ generic-y += kvm_para.h
 generic-y += local.h
 generic-y += local64.h
 generic-y += mcs_spinlock.h
+generic-y += mm-arch-hooks.h
 generic-y += mman.h
 generic-y += msgbuf.h
 generic-y += param.h
diff --git a/arch/arc/include/asm/bitops.h b/arch/arc/include/asm/bitops.h
index 99fe118d3730..57c1f33844d4 100644
--- a/arch/arc/include/asm/bitops.h
+++ b/arch/arc/include/asm/bitops.h
@@ -50,8 +50,7 @@ static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
	 * done for const @nr, but no code is generated due to gcc	\
	 * const prop.							\
	 */								\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
+	nr &= 0x1f;							\
									\
	__asm__ __volatile__(						\
	"1:	llock       %0, [%1]		\n"			\
@@ -82,8 +81,7 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
									\
	m += nr >> 5;							\
									\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
+	nr &= 0x1f;							\
									\
	/*								\
	 * Explicit full memory barrier needed before/after as		\
@@ -129,16 +127,13 @@ static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
	unsigned long temp, flags;					\
	m += nr >> 5;							\
									\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
-									\
	/*								\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */								\
	bitops_lock(flags);						\
									\
	temp = *m;							\
-	*m = temp c_op (1UL << nr);					\
+	*m = temp c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
 }
@@ -149,17 +144,14 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
	unsigned long old, flags;					\
	m += nr >> 5;							\
									\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
-									\
	bitops_lock(flags);						\
									\
	old = *m;							\
-	*m = old c_op (1 << nr);					\
+	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
									\
-	return (old & (1 << nr)) != 0;					\
+	return (old & (1UL << (nr & 0x1f))) != 0;			\
 }
 
 #endif /* CONFIG_ARC_HAS_LLSC */
@@ -174,11 +166,8 @@ static inline void __##op##_bit(unsigned long nr, volatile unsigned long *m) \
	unsigned long temp;						\
	m += nr >> 5;							\
									\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
-									\
	temp = *m;							\
-	*m = temp c_op (1UL << nr);					\
+	*m = temp c_op (1UL << (nr & 0x1f));				\
 }
 
 #define __TEST_N_BIT_OP(op, c_op, asm_op)				\
@@ -187,13 +176,10 @@ static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long
	unsigned long old;						\
	m += nr >> 5;							\
									\
-	if (__builtin_constant_p(nr))					\
-		nr &= 0x1f;						\
-									\
	old = *m;							\
-	*m = old c_op (1 << nr);					\
+	*m = old c_op (1UL << (nr & 0x1f));				\
									\
-	return (old & (1 << nr)) != 0;					\
+	return (old & (1UL << (nr & 0x1f))) != 0;			\
 }
 
 #define BIT_OPS(op, c_op, asm_op)					\
@@ -224,10 +210,7 @@ test_bit(unsigned int nr, const volatile unsigned long *addr)
 
	addr += nr >> 5;
 
-	if (__builtin_constant_p(nr))
-		nr &= 0x1f;
-
-	mask = 1 << nr;
+	mask = 1UL << (nr & 0x1f);
 
	return ((mask & *addr) != 0);
 }
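Note on the bitops.h hunks above: the old `if (__builtin_constant_p(nr))` guard only reduced @nr modulo 32 when it was a compile-time constant, so a variable @nr ended up as an unreduced shift count; the new code folds the `& 0x1f` into the shift unconditionally while the word is still selected by `nr >> 5`. A minimal user-space sketch of the resulting mask computation (the helper name and the test are illustrative, not kernel code):

#include <assert.h>
#include <stdio.h>

/*
 * Hypothetical helper, not the kernel macro: it only illustrates that
 * folding the "& 0x1f" into the shift keeps the shift count in 0..31
 * for any nr, constant or not, while the word is picked by nr >> 5.
 */
static unsigned long bit_mask(unsigned int nr)
{
	return 1UL << (nr & 0x1f);
}

int main(void)
{
	unsigned int nr = 37;	/* bit 5 of word 1: 37 >> 5 == 1, 37 & 0x1f == 5 */

	assert(bit_mask(nr) == (1UL << 5));
	printf("word %u, mask 0x%lx\n", nr >> 5, bit_mask(nr));
	return 0;
}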
diff --git a/arch/arc/include/asm/futex.h b/arch/arc/include/asm/futex.h
index 05b5aaf5b0f9..70cfe16b742d 100644
--- a/arch/arc/include/asm/futex.h
+++ b/arch/arc/include/asm/futex.h
@@ -16,12 +16,40 @@
 #include <linux/uaccess.h>
 #include <asm/errno.h>
 
+#ifdef CONFIG_ARC_HAS_LLSC
+
+#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
+							\
+	__asm__ __volatile__(				\
+	"1:	llock	%1, [%2]		\n"	\
+		insn				"\n"	\
+	"2:	scond	%0, [%2]		\n"	\
+	"	bnz	1b			\n"	\
+	"	mov %0, 0			\n"	\
+	"3:					\n"	\
+	"	.section .fixup,\"ax\"		\n"	\
+	"	.align  4			\n"	\
+	"4:	mov %0, %4			\n"	\
+	"	b   3b				\n"	\
+	"	.previous			\n"	\
+	"	.section __ex_table,\"a\"	\n"	\
+	"	.align  4			\n"	\
+	"	.word   1b, 4b			\n"	\
+	"	.word   2b, 4b			\n"	\
+	"	.previous			\n"	\
+							\
+	: "=&r" (ret), "=&r" (oldval)			\
+	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
+	: "cc", "memory")
+
+#else	/* !CONFIG_ARC_HAS_LLSC */
+
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
 							\
	__asm__ __volatile__(				\
	"1:	ld	%1, [%2]		\n"	\
		insn				"\n"	\
	"2:	st	%0, [%2]		\n"	\
	"	mov %0, 0			\n"	\
	"3:					\n"	\
	"	.section .fixup,\"ax\"		\n"	\
@@ -39,6 +67,8 @@
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory")
 
+#endif
+
 static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 {
	int op = (encoded_op >> 28) & 7;
@@ -123,11 +153,17 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
 
	pagefault_disable();
 
-	/* TBD : can use llock/scond */
	__asm__ __volatile__(
-	"1:	ld	%0, [%3]	\n"
-	"	brne	%0, %1, 3f	\n"
-	"2:	st	%2, [%3]	\n"
+#ifdef CONFIG_ARC_HAS_LLSC
+	"1:	llock	%0, [%3]	\n"
+	"	brne	%0, %1, 3f	\n"
+	"2:	scond	%2, [%3]	\n"
+	"	bnz	1b		\n"
+#else
+	"1:	ld	%0, [%3]	\n"
+	"	brne	%0, %1, 3f	\n"
+	"2:	st	%2, [%3]	\n"
+#endif
	"3:	\n"
	"	.section .fixup,\"ax\"	\n"
	"4:	mov %0, %4	\n"
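Note on the futex.h hunks above: with CONFIG_ARC_HAS_LLSC, the plain ld/st sequence is replaced by an llock/scond pair, and `bnz 1b` retries from the load whenever the store-conditional fails. A rough user-space model of that retry loop, using GCC's `__atomic` builtins as a stand-in for the ARC instructions (the function name and the add operation are illustrative, not the kernel code):

#include <stdint.h>
#include <stdio.h>

/*
 * Rough model of the llock/scond loop: load the old value, compute the
 * new one, and retry the whole sequence if someone raced in between
 * (the scond writing a failure status, caught by "bnz 1b" above).
 */
static int futex_op_add_model(uint32_t *uaddr, uint32_t oparg, uint32_t *oldval)
{
	uint32_t old, new;

	do {
		old = __atomic_load_n(uaddr, __ATOMIC_RELAXED);		/* ~llock */
		new = old + oparg;					/* insn   */
	} while (!__atomic_compare_exchange_n(uaddr, &old, new, 0,	/* ~scond */
					      __ATOMIC_RELAXED,
					      __ATOMIC_RELAXED));	/* bnz 1b */

	*oldval = old;
	return 0;	/* success path, like "mov %0, 0" */
}

int main(void)
{
	uint32_t futex_word = 40, old;

	futex_op_add_model(&futex_word, 2, &old);
	printf("old=%u new=%u\n", old, futex_word);	/* old=40 new=42 */
	return 0;
}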
diff --git a/arch/arc/include/asm/mm-arch-hooks.h b/arch/arc/include/asm/mm-arch-hooks.h
deleted file mode 100644
index c37541c5f8ba..000000000000
--- a/arch/arc/include/asm/mm-arch-hooks.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Architecture specific mm hooks
- *
- * Copyright (C) 2015, IBM Corporation
- * Author: Laurent Dufour <ldufour@linux.vnet.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- */
-
-#ifndef _ASM_ARC_MM_ARCH_HOOKS_H
-#define _ASM_ARC_MM_ARCH_HOOKS_H
-
-#endif /* _ASM_ARC_MM_ARCH_HOOKS_H */
diff --git a/arch/arc/include/asm/ptrace.h b/arch/arc/include/asm/ptrace.h
index 91755972b9a2..91694ec1ce95 100644
--- a/arch/arc/include/asm/ptrace.h
+++ b/arch/arc/include/asm/ptrace.h
@@ -106,7 +106,7 @@ struct callee_regs {
	long r25, r24, r23, r22, r21, r20, r19, r18, r17, r16, r15, r14, r13;
 };
 
-#define instruction_pointer(regs)	((regs)->ret)
+#define instruction_pointer(regs)	(unsigned long)((regs)->ret)
 #define profile_pc(regs)	instruction_pointer(regs)
 
 /* return 1 if user mode or 0 if kernel mode */
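Note on the ptrace.h hunk: the register fields are declared as plain (signed) long, while generic users of instruction_pointer() such as profile_pc() expect an unsigned long, so the cast now lives in the macro rather than at each call site. A tiny sketch of the macro's effect (the struct here is a stand-in, not the kernel's pt_regs):

#include <stdio.h>

/*
 * Stand-in struct, not the kernel's pt_regs: it only shows that with a
 * signed field, the cast inside the macro is what hands callers a clean
 * unsigned long program counter, even for addresses that would read as
 * negative on a 32-bit target.
 */
struct fake_regs {
	long ret;
};

#define instruction_pointer(regs)	(unsigned long)((regs)->ret)

int main(void)
{
	struct fake_regs regs = { .ret = (long)0x80001234UL };

	printf("pc = 0x%lx\n", instruction_pointer(&regs));
	return 0;
}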