author		Will Deacon <will.deacon@arm.com>	2018-02-27 05:48:31 -0500
committer	Will Deacon <will.deacon@arm.com>	2018-03-27 08:15:29 -0400
commit		c9406e514b95e825af20967430786a234d2dcabd (patch)
tree		e0ca1dc1470d269a0975e8b14b0e9a6ff947c3a9
parent		e8a2d040fee54606ff62cc1f22e14ad9b2677f15 (diff)
arm64: move percpu cmpxchg implementation from cmpxchg.h to percpu.h
We want to avoid pulling linux/preempt.h into cmpxchg.h, since that can
introduce a circular dependency on linux/bitops.h. linux/preempt.h is
only needed by the per-cpu cmpxchg implementation, which is better off
alongside the per-cpu xchg implementation in percpu.h, so move it there
and add the missing #include.

Reported-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
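All of the macros being moved follow the same protect-with-preemption
pattern: disable preemption so the task cannot migrate away from the CPU
whose slot raw_cpu_ptr() resolved, perform a CPU-local cmpxchg, then
re-enable preemption. As a rough userspace sketch of that shape
(hypothetical names, not kernel code: a relaxed C11 compare-exchange
stands in for cmpxchg_local(), and preempt_disable()/preempt_enable()
appear only as comments since userspace has no equivalent):

/*
 * Userspace analogue of _protect_cmpxchg_local(); illustrative only.
 */
#include <stdatomic.h>
#include <stdio.h>

static _Atomic long percpu_slot;	/* stands in for one CPU's per-cpu slot */

static long protect_cmpxchg_local(long old, long new)
{
	/* preempt_disable(): in the kernel, pins us to the current CPU. */
	atomic_compare_exchange_strong_explicit(&percpu_slot, &old, new,
						memory_order_relaxed,
						memory_order_relaxed);
	/* preempt_enable() */
	return old;	/* like cmpxchg_local(), yields the value observed */
}

int main(void)
{
	long seen = protect_cmpxchg_local(0, 42);

	printf("old=%ld, slot=%ld\n", seen, atomic_load(&percpu_slot));
	return 0;
}

The relaxed memory order mirrors the "local" flavour: cmpxchg_local()
makes no cross-CPU ordering guarantees, which is why the preemption
bracket, rather than a barrier, is what makes the per-cpu variant safe.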
-rw-r--r--	arch/arm64/include/asm/cmpxchg.h	26 --------------------------
-rw-r--r--	arch/arm64/include/asm/percpu.h		29 +++++++++++++++++++++++++
2 files changed, 29 insertions(+), 26 deletions(-)
diff --git a/arch/arm64/include/asm/cmpxchg.h b/arch/arm64/include/asm/cmpxchg.h
index bc9e07bc6428..19d4a18c2ac8 100644
--- a/arch/arm64/include/asm/cmpxchg.h
+++ b/arch/arm64/include/asm/cmpxchg.h
@@ -196,32 +196,6 @@ __CMPXCHG_GEN(_mb)
 	__ret;								\
 })
 
-/* this_cpu_cmpxchg */
-#define _protect_cmpxchg_local(pcp, o, n)				\
-({									\
-	typeof(*raw_cpu_ptr(&(pcp))) __ret;				\
-	preempt_disable();						\
-	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);		\
-	preempt_enable();						\
-	__ret;								\
-})
-
-#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
-
-#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2)		\
-({									\
-	int __ret;							\
-	preempt_disable();						\
-	__ret = cmpxchg_double_local(	raw_cpu_ptr(&(ptr1)),		\
-					raw_cpu_ptr(&(ptr2)),		\
-					o1, o2, n1, n2);		\
-	preempt_enable();						\
-	__ret;								\
-})
-
 #define __CMPWAIT_CASE(w, sz, name)					\
 static inline void __cmpwait_case_##name(volatile void *ptr,		\
 					 unsigned long val)		\
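For orientation, a hedged sketch (hypothetical caller, not part of this
patch) of how the this_cpu_cmpxchg_* helpers are typically reached: the
generic this_cpu_cmpxchg() wrapper dispatches to the size-suffixed
variant, here claiming a per-CPU flag exactly once per CPU:

/* Hypothetical example; DEFINE_PER_CPU and this_cpu_cmpxchg() are the
 * generic kernel interfaces, but this caller is invented for illustration. */
DEFINE_PER_CPU(int, token_claimed);

static bool claim_local_token(void)
{
	/* Flip 0 -> 1 on this CPU's copy; the old value is returned,
	 * so seeing 0 means we won the claim on this CPU. */
	return this_cpu_cmpxchg(token_claimed, 0, 1) == 0;
}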
diff --git a/arch/arm64/include/asm/percpu.h b/arch/arm64/include/asm/percpu.h
index 43393208229e..9234013e759e 100644
--- a/arch/arm64/include/asm/percpu.h
+++ b/arch/arm64/include/asm/percpu.h
@@ -16,7 +16,10 @@
 #ifndef __ASM_PERCPU_H
 #define __ASM_PERCPU_H
 
+#include <linux/preempt.h>
+
 #include <asm/alternative.h>
+#include <asm/cmpxchg.h>
 #include <asm/stack_pointer.h>
 
 static inline void set_my_cpu_offset(unsigned long off)
@@ -197,6 +200,32 @@ static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
 	return ret;
 }
 
+/* this_cpu_cmpxchg */
+#define _protect_cmpxchg_local(pcp, o, n)				\
+({									\
+	typeof(*raw_cpu_ptr(&(pcp))) __ret;				\
+	preempt_disable();						\
+	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);		\
+	preempt_enable();						\
+	__ret;								\
+})
+
+#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
+
+#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2)		\
+({									\
+	int __ret;							\
+	preempt_disable();						\
+	__ret = cmpxchg_double_local(	raw_cpu_ptr(&(ptr1)),		\
+					raw_cpu_ptr(&(ptr2)),		\
+					o1, o2, n1, n2);		\
+	preempt_enable();						\
+	__ret;								\
+})
+
 #define _percpu_read(pcp)						\
 ({									\
 	typeof(pcp) __retval;						\