author     Tejun Heo <tj@kernel.org>  2014-06-17 19:12:39 -0400
committer  Tejun Heo <tj@kernel.org>  2014-06-17 19:12:39 -0400
commit     9c28278a24c01c0073fb89e53c1d2a605ab9587d (patch)
tree       e180627880bd5eaa5e1e8965abe583d0ee5ca989 /include/asm-generic/percpu.h
parent     a32f8d8eda8bd49017ac5f88e2b859f1f582557f (diff)
percpu: reorder macros in percpu header files
* In include/asm-generic/percpu.h, collect the {raw|_this}_cpu_generic_*() macros into one place. They were dispersed through the {raw|this}_cpu_*_N() definitions and the visual inconsistency was making following the code unnecessarily difficult.

* In include/linux/percpu-defs.h, move __verify_pcpu_ptr() later in the file so that it's right above the accessor definitions where it's actually used.

This is pure reorganization.

Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
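For readers skimming the diff below: these macros form the generic fallback layer behind the per-CPU accessors. An architecture only provides the size-specific operations it can implement more efficiently (raw_cpu_read_4, this_cpu_add_return_8, and so on); whatever it leaves undefined falls back, through the "# ifndef" blocks in this header, to the raw_cpu_generic_*() / _this_cpu_generic_*() helpers being collected here. The stand-alone sketch below illustrates only that override-or-fallback pattern; it is not kernel code, the my_* names are invented, and an ordinary variable stands in for real per-CPU data.

/* sketch.c - illustrative only; the my_* names are invented for this example. */
#include <stdio.h>

/* Generic fallback, shaped like raw_cpu_generic_add_return(). */
#define my_generic_add_return(var, val)		\
({						\
	(var) += (val);				\
	(var);					\
})

/*
 * An "arch" header could have defined an optimized 4-byte variant before
 * this point; since none exists here, the generic helper is used instead.
 */
#ifndef my_add_return_4
#define my_add_return_4(var, val)	my_generic_add_return(var, val)
#endif

int main(void)
{
	int counter = 40;

	/* Expands to the generic helper: adds 2, then yields the new value. */
	printf("%d\n", my_add_return_4(counter, 2));	/* prints 42 */
	return 0;
}

Built with gcc (statement expressions are a GNU extension), this prints 42. The kernel's real helpers differ in that they operate on per-CPU variables and, for the this_cpu_* family, wrap the access in raw_local_irq_save()/raw_local_irq_restore() or preempt_disable()/preempt_enable(), as the added lines in the diff show.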
Diffstat (limited to 'include/asm-generic/percpu.h')
-rw-r--r--  include/asm-generic/percpu.h | 198
1 file changed, 99 insertions(+), 99 deletions(-)
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 932ce602128f..2300d989087b 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -65,6 +65,105 @@ extern void setup_per_cpu_areas(void);
 #define PER_CPU_DEF_ATTRIBUTES
 #endif
 
+#define raw_cpu_generic_to_op(pcp, val, op) \
+do { \
+	*raw_cpu_ptr(&(pcp)) op val; \
+} while (0)
+
+#define raw_cpu_generic_add_return(pcp, val) \
+({ \
+	raw_cpu_add(pcp, val); \
+	raw_cpu_read(pcp); \
+})
+
+#define raw_cpu_generic_xchg(pcp, nval) \
+({	typeof(pcp) ret__; \
+	ret__ = raw_cpu_read(pcp); \
+	raw_cpu_write(pcp, nval); \
+	ret__; \
+})
+
+#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+	typeof(pcp) ret__; \
+	ret__ = raw_cpu_read(pcp); \
+	if (ret__ == (oval)) \
+		raw_cpu_write(pcp, nval); \
+	ret__; \
+})
+
+#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+	int __ret = 0; \
+	if (raw_cpu_read(pcp1) == (oval1) && \
+	    raw_cpu_read(pcp2) == (oval2)) { \
+		raw_cpu_write(pcp1, (nval1)); \
+		raw_cpu_write(pcp2, (nval2)); \
+		__ret = 1; \
+	} \
+	(__ret); \
+})
+
+#define _this_cpu_generic_read(pcp) \
+({	typeof(pcp) ret__; \
+	preempt_disable(); \
+	ret__ = *this_cpu_ptr(&(pcp)); \
+	preempt_enable(); \
+	ret__; \
+})
+
+#define _this_cpu_generic_to_op(pcp, val, op) \
+do { \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	*raw_cpu_ptr(&(pcp)) op val; \
+	raw_local_irq_restore(flags); \
+} while (0)
+
+#define _this_cpu_generic_add_return(pcp, val) \
+({ \
+	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	raw_cpu_add(pcp, val); \
+	ret__ = raw_cpu_read(pcp); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+#define _this_cpu_generic_xchg(pcp, nval) \
+({	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_read(pcp); \
+	raw_cpu_write(pcp, nval); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_read(pcp); \
+	if (ret__ == (oval)) \
+		raw_cpu_write(pcp, nval); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+	int ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
+			oval1, oval2, nval1, nval2); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
 # ifndef raw_cpu_read_1
 # define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
 # endif
@@ -78,11 +177,6 @@ extern void setup_per_cpu_areas(void);
 # define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
 # endif
 
-#define raw_cpu_generic_to_op(pcp, val, op) \
-do { \
-	*raw_cpu_ptr(&(pcp)) op val; \
-} while (0)
-
 # ifndef raw_cpu_write_1
 # define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
 # endif
@@ -135,12 +229,6 @@ do { \
 # define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
 # endif
 
-#define raw_cpu_generic_add_return(pcp, val) \
-({ \
-	raw_cpu_add(pcp, val); \
-	raw_cpu_read(pcp); \
-})
-
 # ifndef raw_cpu_add_return_1
 # define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
 # endif
@@ -154,13 +242,6 @@ do { \
 # define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
 # endif
 
-#define raw_cpu_generic_xchg(pcp, nval) \
-({	typeof(pcp) ret__; \
-	ret__ = raw_cpu_read(pcp); \
-	raw_cpu_write(pcp, nval); \
-	ret__; \
-})
-
 # ifndef raw_cpu_xchg_1
 # define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
 # endif
@@ -174,15 +255,6 @@ do { \
 # define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
 # endif
 
-#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
-({ \
-	typeof(pcp) ret__; \
-	ret__ = raw_cpu_read(pcp); \
-	if (ret__ == (oval)) \
-		raw_cpu_write(pcp, nval); \
-	ret__; \
-})
-
 # ifndef raw_cpu_cmpxchg_1
 # define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
@@ -196,18 +268,6 @@ do { \
 # define raw_cpu_cmpxchg_8(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
 
-#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
-({ \
-	int __ret = 0; \
-	if (raw_cpu_read(pcp1) == (oval1) && \
-	    raw_cpu_read(pcp2) == (oval2)) { \
-		raw_cpu_write(pcp1, (nval1)); \
-		raw_cpu_write(pcp2, (nval2)); \
-		__ret = 1; \
-	} \
-	(__ret); \
-})
-
 # ifndef raw_cpu_cmpxchg_double_1
 # define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
@@ -225,14 +285,6 @@ do { \
 	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
 # endif
 
-#define _this_cpu_generic_read(pcp) \
-({	typeof(pcp) ret__; \
-	preempt_disable(); \
-	ret__ = *this_cpu_ptr(&(pcp)); \
-	preempt_enable(); \
-	ret__; \
-})
-
 # ifndef this_cpu_read_1
 # define this_cpu_read_1(pcp) _this_cpu_generic_read(pcp)
 # endif
@@ -246,14 +298,6 @@ do { \
 # define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp)
 # endif
 
-#define _this_cpu_generic_to_op(pcp, val, op) \
-do { \
-	unsigned long flags; \
-	raw_local_irq_save(flags); \
-	*raw_cpu_ptr(&(pcp)) op val; \
-	raw_local_irq_restore(flags); \
-} while (0)
-
 # ifndef this_cpu_write_1
 # define this_cpu_write_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
 # endif
@@ -306,17 +350,6 @@ do { \
 # define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
 # endif
 
-#define _this_cpu_generic_add_return(pcp, val) \
-({ \
-	typeof(pcp) ret__; \
-	unsigned long flags; \
-	raw_local_irq_save(flags); \
-	raw_cpu_add(pcp, val); \
-	ret__ = raw_cpu_read(pcp); \
-	raw_local_irq_restore(flags); \
-	ret__; \
-})
-
 # ifndef this_cpu_add_return_1
 # define this_cpu_add_return_1(pcp, val) _this_cpu_generic_add_return(pcp, val)
 # endif
@@ -330,16 +363,6 @@ do { \
 # define this_cpu_add_return_8(pcp, val) _this_cpu_generic_add_return(pcp, val)
 # endif
 
-#define _this_cpu_generic_xchg(pcp, nval) \
-({	typeof(pcp) ret__; \
-	unsigned long flags; \
-	raw_local_irq_save(flags); \
-	ret__ = raw_cpu_read(pcp); \
-	raw_cpu_write(pcp, nval); \
-	raw_local_irq_restore(flags); \
-	ret__; \
-})
-
 # ifndef this_cpu_xchg_1
 # define this_cpu_xchg_1(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
 # endif
@@ -353,18 +376,6 @@ do { \
 # define this_cpu_xchg_8(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
 # endif
 
-#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
-({ \
-	typeof(pcp) ret__; \
-	unsigned long flags; \
-	raw_local_irq_save(flags); \
-	ret__ = raw_cpu_read(pcp); \
-	if (ret__ == (oval)) \
-		raw_cpu_write(pcp, nval); \
-	raw_local_irq_restore(flags); \
-	ret__; \
-})
-
 # ifndef this_cpu_cmpxchg_1
 # define this_cpu_cmpxchg_1(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
@@ -378,17 +389,6 @@ do { \
 # define this_cpu_cmpxchg_8(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
 
-#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
-({ \
-	int ret__; \
-	unsigned long flags; \
-	raw_local_irq_save(flags); \
-	ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
-			oval1, oval2, nval1, nval2); \
-	raw_local_irq_restore(flags); \
-	ret__; \
-})
-
 # ifndef this_cpu_cmpxchg_double_1
 # define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)