author	Linus Torvalds <torvalds@linux-foundation.org>	2014-08-04 13:09:27 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2014-08-04 13:09:27 -0400
commit	f2a84170ede80e4b80f636e3700ef4d4d5dc7d33 (patch)
tree	68a51fd83da747173200e06b046cbeddd80251a4 /include/asm-generic
parent	c4c3f5fba01e189fb3618f09545abdb4cf8ec8ee (diff)
parent	2d7227828e1475c7b272e55bd70c4cec8eea219a (diff)
Merge branch 'for-3.17' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu
Pull percpu updates from Tejun Heo:

 - Major reorganization of percpu header files which I think makes
   things a lot more readable and logical than before.

 - percpu-refcount is updated so that it requires explicit destruction
   and can be reinitialized if necessary.  This was pulled into the
   block tree to replace the custom percpu refcnting implemented in
   blk-mq.

 - In the process, percpu and percpu-refcount got cleaned up a bit

* 'for-3.17' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu: (21 commits)
  percpu-refcount: implement percpu_ref_reinit() and percpu_ref_is_zero()
  percpu-refcount: require percpu_ref to be exited explicitly
  percpu-refcount: use unsigned long for pcpu_count pointer
  percpu-refcount: add helpers for ->percpu_count accesses
  percpu-refcount: one bit is enough for REF_STATUS
  percpu-refcount, aio: use percpu_ref_cancel_init() in ioctx_alloc()
  workqueue: stronger test in process_one_work()
  workqueue: clear POOL_DISASSOCIATED in rebind_workers()
  percpu: Use ALIGN macro instead of hand coding alignment calculation
  percpu: invoke __verify_pcpu_ptr() from the generic part of accessors and operations
  percpu: preffity percpu header files
  percpu: use raw_cpu_*() to define __this_cpu_*()
  percpu: reorder macros in percpu header files
  percpu: move {raw|this}_cpu_*() definitions to include/linux/percpu-defs.h
  percpu: move generic {raw|this}_cpu_*_N() definitions to include/asm-generic/percpu.h
  percpu: only allow sized arch overrides for {raw|this}_cpu_*() ops
  percpu: reorganize include/linux/percpu-defs.h
  percpu: move accessors from include/linux/percpu.h to percpu-defs.h
  percpu: include/asm-generic/percpu.h should contain only arch-overridable parts
  percpu: introduce arch_raw_cpu_ptr()
  ...
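For context on the percpu-refcount changes above, here is a minimal sketch of the reworked lifecycle (not taken from this pull; the ctx_ref/ctx_release names are hypothetical, and the percpu_ref_init() signature shown is the 3.17 one, before gfp/flags arguments were added in later releases):

	#include <linux/percpu-refcount.h>

	static struct percpu_ref ctx_ref;	/* hypothetical embedding object's ref */

	static void ctx_release(struct percpu_ref *ref)
	{
		/*
		 * Last reference is gone.  Destruction is now explicit: the
		 * release callback (or an init-failure path) is the usual
		 * place to call percpu_ref_exit() to free the percpu counter.
		 */
		percpu_ref_exit(ref);
	}

	static int ctx_lifecycle(void)
	{
		int ret;

		ret = percpu_ref_init(&ctx_ref, ctx_release);
		if (ret)
			return ret;

		percpu_ref_get(&ctx_ref);	/* cheap per-cpu get/put while live */
		percpu_ref_put(&ctx_ref);

		percpu_ref_kill(&ctx_ref);	/* drop initial ref, switch to atomic mode */

		/*
		 * Alternatively, a killed ref that has drained (ctx_release()
		 * ran, percpu_ref_is_zero() is true) can now be revived in
		 * place with percpu_ref_reinit() instead of being torn down
		 * and re-created from scratch.
		 */
		return 0;
	}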
Diffstat (limited to 'include/asm-generic')
-rw-r--r--	include/asm-generic/percpu.h	410
1 file changed, 351 insertions(+), 59 deletions(-)
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 0703aa75b5e8..4d9f233c4ba8 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -36,93 +36,385 @@ extern unsigned long __per_cpu_offset[NR_CPUS];
 #endif
 
 /*
- * Add a offset to a pointer but keep the pointer as is.
- *
- * Only S390 provides its own means of moving the pointer.
+ * Arch may define arch_raw_cpu_ptr() to provide more efficient address
+ * translations for raw_cpu_ptr().
  */
-#ifndef SHIFT_PERCPU_PTR
-/* Weird cast keeps both GCC and sparse happy. */
-#define SHIFT_PERCPU_PTR(__p, __offset) ({				\
-	__verify_pcpu_ptr((__p));					\
-	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
-})
+#ifndef arch_raw_cpu_ptr
+#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
 #endif
 
-/*
- * A percpu variable may point to a discarded regions. The following are
- * established ways to produce a usable pointer from the percpu variable
- * offset.
- */
-#define per_cpu(var, cpu) \
-	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))
-
-#ifndef raw_cpu_ptr
-#define raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
+#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
+extern void setup_per_cpu_areas(void);
 #endif
-#ifdef CONFIG_DEBUG_PREEMPT
-#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
+
+#endif	/* SMP */
+
+#ifndef PER_CPU_BASE_SECTION
+#ifdef CONFIG_SMP
+#define PER_CPU_BASE_SECTION ".data..percpu"
 #else
-#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
+#define PER_CPU_BASE_SECTION ".data"
+#endif
 #endif
 
-#define __get_cpu_var(var) (*this_cpu_ptr(&(var)))
-#define __raw_get_cpu_var(var) (*raw_cpu_ptr(&(var)))
+#ifndef PER_CPU_ATTRIBUTES
+#define PER_CPU_ATTRIBUTES
+#endif
 
-#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
-extern void setup_per_cpu_areas(void);
+#ifndef PER_CPU_DEF_ATTRIBUTES
+#define PER_CPU_DEF_ATTRIBUTES
 #endif
 
-#else /* ! SMP */
+#define raw_cpu_generic_to_op(pcp, val, op)				\
+do {									\
+	*raw_cpu_ptr(&(pcp)) op val;					\
+} while (0)
 
-#define VERIFY_PERCPU_PTR(__p) ({			\
-	__verify_pcpu_ptr((__p));			\
-	(typeof(*(__p)) __kernel __force *)(__p);	\
+#define raw_cpu_generic_add_return(pcp, val)				\
+({									\
+	raw_cpu_add(pcp, val);						\
+	raw_cpu_read(pcp);						\
 })
 
-#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
-#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
-#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
-#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
-#define raw_cpu_ptr(ptr)	this_cpu_ptr(ptr)
+#define raw_cpu_generic_xchg(pcp, nval)					\
+({									\
+	typeof(pcp) __ret;						\
+	__ret = raw_cpu_read(pcp);					\
+	raw_cpu_write(pcp, nval);					\
+	__ret;								\
+})
 
-#endif	/* SMP */
+#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
+({									\
+	typeof(pcp) __ret;						\
+	__ret = raw_cpu_read(pcp);					\
+	if (__ret == (oval))						\
+		raw_cpu_write(pcp, nval);				\
+	__ret;								\
+})
 
-#ifndef PER_CPU_BASE_SECTION
-#ifdef CONFIG_SMP
-#define PER_CPU_BASE_SECTION ".data..percpu"
-#else
-#define PER_CPU_BASE_SECTION ".data"
+#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({									\
+	int __ret = 0;							\
+	if (raw_cpu_read(pcp1) == (oval1) &&				\
+			raw_cpu_read(pcp2)  == (oval2)) {		\
+		raw_cpu_write(pcp1, nval1);				\
+		raw_cpu_write(pcp2, nval2);				\
+		__ret = 1;						\
+	}								\
+	(__ret);							\
+})
+
+#define this_cpu_generic_read(pcp)					\
+({									\
+	typeof(pcp) __ret;						\
+	preempt_disable();						\
+	__ret = *this_cpu_ptr(&(pcp));					\
+	preempt_enable();						\
+	__ret;								\
+})
+
+#define this_cpu_generic_to_op(pcp, val, op)				\
+do {									\
+	unsigned long __flags;						\
+	raw_local_irq_save(__flags);					\
+	*raw_cpu_ptr(&(pcp)) op val;					\
+	raw_local_irq_restore(__flags);					\
+} while (0)
+
+#define this_cpu_generic_add_return(pcp, val)				\
+({									\
+	typeof(pcp) __ret;						\
+	unsigned long __flags;						\
+	raw_local_irq_save(__flags);					\
+	raw_cpu_add(pcp, val);						\
+	__ret = raw_cpu_read(pcp);					\
+	raw_local_irq_restore(__flags);					\
+	__ret;								\
+})
+
+#define this_cpu_generic_xchg(pcp, nval)				\
+({									\
+	typeof(pcp) __ret;						\
+	unsigned long __flags;						\
+	raw_local_irq_save(__flags);					\
+	__ret = raw_cpu_read(pcp);					\
+	raw_cpu_write(pcp, nval);					\
+	raw_local_irq_restore(__flags);					\
+	__ret;								\
+})
+
+#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
+({									\
+	typeof(pcp) __ret;						\
+	unsigned long __flags;						\
+	raw_local_irq_save(__flags);					\
+	__ret = raw_cpu_read(pcp);					\
+	if (__ret == (oval))						\
+		raw_cpu_write(pcp, nval);				\
+	raw_local_irq_restore(__flags);					\
+	__ret;								\
+})
+
+#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({									\
+	int __ret;							\
+	unsigned long __flags;						\
+	raw_local_irq_save(__flags);					\
+	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
+			oval1, oval2, nval1, nval2);			\
+	raw_local_irq_restore(__flags);					\
+	__ret;								\
+})
+
+#ifndef raw_cpu_read_1
+#define raw_cpu_read_1(pcp)	(*raw_cpu_ptr(&(pcp)))
 #endif
+#ifndef raw_cpu_read_2
+#define raw_cpu_read_2(pcp)	(*raw_cpu_ptr(&(pcp)))
+#endif
+#ifndef raw_cpu_read_4
+#define raw_cpu_read_4(pcp)	(*raw_cpu_ptr(&(pcp)))
+#endif
+#ifndef raw_cpu_read_8
+#define raw_cpu_read_8(pcp)	(*raw_cpu_ptr(&(pcp)))
 #endif
 
-#ifdef CONFIG_SMP
+#ifndef raw_cpu_write_1
+#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef raw_cpu_write_2
+#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef raw_cpu_write_4
+#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef raw_cpu_write_8
+#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
+#endif
 
-#ifdef MODULE
-#define PER_CPU_SHARED_ALIGNED_SECTION ""
-#define PER_CPU_ALIGNED_SECTION ""
-#else
-#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
-#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
+#ifndef raw_cpu_add_1
+#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef raw_cpu_add_2
+#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef raw_cpu_add_4
+#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef raw_cpu_add_8
+#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
 #endif
-#define PER_CPU_FIRST_SECTION "..first"
 
-#else
+#ifndef raw_cpu_and_1
+#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef raw_cpu_and_2
+#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef raw_cpu_and_4
+#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef raw_cpu_and_8
+#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
+#endif
+
+#ifndef raw_cpu_or_1
+#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef raw_cpu_or_2
+#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef raw_cpu_or_4
+#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef raw_cpu_or_8
+#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
+#endif
 
-#define PER_CPU_SHARED_ALIGNED_SECTION ""
-#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
-#define PER_CPU_FIRST_SECTION ""
+#ifndef raw_cpu_add_return_1
+#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef raw_cpu_add_return_2
+#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef raw_cpu_add_return_4
+#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef raw_cpu_add_return_8
+#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+#endif
 
+#ifndef raw_cpu_xchg_1
+#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef raw_cpu_xchg_2
+#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef raw_cpu_xchg_4
+#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef raw_cpu_xchg_8
+#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
 #endif
 
-#ifndef PER_CPU_ATTRIBUTES
-#define PER_CPU_ATTRIBUTES
+#ifndef raw_cpu_cmpxchg_1
+#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef raw_cpu_cmpxchg_2
+#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef raw_cpu_cmpxchg_4
+#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef raw_cpu_cmpxchg_8
+#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
 #endif
 
-#ifndef PER_CPU_DEF_ATTRIBUTES
-#define PER_CPU_DEF_ATTRIBUTES
+#ifndef raw_cpu_cmpxchg_double_1
+#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef raw_cpu_cmpxchg_double_2
+#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef raw_cpu_cmpxchg_double_4
+#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef raw_cpu_cmpxchg_double_8
+#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+
+#ifndef this_cpu_read_1
+#define this_cpu_read_1(pcp)	this_cpu_generic_read(pcp)
+#endif
+#ifndef this_cpu_read_2
+#define this_cpu_read_2(pcp)	this_cpu_generic_read(pcp)
+#endif
+#ifndef this_cpu_read_4
+#define this_cpu_read_4(pcp)	this_cpu_generic_read(pcp)
+#endif
+#ifndef this_cpu_read_8
+#define this_cpu_read_8(pcp)	this_cpu_generic_read(pcp)
 #endif
 
-/* Keep until we have removed all uses of __this_cpu_ptr */
-#define __this_cpu_ptr raw_cpu_ptr
+#ifndef this_cpu_write_1
+#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef this_cpu_write_2
+#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef this_cpu_write_4
+#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
+#endif
+#ifndef this_cpu_write_8
+#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
+#endif
+
+#ifndef this_cpu_add_1
+#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef this_cpu_add_2
+#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef this_cpu_add_4
+#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
+#endif
+#ifndef this_cpu_add_8
+#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
+#endif
+
+#ifndef this_cpu_and_1
+#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef this_cpu_and_2
+#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef this_cpu_and_4
+#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
+#endif
+#ifndef this_cpu_and_8
+#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
+#endif
+
+#ifndef this_cpu_or_1
+#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef this_cpu_or_2
+#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef this_cpu_or_4
+#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
+#endif
+#ifndef this_cpu_or_8
+#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
+#endif
+
+#ifndef this_cpu_add_return_1
+#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef this_cpu_add_return_2
+#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef this_cpu_add_return_4
+#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
+#endif
+#ifndef this_cpu_add_return_8
+#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
+#endif
+
+#ifndef this_cpu_xchg_1
+#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef this_cpu_xchg_2
+#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef this_cpu_xchg_4
+#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
+#endif
+#ifndef this_cpu_xchg_8
+#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
+#endif
+
+#ifndef this_cpu_cmpxchg_1
+#define this_cpu_cmpxchg_1(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef this_cpu_cmpxchg_2
+#define this_cpu_cmpxchg_2(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef this_cpu_cmpxchg_4
+#define this_cpu_cmpxchg_4(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef this_cpu_cmpxchg_8
+#define this_cpu_cmpxchg_8(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+
+#ifndef this_cpu_cmpxchg_double_1
+#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef this_cpu_cmpxchg_double_2
+#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef this_cpu_cmpxchg_double_4
+#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
+#ifndef this_cpu_cmpxchg_double_8
+#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+#endif
 
 #endif /* _ASM_GENERIC_PERCPU_H_ */
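
Usage note (not part of the diff above): any sized op an architecture does not override falls back to the generics in this header, and the public operations in include/linux/percpu-defs.h pick the sized variant by operand size. A minimal sketch, assuming a 4-byte counter (pkt_count and count_packet() are hypothetical):

	#include <linux/percpu.h>

	static DEFINE_PER_CPU(unsigned int, pkt_count);	/* 4 bytes, so *_4 ops */

	static void count_packet(void)
	{
		/*
		 * this_cpu_add() resolves by sizeof() to this_cpu_add_4();
		 * on an arch without its own this_cpu_add_4 that is the
		 * generic this_cpu_generic_to_op(pcp, val, +=) above, i.e.
		 * the update runs under raw_local_irq_save()/restore().
		 */
		this_cpu_add(pkt_count, 1);
	}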