author		Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
committer	Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
commit		47b69ad673d9aa53c1d6032a6a522fc0ce8d6fc1
tree		73f5da096b50009412498af666e828a8297e87d5 /include/asm-generic
parent		dcba4333683c3a0642fd575e475c6c740122a037
percpu: move generic {raw|this}_cpu_*_N() definitions to include/asm-generic/percpu.h
{raw|this}_cpu_*_N() operations are expected to be provided by archs,
with the generic definitions serving as fallbacks. As such, they
firmly belong in include/asm-generic/percpu.h.
Move the generic definitions to include/asm-generic/percpu.h. The
code is moved mostly verbatim; however, raw_cpu_*_N() are placed above
this_cpu_*_N(), which is more conventional as the raw operations may be
used to define other variants.
This is pure reorganization.
Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
Diffstat (limited to 'include/asm-generic')
-rw-r--r--	include/asm-generic/percpu.h	341
1 file changed, 341 insertions, 0 deletions
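
As the commit message notes, the size-suffixed operations below are fallbacks: an architecture overrides one simply by defining its own variant before <asm-generic/percpu.h> is included, and the corresponding #ifndef guard then leaves the arch version in place. A minimal sketch of that override pattern; the arch macro shown here is hypothetical, not taken from any real architecture:

  /* arch/foo/include/asm/percpu.h (hypothetical) */
  #define raw_cpu_add_4(pcp, val)	foo_insn_percpu_add32(pcp, val)

  #include <asm-generic/percpu.h>
  /*
   * raw_cpu_add_4() keeps the arch definition because its #ifndef guard
   * fails; raw_cpu_add_1/2/8() and all other unprovided operations fall
   * back to the generic raw_cpu_generic_*() versions moved by this patch.
   */
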
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index e5ace4d49084..932ce602128f 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -65,4 +65,345 @@ extern void setup_per_cpu_areas(void);
 #define PER_CPU_DEF_ATTRIBUTES
 #endif
 
+# ifndef raw_cpu_read_1
+# define raw_cpu_read_1(pcp)	(*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_2
+# define raw_cpu_read_2(pcp)	(*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_4
+# define raw_cpu_read_4(pcp)	(*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_8
+# define raw_cpu_read_8(pcp)	(*raw_cpu_ptr(&(pcp)))
+# endif
+
+#define raw_cpu_generic_to_op(pcp, val, op) \
+do { \
+	*raw_cpu_ptr(&(pcp)) op val; \
+} while (0)
+
+# ifndef raw_cpu_write_1
+# define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_2
+# define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_4
+# define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_8
+# define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+
+# ifndef raw_cpu_add_1
+# define raw_cpu_add_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_2
+# define raw_cpu_add_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_4
+# define raw_cpu_add_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_8
+# define raw_cpu_add_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+
+# ifndef raw_cpu_and_1
+# define raw_cpu_and_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_2
+# define raw_cpu_and_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_4
+# define raw_cpu_and_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_8
+# define raw_cpu_and_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+
+# ifndef raw_cpu_or_1
+# define raw_cpu_or_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_2
+# define raw_cpu_or_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_4
+# define raw_cpu_or_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_8
+# define raw_cpu_or_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+
+#define raw_cpu_generic_add_return(pcp, val) \
+({ \
+	raw_cpu_add(pcp, val); \
+	raw_cpu_read(pcp); \
+})
+
+# ifndef raw_cpu_add_return_1
+# define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_2
+# define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_4
+# define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_8
+# define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
+# endif
+
+#define raw_cpu_generic_xchg(pcp, nval) \
+({	typeof(pcp) ret__; \
+	ret__ = raw_cpu_read(pcp); \
+	raw_cpu_write(pcp, nval); \
+	ret__; \
+})
+
+# ifndef raw_cpu_xchg_1
+# define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_2
+# define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_4
+# define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_8
+# define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
+# endif
+
+#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+	typeof(pcp) ret__; \
+	ret__ = raw_cpu_read(pcp); \
+	if (ret__ == (oval)) \
+		raw_cpu_write(pcp, nval); \
+	ret__; \
+})
+
+# ifndef raw_cpu_cmpxchg_1
+# define raw_cpu_cmpxchg_1(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_2
+# define raw_cpu_cmpxchg_2(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_4
+# define raw_cpu_cmpxchg_4(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_8
+# define raw_cpu_cmpxchg_8(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+
+#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+	int __ret = 0; \
+	if (raw_cpu_read(pcp1) == (oval1) && \
+			raw_cpu_read(pcp2) == (oval2)) { \
+		raw_cpu_write(pcp1, (nval1)); \
+		raw_cpu_write(pcp2, (nval2)); \
+		__ret = 1; \
+	} \
+	(__ret); \
+})
+
+# ifndef raw_cpu_cmpxchg_double_1
+# define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_2
+# define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_4
+# define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_8
+# define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+
+#define _this_cpu_generic_read(pcp) \
+({	typeof(pcp) ret__; \
+	preempt_disable(); \
+	ret__ = *this_cpu_ptr(&(pcp)); \
+	preempt_enable(); \
+	ret__; \
+})
+
+# ifndef this_cpu_read_1
+# define this_cpu_read_1(pcp)	_this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_2
+# define this_cpu_read_2(pcp)	_this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_4
+# define this_cpu_read_4(pcp)	_this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_8
+# define this_cpu_read_8(pcp)	_this_cpu_generic_read(pcp)
+# endif
+
+#define _this_cpu_generic_to_op(pcp, val, op) \
+do { \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	*raw_cpu_ptr(&(pcp)) op val; \
+	raw_local_irq_restore(flags); \
+} while (0)
+
+# ifndef this_cpu_write_1
+# define this_cpu_write_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_2
+# define this_cpu_write_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_4
+# define this_cpu_write_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_8
+# define this_cpu_write_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
+# endif
+
+# ifndef this_cpu_add_1
+# define this_cpu_add_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_2
+# define this_cpu_add_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_4
+# define this_cpu_add_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_8
+# define this_cpu_add_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+
+# ifndef this_cpu_and_1
+# define this_cpu_and_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_2
+# define this_cpu_and_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_4
+# define this_cpu_and_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_8
+# define this_cpu_and_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+
+# ifndef this_cpu_or_1
+# define this_cpu_or_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_2
+# define this_cpu_or_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_4
+# define this_cpu_or_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_8
+# define this_cpu_or_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+
+#define _this_cpu_generic_add_return(pcp, val) \
+({ \
+	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	raw_cpu_add(pcp, val); \
+	ret__ = raw_cpu_read(pcp); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+# ifndef this_cpu_add_return_1
+# define this_cpu_add_return_1(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_2
+# define this_cpu_add_return_2(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_4
+# define this_cpu_add_return_4(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_8
+# define this_cpu_add_return_8(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+
+#define _this_cpu_generic_xchg(pcp, nval) \
+({	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_read(pcp); \
+	raw_cpu_write(pcp, nval); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+# ifndef this_cpu_xchg_1
+# define this_cpu_xchg_1(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_2
+# define this_cpu_xchg_2(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_4
+# define this_cpu_xchg_4(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_8
+# define this_cpu_xchg_8(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+	typeof(pcp) ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_read(pcp); \
+	if (ret__ == (oval)) \
+		raw_cpu_write(pcp, nval); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+# ifndef this_cpu_cmpxchg_1
+# define this_cpu_cmpxchg_1(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_2
+# define this_cpu_cmpxchg_2(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_4
+# define this_cpu_cmpxchg_4(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_8
+# define this_cpu_cmpxchg_8(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+
+#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+	int ret__; \
+	unsigned long flags; \
+	raw_local_irq_save(flags); \
+	ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
+			oval1, oval2, nval1, nval2); \
+	raw_local_irq_restore(flags); \
+	ret__; \
+})
+
+# ifndef this_cpu_cmpxchg_double_1
+# define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_2
+# define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_4
+# define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_8
+# define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+
 #endif /* _ASM_GENERIC_PERCPU_H_ */
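
A usage-level note on the two families above: the generic this_cpu_*() fallbacks protect the access with raw_local_irq_save()/restore() (or preempt_disable() for reads), so they are safe from any context, whereas the generic raw_cpu_*() fallbacks compile to plain loads and stores and rely on the caller to exclude concurrent updaters. A short illustrative sketch under that assumption; the per-cpu variable and the foo_*() helpers are made up:

  #include <linux/percpu.h>

  DEFINE_PER_CPU(unsigned long, foo_events);	/* hypothetical counter */

  void foo_report(unsigned long n);		/* hypothetical helper */

  /* May be called from any context: the generic this_cpu_add_N()
   * fallback wraps the read-modify-write in raw_local_irq_save(). */
  void foo_count_event(void)
  {
  	this_cpu_add(foo_events, 1);
  }

  /* Called with interrupts disabled on this CPU, so the plain
   * raw_cpu_*() fallbacks cannot race with foo_count_event() here. */
  void foo_flush_this_cpu(void)
  {
  	unsigned long n = raw_cpu_read(foo_events);

  	raw_cpu_write(foo_events, 0);
  	foo_report(n);
  }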