Diffstat (limited to 'arch')
-rw-r--r--	arch/alpha/include/asm/local.h	17
-rw-r--r--	arch/m32r/include/asm/local.h	25
-rw-r--r--	arch/mips/include/asm/local.h	25
-rw-r--r--	arch/powerpc/include/asm/local.h	25
-rw-r--r--	arch/x86/include/asm/local.h	37
5 files changed, 0 insertions, 129 deletions
diff --git a/arch/alpha/include/asm/local.h b/arch/alpha/include/asm/local.h
index 6ad3ea696421..b9e3e3318371 100644
--- a/arch/alpha/include/asm/local.h
+++ b/arch/alpha/include/asm/local.h
@@ -98,21 +98,4 @@ static __inline__ long local_sub_return(long i, local_t * l)
 #define __local_add(i,l)	((l)->a.counter+=(i))
 #define __local_sub(i,l)	((l)->a.counter-=(i))
 
-/* Use these for per-cpu local_t variables: on some archs they are
- * much more efficient than these naive implementations. Note they take
- * a variable, not an address.
- */
-#define cpu_local_read(l)	local_read(&__get_cpu_var(l))
-#define cpu_local_set(l, i)	local_set(&__get_cpu_var(l), (i))
-
-#define cpu_local_inc(l)	local_inc(&__get_cpu_var(l))
-#define cpu_local_dec(l)	local_dec(&__get_cpu_var(l))
-#define cpu_local_add(i, l)	local_add((i), &__get_cpu_var(l))
-#define cpu_local_sub(i, l)	local_sub((i), &__get_cpu_var(l))
-
-#define __cpu_local_inc(l)	__local_inc(&__get_cpu_var(l))
-#define __cpu_local_dec(l)	__local_dec(&__get_cpu_var(l))
-#define __cpu_local_add(i, l)	__local_add((i), &__get_cpu_var(l))
-#define __cpu_local_sub(i, l)	__local_sub((i), &__get_cpu_var(l))
-
 #endif /* _ALPHA_LOCAL_H */
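
The comment removed above documented the cpu_local_*() family, which took a per-cpu variable rather than an address. A minimal sketch of how a caller would migrate to the generic this_cpu ops that replaced this interface (the counter name is hypothetical):

#include <linux/percpu.h>

/* Hypothetical per-cpu event counter, named for illustration only. */
static DEFINE_PER_CPU(unsigned long, evt_count);

static void note_event(void)
{
	/*
	 * Previously: cpu_local_inc(evt_count) on a per-cpu local_t.
	 * Like the removed macro, this_cpu_inc() takes the variable,
	 * not an address, and it is preempt-safe on its own.
	 */
	this_cpu_inc(evt_count);
}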
diff --git a/arch/m32r/include/asm/local.h b/arch/m32r/include/asm/local.h
index 22256d138630..734bca87018a 100644
--- a/arch/m32r/include/asm/local.h
+++ b/arch/m32r/include/asm/local.h
@@ -338,29 +338,4 @@ static inline void local_set_mask(unsigned long mask, local_t *addr)
  * a variable, not an address.
  */
 
-/* Need to disable preemption for the cpu local counters otherwise we could
-   still access a variable of a previous CPU in a non local way. */
-#define cpu_local_wrap_v(l)		\
-	({ local_t res__;		\
-	   preempt_disable();		\
-	   res__ = (l);			\
-	   preempt_enable();		\
-	   res__; })
-#define cpu_local_wrap(l)		\
-	({ preempt_disable();		\
-	   l;				\
-	   preempt_enable(); })		\
-
-#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
-#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
-#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
-#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
-#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
-#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
-
-#define __cpu_local_inc(l)	cpu_local_inc(l)
-#define __cpu_local_dec(l)	cpu_local_dec(l)
-#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
-#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
-
 #endif /* __M32R_LOCAL_H */
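
Written out long-hand, the removed cpu_local_wrap_v()/cpu_local_read() pair amounts to the sketch below (the per-cpu counter name is hypothetical; the temporary is a plain long here, which is what local_read() returns):

#include <linux/percpu.h>
#include <linux/preempt.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, evt);	/* hypothetical counter */

static long read_evt(void)
{
	long v;

	preempt_disable();	/* pin this task to the current CPU... */
	v = local_read(&__get_cpu_var(evt));
	preempt_enable();	/* ...so the per-cpu address stays valid */
	return v;
}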
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index 361f4f16c30c..bdcdef02d147 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -193,29 +193,4 @@ static __inline__ long local_sub_return(long i, local_t * l)
 #define __local_add(i, l)	((l)->a.counter+=(i))
 #define __local_sub(i, l)	((l)->a.counter-=(i))
 
-/* Need to disable preemption for the cpu local counters otherwise we could
-   still access a variable of a previous CPU in a non atomic way. */
-#define cpu_local_wrap_v(l)		\
-	({ local_t res__;		\
-	   preempt_disable();		\
-	   res__ = (l);			\
-	   preempt_enable();		\
-	   res__; })
-#define cpu_local_wrap(l)		\
-	({ preempt_disable();		\
-	   l;				\
-	   preempt_enable(); })		\
-
-#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
-#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
-#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
-#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
-#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
-#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
-
-#define __cpu_local_inc(l)	cpu_local_inc(l)
-#define __cpu_local_dec(l)	cpu_local_dec(l)
-#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
-#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
-
 #endif /* _ARCH_MIPS_LOCAL_H */
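
The hazard the removed comment warns about is a per-cpu address going stale across a preemption-driven migration. The sketch below contrasts the racy pattern with the safe get_cpu_var()/put_cpu_var() bracket (hypothetical counter name):

#include <linux/percpu.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, cnt);	/* hypothetical counter */

static void update_cnt(void)
{
	/*
	 * Racy: if this task migrates between taking the per-cpu
	 * address and dereferencing it, it updates the previous
	 * CPU's counter - exactly the "non atomic" access above.
	 */
	local_inc(&__get_cpu_var(cnt));

	/*
	 * Safe: get_cpu_var() disables preemption until the matching
	 * put_cpu_var(), so the address cannot go stale in between.
	 */
	local_inc(&get_cpu_var(cnt));
	put_cpu_var(cnt);
}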
diff --git a/arch/powerpc/include/asm/local.h b/arch/powerpc/include/asm/local.h
index 84b457a3c1bc..227753d288f6 100644
--- a/arch/powerpc/include/asm/local.h
+++ b/arch/powerpc/include/asm/local.h
@@ -172,29 +172,4 @@ static __inline__ long local_dec_if_positive(local_t *l)
 #define __local_add(i,l)	((l)->a.counter+=(i))
 #define __local_sub(i,l)	((l)->a.counter-=(i))
 
-/* Need to disable preemption for the cpu local counters otherwise we could
-   still access a variable of a previous CPU in a non atomic way. */
-#define cpu_local_wrap_v(l)		\
-	({ local_t res__;		\
-	   preempt_disable();		\
-	   res__ = (l);			\
-	   preempt_enable();		\
-	   res__; })
-#define cpu_local_wrap(l)		\
-	({ preempt_disable();		\
-	   l;				\
-	   preempt_enable(); })		\
-
-#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
-#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
-#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
-#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
-#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
-#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
-
-#define __cpu_local_inc(l)	cpu_local_inc(l)
-#define __cpu_local_dec(l)	cpu_local_dec(l)
-#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
-#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
-
 #endif /* _ARCH_POWERPC_LOCAL_H */
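
Independent of the removed wrappers, local_t itself guarantees atomicity against interrupts on the owning CPU (implemented with larx/stcx. sequences on powerpc), so a handler can bump the counter locklessly. A sketch with hypothetical names:

#include <linux/interrupt.h>
#include <linux/percpu.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, irq_evt);	/* hypothetical counter */

static irqreturn_t demo_handler(int irq, void *dev_id)
{
	/*
	 * Safe against process context on the same CPU without a
	 * lock; preemption is already disabled in hard-irq context,
	 * so the per-cpu address cannot go stale either.
	 */
	local_inc(&__get_cpu_var(irq_evt));
	return IRQ_HANDLED;
}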
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index 47b9b6f19057..2e9972468a5d 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -195,41 +195,4 @@ static inline long local_sub_return(long i, local_t *l)
 #define __local_add(i, l)	local_add((i), (l))
 #define __local_sub(i, l)	local_sub((i), (l))
 
-/* Use these for per-cpu local_t variables: on some archs they are
- * much more efficient than these naive implementations. Note they take
- * a variable, not an address.
- *
- * X86_64: This could be done better if we moved the per cpu data directly
- * after GS.
- */
-
-/* Need to disable preemption for the cpu local counters otherwise we could
-   still access a variable of a previous CPU in a non atomic way. */
-#define cpu_local_wrap_v(l)		\
-({					\
-	local_t res__;			\
-	preempt_disable();		\
-	res__ = (l);			\
-	preempt_enable();		\
-	res__;				\
-})
-#define cpu_local_wrap(l)		\
-({					\
-	preempt_disable();		\
-	(l);				\
-	preempt_enable();		\
-})					\
-
-#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var((l))))
-#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var((l)), (i)))
-#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var((l))))
-#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var((l))))
-#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var((l))))
-#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var((l))))
-
-#define __cpu_local_inc(l)	cpu_local_inc((l))
-#define __cpu_local_dec(l)	cpu_local_dec((l))
-#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
-#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
-
 #endif /* _ASM_X86_LOCAL_H */
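
The removed X86_64 comment points at the design that made these wrappers unnecessary: per-cpu data addressed directly off the GS segment base. With that in place the generic this_cpu ops compile down to a single gs-prefixed instruction, so no preempt_disable() bracket is needed at all; a sketch (hypothetical counter name):

#include <linux/percpu.h>

static DEFINE_PER_CPU(unsigned long, nmi_hits);	/* hypothetical */

static void note_hit(void)
{
	/*
	 * On x86 this compiles to roughly "incq %gs:nmi_hits":
	 * address generation and the increment are one instruction,
	 * so the update cannot be torn by preemption or migration.
	 */
	this_cpu_inc(nmi_hits);
}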