 include/asm-i386/local.h   | 26 ++++++++++++++++++------
 include/asm-x86_64/local.h | 26 ++++++++++++++++++------
 2 files changed, 40 insertions(+), 12 deletions(-)
diff --git a/include/asm-i386/local.h b/include/asm-i386/local.h
index e67fa08260fe..3b4998c51d08 100644
--- a/include/asm-i386/local.h
+++ b/include/asm-i386/local.h
@@ -55,12 +55,26 @@ static __inline__ void local_sub(long i, local_t *v)
  * much more efficient than these naive implementations. Note they take
  * a variable, not an address.
  */
-#define cpu_local_read(v)	local_read(&__get_cpu_var(v))
-#define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+
+/* Need to disable preemption for the cpu local counters otherwise we could
+   still access a variable of a previous CPU in a non atomic way. */
+#define cpu_local_wrap_v(v)		\
+	({ local_t res__;		\
+	preempt_disable(); 		\
+	res__ = (v);			\
+	preempt_enable();		\
+	res__; })
+#define cpu_local_wrap(v)		\
+	({ preempt_disable();		\
+	v;				\
+	preempt_enable(); })		\
+
+#define cpu_local_read(v)    cpu_local_wrap_v(local_read(&__get_cpu_var(v)))
+#define cpu_local_set(v, i)  cpu_local_wrap(local_set(&__get_cpu_var(v), (i)))
+#define cpu_local_inc(v)     cpu_local_wrap(local_inc(&__get_cpu_var(v)))
+#define cpu_local_dec(v)     cpu_local_wrap(local_dec(&__get_cpu_var(v)))
+#define cpu_local_add(i, v)  cpu_local_wrap(local_add((i), &__get_cpu_var(v)))
+#define cpu_local_sub(i, v)  cpu_local_wrap(local_sub((i), &__get_cpu_var(v)))
 
 #define __cpu_local_inc(v)	cpu_local_inc(v)
 #define __cpu_local_dec(v)	cpu_local_dec(v)
diff --git a/include/asm-x86_64/local.h b/include/asm-x86_64/local.h
index cd17945bf218..e769e6200225 100644
--- a/include/asm-x86_64/local.h
+++ b/include/asm-x86_64/local.h
@@ -59,12 +59,26 @@ static inline void local_sub(long i, local_t *v)
  * This could be done better if we moved the per cpu data directly
  * after GS.
  */
-#define cpu_local_read(v)	local_read(&__get_cpu_var(v))
-#define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+
+/* Need to disable preemption for the cpu local counters otherwise we could
+   still access a variable of a previous CPU in a non atomic way. */
+#define cpu_local_wrap_v(v)		\
+	({ local_t res__;		\
+	preempt_disable(); 		\
+	res__ = (v);			\
+	preempt_enable();		\
+	res__; })
+#define cpu_local_wrap(v)		\
+	({ preempt_disable();		\
+	v;				\
+	preempt_enable(); })		\
+
+#define cpu_local_read(v)    cpu_local_wrap_v(local_read(&__get_cpu_var(v)))
+#define cpu_local_set(v, i)  cpu_local_wrap(local_set(&__get_cpu_var(v), (i)))
+#define cpu_local_inc(v)     cpu_local_wrap(local_inc(&__get_cpu_var(v)))
+#define cpu_local_dec(v)     cpu_local_wrap(local_dec(&__get_cpu_var(v)))
+#define cpu_local_add(i, v)  cpu_local_wrap(local_add((i), &__get_cpu_var(v)))
+#define cpu_local_sub(i, v)  cpu_local_wrap(local_sub((i), &__get_cpu_var(v)))
 
 #define __cpu_local_inc(v)	cpu_local_inc(v)
 #define __cpu_local_dec(v)	cpu_local_dec(v)
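For context, a minimal usage sketch (not part of the patch; the per-CPU counter nr_events_example and the helper note_event() are made-up names for illustration) showing why bracketing the accessors in preempt_disable()/preempt_enable() matters when they are called from preemptible code:

#include <linux/percpu.h>
#include <asm/local.h>

/* Hypothetical per-CPU event counter, for illustration only. */
static DEFINE_PER_CPU(local_t, nr_events_example);

/* Called from preemptible process context. */
static void note_event(void)
{
	/*
	 * Without the wrappers, __get_cpu_var() could be evaluated on one
	 * CPU and local_inc() executed after the task migrated to another,
	 * turning the "local" update into a cross-CPU, non-atomic access.
	 * With this patch, cpu_local_inc() disables preemption around both
	 * steps, so the increment lands on the CPU the task is running on.
	 */
	cpu_local_inc(nr_events_example);
}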