 include/linux/percpu_counter.h | 10 +++++++++-
 lib/percpu_counter.c           |  6 +++---
 2 files changed, 12 insertions(+), 4 deletions(-)
diff --git a/include/linux/percpu_counter.h b/include/linux/percpu_counter.h
index 438a170187ec..40df86f7a3b4 100644
--- a/include/linux/percpu_counter.h
+++ b/include/linux/percpu_counter.h
@@ -32,9 +32,14 @@ struct percpu_counter {
 
 void percpu_counter_init(struct percpu_counter *fbc, s64 amount);
 void percpu_counter_destroy(struct percpu_counter *fbc);
-void percpu_counter_add(struct percpu_counter *fbc, s32 amount);
+void __percpu_counter_add(struct percpu_counter *fbc, s32 amount, s32 batch);
 s64 percpu_counter_sum(struct percpu_counter *fbc);
 
+static inline void percpu_counter_add(struct percpu_counter *fbc, s32 amount)
+{
+	__percpu_counter_add(fbc, amount, FBC_BATCH);
+}
+
 static inline s64 percpu_counter_read(struct percpu_counter *fbc)
 {
 	return fbc->count;
@@ -70,6 +75,9 @@ static inline void percpu_counter_destroy(struct percpu_counter *fbc)
 {
 }
 
+#define __percpu_counter_add(fbc, amount, batch) \
+	percpu_counter_add(fbc, amount)
+
 static inline void
 percpu_counter_add(struct percpu_counter *fbc, s32 amount)
 {
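
With the header change above, percpu_counter_add() stays the default entry point (it folds into the shared count at FBC_BATCH), while __percpu_counter_add() lets a caller choose its own batch. A minimal usage sketch follows; the counter nr_foo, the foo_*() helpers, and the 4 * FBC_BATCH value are hypothetical and not part of this patch:

#include <linux/percpu_counter.h>

static struct percpu_counter nr_foo;		/* hypothetical counter */

static void foo_setup(void)
{
	percpu_counter_init(&nr_foo, 0);	/* start the counter at 0 */
}

static void foo_account_default(s32 delta)
{
	/* folds into nr_foo.count once this CPU's delta reaches FBC_BATCH */
	percpu_counter_add(&nr_foo, delta);
}

static void foo_account_batched(s32 delta)
{
	/* larger batch: tolerate more per-CPU drift before taking fbc->lock */
	__percpu_counter_add(&nr_foo, delta, 4 * FBC_BATCH);
}

A larger batch means fewer acquisitions of fbc->lock on hot paths, at the cost of a less accurate percpu_counter_read().
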
diff --git a/lib/percpu_counter.c b/lib/percpu_counter.c
index 5f36ad79a24a..f736d67c64d7 100644
--- a/lib/percpu_counter.c
+++ b/lib/percpu_counter.c
@@ -14,7 +14,7 @@ static LIST_HEAD(percpu_counters);
 static DEFINE_MUTEX(percpu_counters_lock);
 #endif
 
-void percpu_counter_add(struct percpu_counter *fbc, s32 amount)
+void __percpu_counter_add(struct percpu_counter *fbc, s32 amount, s32 batch)
 {
 	long count;
 	s32 *pcount;
@@ -22,7 +22,7 @@ void percpu_counter_add(struct percpu_counter *fbc, s32 amount)
 
 	pcount = per_cpu_ptr(fbc->counters, cpu);
 	count = *pcount + amount;
-	if (count >= batch || count <= -batch) {
+	if (count >= batch || count <= -batch) {
 		spin_lock(&fbc->lock);
 		fbc->count += count;
 		*pcount = 0;
@@ -32,7 +32,7 @@ void percpu_counter_add(struct percpu_counter *fbc, s32 amount)
 	}
 	put_cpu();
 }
-EXPORT_SYMBOL(percpu_counter_add);
+EXPORT_SYMBOL(__percpu_counter_add);
 
 /*
  * Add up all the per-cpu counts, return the result. This is a more accurate
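
Because a per-CPU delta is only folded into fbc->count once it reaches the caller-supplied batch, percpu_counter_read() can lag the true value by up to (batch - 1) per CPU in either direction. A sketch of how a caller might allow for that when checking a limit; foo_over_limit() and its parameters are hypothetical, not part of this patch:

#include <linux/percpu_counter.h>
#include <linux/cpumask.h>

static int foo_over_limit(struct percpu_counter *fbc, s64 limit, s32 batch)
{
	s64 approx = percpu_counter_read(fbc);	/* cheap, possibly stale */

	/* worst case, each online CPU still holds up to (batch - 1) unfolded */
	if (approx + (s64)num_online_cpus() * (batch - 1) < limit)
		return 0;

	/* only pay for the exact, lock-taking sum when close to the limit */
	return percpu_counter_sum(fbc) >= limit;
}
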