about | summary | refs | log | tree | commit | diff | stats
path: root/lib/lockref.c
diff options
context:
space:
mode:
Diffstat (limited to 'lib/lockref.c')
-rw-r--r--  lib/lockref.c | 60
1 file changed, 59 insertions(+), 1 deletion(-)
diff --git a/lib/lockref.c b/lib/lockref.c
index a9a4f4e1eff5..7819c2d1d315 100644
--- a/lib/lockref.c
+++ b/lib/lockref.c
@@ -1,6 +1,33 @@
1#include <linux/export.h> 1#include <linux/export.h>
2#include <linux/lockref.h> 2#include <linux/lockref.h>
3 3
4#ifdef CONFIG_CMPXCHG_LOCKREF
5
6/*
7 * Note that the "cmpxchg()" reloads the "old" value for the
8 * failure case.
9 */
10#define CMPXCHG_LOOP(CODE, SUCCESS) do { \
11 struct lockref old; \
12 BUILD_BUG_ON(sizeof(old) != 8); \
13 old.lock_count = ACCESS_ONCE(lockref->lock_count); \
14 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
15 struct lockref new = old, prev = old; \
16 CODE \
17 old.lock_count = cmpxchg(&lockref->lock_count, \
18 old.lock_count, new.lock_count); \
19 if (likely(old.lock_count == prev.lock_count)) { \
20 SUCCESS; \
21 } \
22 } \
23} while (0)
24
25#else
26
27#define CMPXCHG_LOOP(CODE, SUCCESS) do { } while (0)
28
29#endif
30
4/** 31/**
5 * lockref_get - Increments reference count unconditionally 32 * lockref_get - Increments reference count unconditionally
6 * @lockcnt: pointer to lockref structure 33 * @lockcnt: pointer to lockref structure
@@ -10,6 +37,12 @@
10 */ 37 */
11void lockref_get(struct lockref *lockref) 38void lockref_get(struct lockref *lockref)
12{ 39{
40 CMPXCHG_LOOP(
41 new.count++;
42 ,
43 return;
44 );
45
13 spin_lock(&lockref->lock); 46 spin_lock(&lockref->lock);
14 lockref->count++; 47 lockref->count++;
15 spin_unlock(&lockref->lock); 48 spin_unlock(&lockref->lock);
@@ -23,9 +56,18 @@ EXPORT_SYMBOL(lockref_get);
23 */ 56 */
24int lockref_get_not_zero(struct lockref *lockref) 57int lockref_get_not_zero(struct lockref *lockref)
25{ 58{
26 int retval = 0; 59 int retval;
60
61 CMPXCHG_LOOP(
62 new.count++;
63 if (!old.count)
64 return 0;
65 ,
66 return 1;
67 );
27 68
28 spin_lock(&lockref->lock); 69 spin_lock(&lockref->lock);
70 retval = 0;
29 if (lockref->count) { 71 if (lockref->count) {
30 lockref->count++; 72 lockref->count++;
31 retval = 1; 73 retval = 1;
@@ -43,6 +85,14 @@ EXPORT_SYMBOL(lockref_get_not_zero);
43 */ 85 */
44int lockref_get_or_lock(struct lockref *lockref) 86int lockref_get_or_lock(struct lockref *lockref)
45{ 87{
88 CMPXCHG_LOOP(
89 new.count++;
90 if (!old.count)
91 break;
92 ,
93 return 1;
94 );
95
46 spin_lock(&lockref->lock); 96 spin_lock(&lockref->lock);
47 if (!lockref->count) 97 if (!lockref->count)
48 return 0; 98 return 0;
@@ -59,6 +109,14 @@ EXPORT_SYMBOL(lockref_get_or_lock);
59 */ 109 */
60int lockref_put_or_lock(struct lockref *lockref) 110int lockref_put_or_lock(struct lockref *lockref)
61{ 111{
112 CMPXCHG_LOOP(
113 new.count--;
114 if (old.count <= 1)
115 break;
116 ,
117 return 1;
118 );
119
62 spin_lock(&lockref->lock); 120 spin_lock(&lockref->lock);
63 if (lockref->count <= 1) 121 if (lockref->count <= 1)
64 return 0; 122 return 0;