Diffstat (limited to 'include/asm-s390/spinlock.h')
-rw-r--r--  include/asm-s390/spinlock.h |  63 +++++++++++++++++++++----------------------------------------
1 file changed, 23 insertions(+), 40 deletions(-)
diff --git a/include/asm-s390/spinlock.h b/include/asm-s390/spinlock.h
index 321b23bba1ec..273dbecf8ace 100644
--- a/include/asm-s390/spinlock.h
+++ b/include/asm-s390/spinlock.h
@@ -27,25 +27,19 @@ _raw_compare_and_swap(volatile unsigned int *lock,
  * on the local processor, one does not.
  *
  * We make no fairness assumptions. They have a cost.
+ *
+ * (the type definitions are in asm/spinlock_types.h)
  */
 
-typedef struct {
-        volatile unsigned int lock;
-#ifdef CONFIG_PREEMPT
-        unsigned int break_lock;
-#endif
-} __attribute__ ((aligned (4))) spinlock_t;
-
-#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
-#define spin_lock_init(lp) do { (lp)->lock = 0; } while(0)
-#define spin_unlock_wait(lp) do { barrier(); } while(((volatile spinlock_t *)(lp))->lock)
-#define spin_is_locked(x) ((x)->lock != 0)
-#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
+#define __raw_spin_is_locked(x) ((x)->lock != 0)
+#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
+#define __raw_spin_unlock_wait(lock) \
+        do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
 
-extern void _raw_spin_lock_wait(spinlock_t *lp, unsigned int pc);
-extern int _raw_spin_trylock_retry(spinlock_t *lp, unsigned int pc);
+extern void _raw_spin_lock_wait(raw_spinlock_t *lp, unsigned int pc);
+extern int _raw_spin_trylock_retry(raw_spinlock_t *lp, unsigned int pc);
 
-static inline void _raw_spin_lock(spinlock_t *lp)
+static inline void __raw_spin_lock(raw_spinlock_t *lp)
 {
         unsigned long pc = 1 | (unsigned long) __builtin_return_address(0);
 
@@ -53,7 +47,7 @@ static inline void _raw_spin_lock(spinlock_t *lp)
                 _raw_spin_lock_wait(lp, pc);
 }
 
-static inline int _raw_spin_trylock(spinlock_t *lp)
+static inline int __raw_spin_trylock(raw_spinlock_t *lp)
 {
         unsigned long pc = 1 | (unsigned long) __builtin_return_address(0);
 
@@ -62,7 +56,7 @@ static inline int _raw_spin_trylock(spinlock_t *lp)
         return _raw_spin_trylock_retry(lp, pc);
 }
 
-static inline void _raw_spin_unlock(spinlock_t *lp)
+static inline void __raw_spin_unlock(raw_spinlock_t *lp)
 {
         _raw_compare_and_swap(&lp->lock, lp->lock, 0);
 }
@@ -77,36 +71,25 @@ static inline void _raw_spin_unlock(spinlock_t *lp)
  * irq-safe write-lock, but readers can get non-irqsafe
  * read-locks.
  */
-typedef struct {
-        volatile unsigned int lock;
-        volatile unsigned long owner_pc;
-#ifdef CONFIG_PREEMPT
-        unsigned int break_lock;
-#endif
-} rwlock_t;
-
-#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }
-
-#define rwlock_init(x) do { *(x) = RW_LOCK_UNLOCKED; } while(0)
 
 /**
  * read_can_lock - would read_trylock() succeed?
  * @lock: the rwlock in question.
  */
-#define read_can_lock(x) ((int)(x)->lock >= 0)
+#define __raw_read_can_lock(x) ((int)(x)->lock >= 0)
 
 /**
  * write_can_lock - would write_trylock() succeed?
  * @lock: the rwlock in question.
  */
-#define write_can_lock(x) ((x)->lock == 0)
+#define __raw_write_can_lock(x) ((x)->lock == 0)
 
-extern void _raw_read_lock_wait(rwlock_t *lp);
-extern int _raw_read_trylock_retry(rwlock_t *lp);
-extern void _raw_write_lock_wait(rwlock_t *lp);
-extern int _raw_write_trylock_retry(rwlock_t *lp);
+extern void _raw_read_lock_wait(raw_rwlock_t *lp);
+extern int _raw_read_trylock_retry(raw_rwlock_t *lp);
+extern void _raw_write_lock_wait(raw_rwlock_t *lp);
+extern int _raw_write_trylock_retry(raw_rwlock_t *lp);
 
-static inline void _raw_read_lock(rwlock_t *rw)
+static inline void __raw_read_lock(raw_rwlock_t *rw)
 {
         unsigned int old;
         old = rw->lock & 0x7fffffffU;
@@ -114,7 +97,7 @@ static inline void _raw_read_lock(rwlock_t *rw)
                 _raw_read_lock_wait(rw);
 }
 
-static inline void _raw_read_unlock(rwlock_t *rw)
+static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
         unsigned int old, cmp;
 
@@ -125,18 +108,18 @@ static inline void _raw_read_unlock(rwlock_t *rw)
         } while (cmp != old);
 }
 
-static inline void _raw_write_lock(rwlock_t *rw)
+static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
         if (unlikely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) != 0))
                 _raw_write_lock_wait(rw);
 }
 
-static inline void _raw_write_unlock(rwlock_t *rw)
+static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
         _raw_compare_and_swap(&rw->lock, 0x80000000, 0);
 }
 
-static inline int _raw_read_trylock(rwlock_t *rw)
+static inline int __raw_read_trylock(raw_rwlock_t *rw)
 {
         unsigned int old;
         old = rw->lock & 0x7fffffffU;
@@ -145,7 +128,7 @@ static inline int _raw_read_trylock(rwlock_t *rw)
         return _raw_read_trylock_retry(rw);
 }
 
-static inline int _raw_write_trylock(rwlock_t *rw)
+static inline int __raw_write_trylock(raw_rwlock_t *rw)
 {
         if (likely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0))
                 return 1;
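Again for illustration only, not part of the patch: the rwlock word layout these hunks rely on uses bit 31 (0x80000000) as the writer bit and the low 31 bits as a reader count, which is why __raw_read_can_lock tests the sign bit and __raw_write_can_lock requires the whole word to be 0. A minimal sketch of that layout under the same assumptions as before (__sync_val_compare_and_swap in place of the s390 compare-and-swap, invented demo_* names):

    /*
     * Illustrative user-space sketch, not kernel code: bit 31 of the rwlock
     * word marks a writer, the low 31 bits count readers.  Readers may
     * enter while the sign bit is clear; a writer needs the whole word to
     * be 0.  __sync_val_compare_and_swap again stands in for the s390
     * compare-and-swap, and the *_wait/_retry slow paths are omitted.
     */
    #include <stdio.h>

    typedef struct { volatile unsigned int lock; } demo_rwlock_t;

    static inline int demo_read_trylock(demo_rwlock_t *rw)
    {
            unsigned int old = rw->lock & 0x7fffffffU;  /* drop the write bit */

            /* succeed only if no writer appeared and the count is unchanged */
            return __sync_val_compare_and_swap(&rw->lock, old, old + 1) == old;
    }

    static inline void demo_read_unlock(demo_rwlock_t *rw)
    {
            unsigned int old, cmp;

            old = rw->lock;
            do {
                    cmp = old;
                    old = __sync_val_compare_and_swap(&rw->lock, old, old - 1);
            } while (cmp != old);   /* retry if the count changed underneath us */
    }

    static inline int demo_write_trylock(demo_rwlock_t *rw)
    {
            /* the word must go from 0 straight to "writer bit set" */
            return __sync_val_compare_and_swap(&rw->lock, 0, 0x80000000U) == 0;
    }

    static inline void demo_write_unlock(demo_rwlock_t *rw)
    {
            __sync_val_compare_and_swap(&rw->lock, 0x80000000U, 0);
    }

    int main(void)
    {
            demo_rwlock_t rw = { 0 };

            printf("read_trylock:  %d, lock word %#x\n",
                   demo_read_trylock(&rw), rw.lock);
            printf("write_trylock: %d (a reader still holds the lock)\n",
                   demo_write_trylock(&rw));
            demo_read_unlock(&rw);
            printf("write_trylock: %d, lock word %#x\n",
                   demo_write_trylock(&rw), rw.lock);
            demo_write_unlock(&rw);
            return 0;
    }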