Diffstat (limited to 'include/linux/seqlock.h')
 include/linux/seqlock.h | 46 +++++++++++++++++++++++++++++-----------------
 1 file changed, 29 insertions(+), 17 deletions(-)
diff --git a/include/linux/seqlock.h b/include/linux/seqlock.h
index 26e4925bc35b..632205ccc25d 100644
--- a/include/linux/seqlock.h
+++ b/include/linux/seqlock.h
@@ -85,23 +85,29 @@ static inline int write_tryseqlock(seqlock_t *sl)
 /* Start of read calculation -- fetch last complete writer token */
 static __always_inline unsigned read_seqbegin(const seqlock_t *sl)
 {
-	unsigned ret = sl->sequence;
+	unsigned ret;
+
+repeat:
+	ret = sl->sequence;
 	smp_rmb();
+	if (unlikely(ret & 1)) {
+		cpu_relax();
+		goto repeat;
+	}
+
 	return ret;
 }
 
-/* Test if reader processed invalid data.
- * If initial values is odd,
- * then writer had already started when section was entered
- * If sequence value changed
- * then writer changed data while in section
- *
- * Using xor saves one conditional branch.
+/*
+ * Test if reader processed invalid data.
+ *
+ * If sequence value changed then writer changed data while in section.
  */
-static __always_inline int read_seqretry(const seqlock_t *sl, unsigned iv)
+static __always_inline int read_seqretry(const seqlock_t *sl, unsigned start)
 {
 	smp_rmb();
-	return (iv & 1) | (sl->sequence ^ iv);
+
+	return (sl->sequence != start);
 }
 
 
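For context, here is a minimal reader/writer sketch of the pattern these helpers support; the foo_* names are illustrative and not part of this patch. With the change above, read_seqbegin() spins until it observes an even sequence (no writer in progress), so read_seqretry() only has to detect a writer that ran while the data was being copied.

#include <linux/seqlock.h>

/* Hypothetical example data protected by a hypothetical seqlock. */
static DEFINE_SEQLOCK(foo_lock);
static int foo_a, foo_b;

/* Reader: copy a consistent snapshot, retrying if a writer intervened. */
static void foo_read(int *a, int *b)
{
	unsigned seq;

	do {
		seq = read_seqbegin(&foo_lock);
		*a = foo_a;
		*b = foo_b;
	} while (read_seqretry(&foo_lock, seq));
}

/* Writer: the sequence is odd for the duration of the update. */
static void foo_write(int a, int b)
{
	write_seqlock(&foo_lock);
	foo_a = a;
	foo_b = b;
	write_sequnlock(&foo_lock);
}

Before this patch, a reader that sampled an odd (in-progress) sequence still ran the copy and relied on the (iv & 1) term in read_seqretry() to reject it; the wait now happens up front in read_seqbegin().
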
@@ -122,20 +128,26 @@ typedef struct seqcount {
 /* Start of read using pointer to a sequence counter only. */
 static inline unsigned read_seqcount_begin(const seqcount_t *s)
 {
-	unsigned ret = s->sequence;
+	unsigned ret;
+
+repeat:
+	ret = s->sequence;
 	smp_rmb();
+	if (unlikely(ret & 1)) {
+		cpu_relax();
+		goto repeat;
+	}
 	return ret;
 }
 
-/* Test if reader processed invalid data.
- * Equivalent to: iv is odd or sequence number has changed.
- * (iv & 1) || (*s != iv)
- * Using xor saves one conditional branch.
+/*
+ * Test if reader processed invalid data because sequence number has changed.
  */
-static inline int read_seqcount_retry(const seqcount_t *s, unsigned iv)
+static inline int read_seqcount_retry(const seqcount_t *s, unsigned start)
 {
 	smp_rmb();
-	return (iv & 1) | (s->sequence ^ iv);
+
+	return s->sequence != start;
 }
 
 
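The seqcount_t variants follow the same pattern but leave writer serialization to the caller. A minimal sketch, assuming hypothetical bar_seq/bar_lock/bar_data definitions and the static SEQCNT_ZERO initializer provided by this header:

#include <linux/seqlock.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Hypothetical example: a 64-bit value published under a seqcount. */
static seqcount_t bar_seq = SEQCNT_ZERO;
static DEFINE_SPINLOCK(bar_lock);	/* serializes writers only */
static u64 bar_data;

/* Reader: lockless; loops until a stable snapshot is read. */
static u64 bar_read(void)
{
	unsigned seq;
	u64 val;

	do {
		seq = read_seqcount_begin(&bar_seq);
		val = bar_data;
	} while (read_seqcount_retry(&bar_seq, seq));

	return val;
}

/* Writer: takes the external lock, then bumps the sequence around the update. */
static void bar_write(u64 val)
{
	spin_lock(&bar_lock);
	write_seqcount_begin(&bar_seq);
	bar_data = val;
	write_seqcount_end(&bar_seq);
	spin_unlock(&bar_lock);
}

As in the seqlock_t case, the reader no longer needs the (iv & 1) check: an odd start value is now consumed inside read_seqcount_begin() itself.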