Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86_64/io.h   2
-rw-r--r--  include/linux/seqlock.h   4
2 files changed, 3 insertions, 3 deletions
diff --git a/include/asm-x86_64/io.h b/include/asm-x86_64/io.h
index cafdfb37f0d8..a05da8a50bfd 100644
--- a/include/asm-x86_64/io.h
+++ b/include/asm-x86_64/io.h
@@ -177,7 +177,7 @@ static inline __u16 __readw(const volatile void __iomem *addr)
 {
 	return *(__force volatile __u16 *)addr;
 }
-static inline __u32 __readl(const volatile void __iomem *addr)
+static __always_inline __u32 __readl(const volatile void __iomem *addr)
 {
 	return *(__force volatile __u32 *)addr;
 }
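
For context (not part of the patch): __readl() is the raw x86-64 MMIO load that readl() is built on in this header, so forcing it inline with __always_inline keeps each MMIO read a single volatile load instead of a function call. A minimal sketch of a caller, assuming a base pointer obtained from an earlier ioremap() and a hypothetical MY_STATUS_REG offset:

/*
 * Illustrative sketch only, not part of this patch.  The register offset
 * and the helper are hypothetical; "base" is assumed to come from an
 * earlier ioremap() of a device BAR.
 */
#include <linux/types.h>
#include <asm/io.h>

#define MY_STATUS_REG	0x10	/* hypothetical register offset */

static u32 my_device_status(void __iomem *base)
{
	/* readl() on x86-64 is built on __readl(); with __always_inline
	 * this compiles down to a single 32-bit volatile load. */
	return readl(base + MY_STATUS_REG);
}
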
diff --git a/include/linux/seqlock.h b/include/linux/seqlock.h
index fca9b0fb5b4e..5a095572881d 100644
--- a/include/linux/seqlock.h
+++ b/include/linux/seqlock.h
@@ -73,7 +73,7 @@ static inline int write_tryseqlock(seqlock_t *sl)
 }
 
 /* Start of read calculation -- fetch last complete writer token */
-static inline unsigned read_seqbegin(const seqlock_t *sl)
+static __always_inline unsigned read_seqbegin(const seqlock_t *sl)
 {
 	unsigned ret = sl->sequence;
 	smp_rmb();
@@ -88,7 +88,7 @@ static inline unsigned read_seqbegin(const seqlock_t *sl)
  *
  * Using xor saves one conditional branch.
  */
-static inline int read_seqretry(const seqlock_t *sl, unsigned iv)
+static __always_inline int read_seqretry(const seqlock_t *sl, unsigned iv)
 {
 	smp_rmb();
 	return (iv & 1) | (sl->sequence ^ iv);
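
For context (not part of the patch): read_seqbegin() and read_seqretry() are the reader side of a seqlock, and marking them __always_inline keeps the retry loop free of call overhead. A minimal sketch of the standard reader/writer pairing; the lock, the protected value and both access helpers are invented for illustration:

/*
 * Illustrative sketch only, not part of this patch.  The seqlock, the
 * protected 64-bit value and the two helpers are hypothetical; the
 * SEQLOCK_UNLOCKED initializer is as spelled in kernels of this vintage.
 */
#include <linux/seqlock.h>
#include <linux/types.h>

static seqlock_t my_lock = SEQLOCK_UNLOCKED;
static u64 my_value;

static void my_update_value(u64 v)
{
	write_seqlock(&my_lock);	/* sequence becomes odd while the write is in progress */
	my_value = v;
	write_sequnlock(&my_lock);	/* sequence becomes even again */
}

static u64 my_read_value(void)
{
	unsigned seq;
	u64 val;

	do {
		seq = read_seqbegin(&my_lock);	/* fetch last complete writer token */
		val = my_value;			/* lockless read of the protected data */
	} while (read_seqretry(&my_lock, seq));	/* retry if a writer ran concurrently */

	return val;
}
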