Diffstat (limited to 'include/asm-cris/bitops.h')
 -rw-r--r--  include/asm-cris/bitops.h | 18
 1 files changed, 9 insertions, 9 deletions
diff --git a/include/asm-cris/bitops.h b/include/asm-cris/bitops.h
index e3da57f97964..1bddb3f3a289 100644
--- a/include/asm-cris/bitops.h
+++ b/include/asm-cris/bitops.h
@@ -89,7 +89,7 @@ struct __dummy { unsigned long a[100]; };
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -105,7 +105,7 @@ extern inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 	return retval;
 }
 
-extern inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -132,7 +132,7 @@ extern inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -157,7 +157,7 @@ extern inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
  * but actually fail. You must protect multiple accesses with a lock.
  */
 
-extern inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -177,7 +177,7 @@ extern inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -193,7 +193,7 @@ extern inline int test_and_change_bit(int nr, volatile unsigned long *addr)
 
 /* WARNING: non atomic and it can be reordered! */
 
-extern inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -214,7 +214,7 @@ extern inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
  * This routine doesn't need to be atomic.
  */
 
-extern inline int test_bit(int nr, const volatile unsigned long *addr)
+static inline int test_bit(int nr, const volatile unsigned long *addr)
 {
 	unsigned int mask;
 	unsigned int *adr = (unsigned int *)addr;
@@ -258,7 +258,7 @@ extern inline int test_bit(int nr, const volatile unsigned long *addr)
  * @offset: The bitnumber to start searching at
  * @size: The maximum size to search
  */
-extern inline int find_next_zero_bit (const unsigned long * addr, int size, int offset)
+static inline int find_next_zero_bit (const unsigned long * addr, int size, int offset)
 {
 	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
 	unsigned long result = offset & ~31UL;
@@ -366,7 +366,7 @@ found_middle:
 #define minix_test_bit(nr,addr) test_bit(nr,addr)
 #define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)
 
-extern inline int sched_find_first_bit(const unsigned long *b)
+static inline int sched_find_first_bit(const unsigned long *b)
 {
 	if (unlikely(b[0]))
 		return __ffs(b[0]);
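
Every hunk above makes the same one-word change: the bit helpers go from extern inline to static inline. The sketch below is a minimal, standalone illustration of why the static inline spelling is the header-safe one (the helper name and bitmap here are made up, not taken from asm-cris/bitops.h): when the compiler declines to inline a static inline call it may emit a translation-unit-local copy of the function, whereas old-style GNU89 extern inline never emits a definition and can leave the call as a reference to a symbol nothing defines.

/* sketch.c -- hypothetical standalone example, not part of the kernel.
 * It mirrors the shape of the header's test_bit(): pick a 32-bit word,
 * then mask out one bit.  Because the helper is static inline, any .c
 * file including this definition compiles and links on its own, even
 * when the compiler chooses not to inline the call.
 */
#include <stdio.h>

static inline int test_bit_sketch(int nr, const unsigned int *addr)
{
	return (addr[nr >> 5] >> (nr & 31)) & 1;
}

int main(void)
{
	unsigned int bitmap[2] = { 0x5u, 0x0u };	/* bits 0 and 2 set */

	printf("bit 2 = %d, bit 3 = %d\n",
	       test_bit_sketch(2, bitmap), test_bit_sketch(3, bitmap));
	return 0;
}

Had test_bit_sketch been declared extern inline under the traditional GNU89 inline rules gcc used at the time, a non-inlined call (for example at -O0) would have been emitted as a reference to an external function that no object file provides, which is the link-failure mode this patch avoids.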