author		Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>	2007-05-08 03:34:47 -0400
committer	Linus Torvalds <torvalds@woody.linux-foundation.org>	2007-05-08 14:15:20 -0400
commit		7232311ef14c274d88871212a07557f18f4140d1
tree		46027210d51dee5fd5086f159d98bac3535a005d
parent		4431f46f5fe0e3b740dfaf09ba34f0b14688185e
local_t: mips extension
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Ralf Baechle <ralf@linux-mips.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
 include/asm-mips/local.h  | 304 ++++++++++++++++++++++++++++++++++++++------
 include/asm-mips/system.h | 125 ++++++++++++++++++++++
 2 files changed, 390 insertions(+), 39 deletions(-)
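[Editorial sketch, not part of the commit.] local_t is a per-CPU counter type: updates are atomic with respect to interrupts and preemption on the owning CPU, but carry no cross-CPU ordering guarantees, which makes them cheaper than full atomic_long_t operations. A minimal usage sketch of the API this patch implements, assuming the 2.6.21-era per-CPU interfaces; the pkt_count counter and both functions are hypothetical:

	/* Hypothetical per-CPU event counter built on local_t. */
	#include <linux/percpu.h>
	#include <asm/local.h>

	static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);

	static void note_packet(void)
	{
		/* cpu_local_inc() wraps the update in preempt_disable()/
		 * preempt_enable(), so the task cannot migrate mid-update
		 * and touch another CPU's counter. Note it takes the
		 * variable, not an address. */
		cpu_local_inc(pkt_count);
	}

	static long total_packets(void)
	{
		long sum = 0;
		int cpu;

		/* Reads of other CPUs' counters are racy but tolerable
		 * for statistics. */
		for_each_possible_cpu(cpu)
			sum += local_read(&per_cpu(pkt_count, cpu));
		return sum;
	}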
diff --git a/include/asm-mips/local.h b/include/asm-mips/local.h
index 9e2d43bae388..ed882c88e0ca 100644
--- a/include/asm-mips/local.h
+++ b/include/asm-mips/local.h
@@ -1,60 +1,288 @@
-#ifndef _ASM_LOCAL_H
-#define _ASM_LOCAL_H
+#ifndef _ARCH_MIPS_LOCAL_H
+#define _ARCH_MIPS_LOCAL_H
 
 #include <linux/percpu.h>
+#include <linux/bitops.h>
 #include <asm/atomic.h>
+#include <asm/war.h>
 
-#ifdef CONFIG_32BIT
+typedef struct
+{
+	atomic_long_t a;
+} local_t;
 
-typedef atomic_t local_t;
+#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
 
-#define LOCAL_INIT(i)	ATOMIC_INIT(i)
-#define local_read(v)	atomic_read(v)
-#define local_set(v,i)	atomic_set(v,i)
+#define local_read(l)	atomic_long_read(&(l)->a)
+#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
 
-#define local_inc(v)	atomic_inc(v)
-#define local_dec(v)	atomic_dec(v)
-#define local_add(i, v)	atomic_add(i, v)
-#define local_sub(i, v)	atomic_sub(i, v)
+#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
+#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
+#define local_inc(l)	atomic_long_inc(&(l)->a)
+#define local_dec(l)	atomic_long_dec(&(l)->a)
 
-#endif
+/*
+ * Same as above, but return the result value
+ */
+static __inline__ long local_add_return(long i, local_t * l)
+{
+	unsigned long result;
+
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_add_return	\n"
+		"	addu	%0, %1, %3				\n"
+			__SC	"%0, %2					\n"
+		"	beqzl	%0, 1b					\n"
+		"	addu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_add_return	\n"
+		"	addu	%0, %1, %3				\n"
+			__SC	"%0, %2					\n"
+		"	beqz	%0, 1b					\n"
+		"	addu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else {
+		unsigned long flags;
 
-#ifdef CONFIG_64BIT
+		local_irq_save(flags);
+		result = l->a.counter;
+		result += i;
+		l->a.counter = result;
+		local_irq_restore(flags);
+	}
 
-typedef atomic64_t local_t;
+	return result;
+}
 
-#define LOCAL_INIT(i)	ATOMIC64_INIT(i)
-#define local_read(v)	atomic64_read(v)
-#define local_set(v,i)	atomic64_set(v,i)
+static __inline__ long local_sub_return(long i, local_t * l)
+{
+	unsigned long result;
 
-#define local_inc(v)	atomic64_inc(v)
-#define local_dec(v)	atomic64_dec(v)
-#define local_add(i, v)	atomic64_add(i, v)
-#define local_sub(i, v)	atomic64_sub(i, v)
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		unsigned long temp;
 
-#endif
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_sub_return	\n"
+		"	subu	%0, %1, %3				\n"
+			__SC	"%0, %2					\n"
+		"	beqzl	%0, 1b					\n"
+		"	subu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		unsigned long temp;
 
-#define __local_inc(v)		((v)->counter++)
-#define __local_dec(v)		((v)->counter--)
-#define __local_add(i,v)	((v)->counter+=(i))
-#define __local_sub(i,v)	((v)->counter-=(i))
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_sub_return	\n"
+		"	subu	%0, %1, %3				\n"
+			__SC	"%0, %2					\n"
+		"	beqz	%0, 1b					\n"
+		"	subu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		result = l->a.counter;
+		result -= i;
+		l->a.counter = result;
+		local_irq_restore(flags);
+	}
+
+	return result;
+}
 
 /*
- * Use these for per-cpu local_t variables: on some archs they are
+ * local_sub_if_positive - conditionally subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @l: pointer of type local_t
+ *
+ * Atomically test @l and subtract @i if @l is greater or equal than @i.
+ * The function returns the old value of @l minus @i.
+ */
+static __inline__ long local_sub_if_positive(long i, local_t * l)
+{
+	unsigned long result;
+
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	bltz	%0, 1f					\n"
+			__SC	"%0, %2					\n"
+		"	.set	noreorder				\n"
+		"	beqzl	%0, 1b					\n"
+		"	 dsubu	%0, %1, %3				\n"
+		"	.set	reorder					\n"
+		"1:							\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	bltz	%0, 1f					\n"
+			__SC	"%0, %2					\n"
+		"	.set	noreorder				\n"
+		"	beqz	%0, 1b					\n"
+		"	 dsubu	%0, %1, %3				\n"
+		"	.set	reorder					\n"
+		"1:							\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
+		: "Ir" (i), "m" (l->a.counter)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		result = l->a.counter;
+		result -= i;
+		if (result >= 0)
+			l->a.counter = result;
+		local_irq_restore(flags);
+	}
+
+	return result;
+}
+
+#define local_cmpxchg(l, o, n) \
+	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
+#define local_xchg(l, n) (xchg_local(&((l)->a.counter),(n)))
+
+/**
+ * local_add_unless - add unless the number is a given value
+ * @l: pointer of type local_t
+ * @a: the amount to add to l...
+ * @u: ...unless l is equal to u.
+ *
+ * Atomically adds @a to @l, so long as it was not @u.
+ * Returns non-zero if @l was not @u, and zero otherwise.
+ */
+#define local_add_unless(l, a, u)				\
+({								\
+	long c, old;						\
+	c = local_read(l);					\
+	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
+#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
+
+#define local_dec_return(l) local_sub_return(1,(l))
+#define local_inc_return(l) local_add_return(1,(l))
+
+/*
+ * local_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @l: pointer of type local_t
+ *
+ * Atomically subtracts @i from @l and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)
+
+/*
+ * local_inc_and_test - increment and test
+ * @l: pointer of type local_t
+ *
+ * Atomically increments @l by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define local_inc_and_test(l) (local_inc_return(l) == 0)
+
+/*
+ * local_dec_and_test - decrement by 1 and test
+ * @l: pointer of type local_t
+ *
+ * Atomically decrements @l by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
+
+/*
+ * local_dec_if_positive - decrement by 1 if old value positive
+ * @l: pointer of type local_t
+ */
+#define local_dec_if_positive(l)	local_sub_if_positive(1, l)
+
+/*
+ * local_add_negative - add and test if negative
+ * @l: pointer of type local_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @l and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#define local_add_negative(i,l) (local_add_return(i, (l)) < 0)
+
+/* Use these for per-cpu local_t variables: on some archs they are
  * much more efficient than these naive implementations.  Note they take
  * a variable, not an address.
  */
-#define cpu_local_read(v)	local_read(&__get_cpu_var(v))
-#define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
 
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+#define __local_inc(l)		((l)->a.counter++)
+#define __local_dec(l)		((l)->a.counter--)
+#define __local_add(i,l)	((l)->a.counter+=(i))
+#define __local_sub(i,l)	((l)->a.counter-=(i))
+
+/* Need to disable preemption for the cpu local counters otherwise we could
+   still access a variable of a previous CPU in a non atomic way. */
+#define cpu_local_wrap_v(l)		\
+	({ local_t res__;		\
+	   preempt_disable(); 		\
+	   res__ = (l);			\
+	   preempt_enable();		\
+	   res__; })
+#define cpu_local_wrap(l)		\
+	({ preempt_disable();		\
+	   l;				\
+	   preempt_enable(); })		\
+
+#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
+#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
+#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var(l)))
+#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var(l)))
+#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
+#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
 
-#define __cpu_local_inc(v)	__local_inc(&__get_cpu_var(v))
-#define __cpu_local_dec(v)	__local_dec(&__get_cpu_var(v))
-#define __cpu_local_add(i, v)	__local_add((i), &__get_cpu_var(v))
-#define __cpu_local_sub(i, v)	__local_sub((i), &__get_cpu_var(v))
+#define __cpu_local_inc(l)	cpu_local_inc(l)
+#define __cpu_local_dec(l)	cpu_local_dec(l)
+#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
+#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
 
-#endif /* _ASM_LOCAL_H */
+#endif /* _ARCH_MIPS_LOCAL_H */
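[Editorial sketch, not part of the commit.] The semantics of the return-value and conditional operations added above, illustrated assuming no concurrent updates:

	local_t v = LOCAL_INIT(0);

	long n = local_add_return(5, &v);	/* v == 5, n == 5 */
	n = local_sub_return(2, &v);		/* v == 3, n == 3 */

	/* local_add_unless() adds only while the current value differs
	 * from the given one; local_inc_not_zero() builds on it for
	 * refcount-style "take a reference unless already dead" logic. */
	local_set(&v, 0);
	if (local_inc_not_zero(&v))
		;	/* not reached: v was zero, so it stays zero */

	/* local_cmpxchg() maps onto the cmpxchg_local() primitive added
	 * to system.h below. */
	n = local_cmpxchg(&v, 0, 42);		/* n == 0, v == 42 */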
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index 290887077e44..d53dd7245561 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -262,6 +262,58 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 	return retval;
 }
 
+static inline unsigned long __cmpxchg_u32_local(volatile int * m,
+	unsigned long old, unsigned long new)
+{
+	__u32 retval;
+
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	noat					\n"
+		"	.set	mips3					\n"
+		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
+		"	bne	%0, %z3, 2f				\n"
+		"	.set	mips0					\n"
+		"	move	$1, %z4					\n"
+		"	.set	mips3					\n"
+		"	sc	$1, %1					\n"
+		"	beqzl	$1, 1b					\n"
+		"2:							\n"
+		"	.set	pop					\n"
+		: "=&r" (retval), "=R" (*m)
+		: "R" (*m), "Jr" (old), "Jr" (new)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	noat					\n"
+		"	.set	mips3					\n"
+		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
+		"	bne	%0, %z3, 2f				\n"
+		"	.set	mips0					\n"
+		"	move	$1, %z4					\n"
+		"	.set	mips3					\n"
+		"	sc	$1, %1					\n"
+		"	beqz	$1, 1b					\n"
+		"2:							\n"
+		"	.set	pop					\n"
+		: "=&r" (retval), "=R" (*m)
+		: "R" (*m), "Jr" (old), "Jr" (new)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		retval = *m;
+		if (retval == old)
+			*m = new;
+		local_irq_restore(flags);	/* implies memory barrier */
+	}
+
+	return retval;
+}
+
 #ifdef CONFIG_64BIT
 static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 	unsigned long new)
@@ -315,10 +367,62 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 
 	return retval;
 }
+
+static inline unsigned long __cmpxchg_u64_local(volatile int * m,
+	unsigned long old, unsigned long new)
+{
+	__u64 retval;
+
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	noat					\n"
+		"	.set	mips3					\n"
+		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
+		"	bne	%0, %z3, 2f				\n"
+		"	move	$1, %z4					\n"
+		"	scd	$1, %1					\n"
+		"	beqzl	$1, 1b					\n"
+		"2:							\n"
+		"	.set	pop					\n"
+		: "=&r" (retval), "=R" (*m)
+		: "R" (*m), "Jr" (old), "Jr" (new)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	noat					\n"
+		"	.set	mips3					\n"
+		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
+		"	bne	%0, %z3, 2f				\n"
+		"	move	$1, %z4					\n"
+		"	scd	$1, %1					\n"
+		"	beqz	$1, 1b					\n"
+		"2:							\n"
+		"	.set	pop					\n"
+		: "=&r" (retval), "=R" (*m)
+		: "R" (*m), "Jr" (old), "Jr" (new)
+		: "memory");
+	} else {
+		unsigned long flags;
+
+		local_irq_save(flags);
+		retval = *m;
+		if (retval == old)
+			*m = new;
+		local_irq_restore(flags);	/* implies memory barrier */
+	}
+
+	return retval;
+}
+
 #else
 extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
 	volatile int * m, unsigned long old, unsigned long new);
 #define __cmpxchg_u64 __cmpxchg_u64_unsupported_on_32bit_kernels
+extern unsigned long __cmpxchg_u64_local_unsupported_on_32bit_kernels(
+	volatile int * m, unsigned long old, unsigned long new);
+#define __cmpxchg_u64_local __cmpxchg_u64_local_unsupported_on_32bit_kernels
 #endif
 
 /* This function doesn't exist, so you'll get a linker error
@@ -338,7 +442,26 @@ static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old,
 	return old;
 }
 
-#define cmpxchg(ptr,old,new) ((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
+static inline unsigned long __cmpxchg_local(volatile void * ptr,
+	unsigned long old, unsigned long new, int size)
+{
+	switch (size) {
+	case 4:
+		return __cmpxchg_u32_local(ptr, old, new);
+	case 8:
+		return __cmpxchg_u64_local(ptr, old, new);
+	}
+	__cmpxchg_called_with_bad_pointer();
+	return old;
+}
+
+#define cmpxchg(ptr,old,new) \
+	((__typeof__(*(ptr)))__cmpxchg((ptr), \
+		(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
+
+#define cmpxchg_local(ptr,old,new) \
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
+		(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
 
 extern void set_handler (unsigned long offset, void *addr, unsigned long len);
 extern void set_uncached_handler (unsigned long offset, void *addr, unsigned long len);
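[Editorial sketch, not part of the commit.] How the new cmpxchg_local() is meant to be used: unlike cmpxchg(), it only has to be atomic against interrupts and preemption on the current CPU, so callers pin themselves to one CPU first. On MIPS this patch implements it with the same LL/SC (or IRQs-off) sequence as cmpxchg(), but architectures where the distinction matters can implement it more cheaply. The reset_flag variable and function are hypothetical:

	static DEFINE_PER_CPU(unsigned long, reset_flag);

	static void request_reset(void)
	{
		unsigned long *p;

		preempt_disable();		/* stay on one CPU */
		p = &__get_cpu_var(reset_flag);
		/* Needs to be atomic only w.r.t. this CPU, so the
		 * _local variant suffices here. */
		if (cmpxchg_local(p, 0, 1) == 0)
			;	/* we won: flag was clear and is now set */
		preempt_enable();
	}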