Diffstat (limited to 'include/asm-h8300/atomic.h')
-rw-r--r--	include/asm-h8300/atomic.h	113
1 file changed, 113 insertions(+), 0 deletions(-)
diff --git a/include/asm-h8300/atomic.h b/include/asm-h8300/atomic.h
new file mode 100644
index 000000000000..7230f6507995
--- /dev/null
+++ b/include/asm-h8300/atomic.h
@@ -0,0 +1,113 @@
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
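
/*
 * A minimal usage sketch (hypothetical counter, not part of this
 * header):
 *
 *	static atomic_t count = ATOMIC_INIT(0);
 *
 *	atomic_set(&count, 5);
 *	if (atomic_read(&count) == 5)
 *		...
 */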

#include <asm/system.h>
#include <linux/kernel.h>

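/*
 * The H8/300 port is uniprocessor-only, so disabling interrupts around
 * the read-modify-write sequence below is enough to make it atomic.
 */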
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
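
/*
 * Example (hypothetical, with a made-up do_first_time_setup() helper):
 * a counter initialised to -1 lets exactly one caller observe the
 * increment reaching zero:
 *
 *	static atomic_t once = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&once))
 *		do_first_time_setup();
 */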

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}
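
/*
 * Typical reference-counting pattern (hypothetical obj/refcnt names,
 * not part of this header):
 *
 *	atomic_inc(&obj->refcnt);
 *	...
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */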
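
/*
 * CCR bit 7 is the H8/300 interrupt mask (I) flag: "orc #0x80,ccr" sets
 * it to block interrupts for the duration of the read-modify-write, and
 * the copy saved in r1l is restored afterwards with "ldc".
 */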
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* disable interrupts */
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"		/* *v &= ~mask */
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"		/* restore CCR */
			     : "+m" (*v)
			     : "g" (~(mask))
			     : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* disable interrupts */
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"		/* *v |= mask */
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"		/* restore CCR */
			     : "+m" (*v)
			     : "g" (mask)
			     : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
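
/*
 * barrier() is only a compiler barrier; nothing stronger is needed on
 * this uniprocessor-only port, where the operations above already run
 * with interrupts disabled.
 */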

#endif /* __ARCH_H8300_ATOMIC__ */