aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-arm26/atomic.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-arm26/atomic.h')
-rw-r--r--include/asm-arm26/atomic.h123
1 file changed, 0 insertions, 123 deletions
diff --git a/include/asm-arm26/atomic.h b/include/asm-arm26/atomic.h
deleted file mode 100644
index d6dd42374cf3..000000000000
--- a/include/asm-arm26/atomic.h
+++ /dev/null
@@ -1,123 +0,0 @@
1/*
2 * linux/include/asm-arm26/atomic.h
3 *
4 * Copyright (c) 1996 Russell King.
5 * Modified for arm26 by Ian Molton
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
10 *
11 * Changelog:
12 * 25-11-2004 IM Updated for 2.6.9
13 * 27-06-1996 RMK Created
14 * 13-04-1997 RMK Made functions atomic!
15 * 07-12-1997 RMK Upgraded for v2.1.
16 * 26-08-1998 PJB Added #ifdef __KERNEL__
17 *
18 * FIXME - its probably worth seeing what these compile into...
19 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

/* arm26 is uniprocessor-only; these irq-disable based atomics are not SMP-safe. */
#ifdef CONFIG_SMP
#error SMP is NOT supported
#endif

/* Atomic integer: volatile so the compiler re-reads/re-writes memory on each access. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer, e.g.: static atomic_t refcount = ATOMIC_INIT(1); */
#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__
#include <asm/system.h>

/* Plain read/write are already atomic for an aligned int on this architecture. */
#define atomic_read(v)	((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))
37static inline int atomic_add_return(int i, atomic_t *v)
38{
39 unsigned long flags;
40 int val;
41
42 local_irq_save(flags);
43 val = v->counter;
44 v->counter = val += i;
45 local_irq_restore(flags);
46
47 return val;
48}
49
50static inline int atomic_sub_return(int i, atomic_t *v)
51{
52 unsigned long flags;
53 int val;
54
55 local_irq_save(flags);
56 val = v->counter;
57 v->counter = val -= i;
58 local_irq_restore(flags);
59
60 return val;
61}
62
63static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
64{
65 int ret;
66 unsigned long flags;
67
68 local_irq_save(flags);
69 ret = v->counter;
70 if (likely(ret == old))
71 v->counter = new;
72 local_irq_restore(flags);
73
74 return ret;
75}
76
/* Swap v->counter with 'new', returning the old value; relies on the
 * architecture xchg() — presumably from <asm/system.h> included above. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
78
79static inline int atomic_add_unless(atomic_t *v, int a, int u)
80{
81 int ret;
82 unsigned long flags;
83
84 local_irq_save(flags);
85 ret = v->counter;
86 if (ret != u)
87 v->counter += a;
88 local_irq_restore(flags);
89
90 return ret != u;
91}
/* Increment @v unless it is zero; non-zero return means the increment happened. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
93
/*
 * Atomically clear the bits of *@addr that are set in @mask,
 * bracketing the read-modify-write with an irq-off section.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;
	const unsigned long keep = ~mask;

	local_irq_save(flags);
	*addr &= keep;
	local_irq_restore(flags);
}
102
/* Void-returning variants built on the *_return primitives above. */
#define atomic_add(i, v)	(void) atomic_add_return(i, v)
#define atomic_inc(v)		(void) atomic_add_return(1, v)
#define atomic_sub(i, v)	(void) atomic_sub_return(i, v)
#define atomic_dec(v)		(void) atomic_sub_return(1, v)

/* Test variants: true when the operation leaves the counter at zero. */
#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* True when adding @i drives the counter negative. */
#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

/* Atomic operations are already serializing on ARM26, so these memory
 * barriers only need to stop compiler reordering. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

/* Pull in generic 64-bit / fallback atomic helpers. */
#include <asm-generic/atomic.h>
#endif	/* __KERNEL__ */
#endif	/* __ASM_ARM_ATOMIC_H */