author	Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>	2007-05-08 03:34:44 -0400
committer	Linus Torvalds <torvalds@woody.linux-foundation.org>	2007-05-08 14:15:20 -0400
commit	a075227948636e10aa2cc2d8725fbbab27681d4a (patch)
tree	6b459363916c4db4cc62a293f122f4c9172b1d6a /include/asm-i386/local.h
parent	f43f7b46eb101f50950cfcead0cb0b7a9c4f6823 (diff)
local_t: i386 extension
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Andi Kleen <ak@muc.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-i386/local.h')
-rw-r--r--	include/asm-i386/local.h	205
1 file changed, 178 insertions, 27 deletions
diff --git a/include/asm-i386/local.h b/include/asm-i386/local.h
index 12060e22f7e2..e13d3e98823f 100644
--- a/include/asm-i386/local.h
+++ b/include/asm-i386/local.h
@@ -2,47 +2,198 @@
 #define _ARCH_I386_LOCAL_H

 #include <linux/percpu.h>
+#include <asm/system.h>
+#include <asm/atomic.h>

 typedef struct
 {
-	volatile long counter;
+	atomic_long_t a;
 } local_t;

-#define LOCAL_INIT(i)	{ (i) }
+#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

-#define local_read(v)	((v)->counter)
-#define local_set(v,i)	(((v)->counter) = (i))
+#define local_read(l)	atomic_long_read(&(l)->a)
+#define local_set(l,i)	atomic_long_set(&(l)->a, (i))

-static __inline__ void local_inc(local_t *v)
+static __inline__ void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
 		"incl %0"
-		:"+m" (v->counter));
+		:"+m" (l->a.counter));
 }

-static __inline__ void local_dec(local_t *v)
+static __inline__ void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
 		"decl %0"
-		:"+m" (v->counter));
+		:"+m" (l->a.counter));
 }

-static __inline__ void local_add(long i, local_t *v)
+static __inline__ void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
 		"addl %1,%0"
-		:"+m" (v->counter)
+		:"+m" (l->a.counter)
 		:"ir" (i));
 }

-static __inline__ void local_sub(long i, local_t *v)
+static __inline__ void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
 		"subl %1,%0"
-		:"+m" (v->counter)
+		:"+m" (l->a.counter)
 		:"ir" (i));
 }

+/**
+ * local_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @l: pointer of type local_t
+ *
+ * Atomically subtracts @i from @l and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static __inline__ int local_sub_and_test(long i, local_t *l)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		"subl %2,%0; sete %1"
+		:"+m" (l->a.counter), "=qm" (c)
+		:"ir" (i) : "memory");
+	return c;
+}
+
+/**
+ * local_dec_and_test - decrement and test
+ * @l: pointer of type local_t
+ *
+ * Atomically decrements @l by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static __inline__ int local_dec_and_test(local_t *l)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		"decl %0; sete %1"
+		:"+m" (l->a.counter), "=qm" (c)
+		: : "memory");
+	return c != 0;
+}
+
+/**
+ * local_inc_and_test - increment and test
+ * @l: pointer of type local_t
+ *
+ * Atomically increments @l by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static __inline__ int local_inc_and_test(local_t *l)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		"incl %0; sete %1"
+		:"+m" (l->a.counter), "=qm" (c)
+		: : "memory");
+	return c != 0;
+}
+
+/**
+ * local_add_negative - add and test if negative
+ * @l: pointer of type local_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @l and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static __inline__ int local_add_negative(long i, local_t *l)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		"addl %2,%0; sets %1"
+		:"+m" (l->a.counter), "=qm" (c)
+		:"ir" (i) : "memory");
+	return c;
+}
+
+/**
+ * local_add_return - add and return
+ * @l: pointer of type local_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @l and returns @i + @l
+ */
+static __inline__ long local_add_return(long i, local_t *l)
+{
+	long __i;
+#ifdef CONFIG_M386
+	unsigned long flags;
+	if(unlikely(boot_cpu_data.x86==3))
+		goto no_xadd;
+#endif
+	/* Modern 486+ processor */
+	__i = i;
+	__asm__ __volatile__(
+		"xaddl %0, %1;"
+		:"+r" (i), "+m" (l->a.counter)
+		: : "memory");
+	return i + __i;
+
+#ifdef CONFIG_M386
+no_xadd: /* Legacy 386 processor */
+	local_irq_save(flags);
+	__i = local_read(l);
+	local_set(l, i + __i);
+	local_irq_restore(flags);
+	return i + __i;
+#endif
+}
+
+static __inline__ long local_sub_return(long i, local_t *l)
+{
+	return local_add_return(-i,l);
+}
+
+#define local_inc_return(l)  (local_add_return(1,l))
+#define local_dec_return(l)  (local_sub_return(1,l))
+
+#define local_cmpxchg(l, o, n) \
+	(cmpxchg_local(&((l)->a.counter), (o), (n)))
+/* Always has a lock prefix */
+#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
+
+/**
+ * local_add_unless - add unless the number is a given value
+ * @l: pointer of type local_t
+ * @a: the amount to add to l...
+ * @u: ...unless l is equal to u.
+ *
+ * Atomically adds @a to @l, so long as it was not @u.
+ * Returns non-zero if @l was not @u, and zero otherwise.
+ */
+#define local_add_unless(l, a, u)				\
+({								\
+	long c, old;						\
+	c = local_read(l);					\
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = local_cmpxchg((l), c, c + (a));		\
+		if (likely(old == c))				\
+			break;					\
+		c = old;					\
+	}							\
+	c != (u);						\
+})
+#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
+
 /* On x86, these are no better than the atomic variants. */
 #define __local_inc(l)	local_inc(l)
 #define __local_dec(l)	local_dec(l)
@@ -56,27 +207,27 @@ static __inline__ void local_sub(long i, local_t *v)

 /* Need to disable preemption for the cpu local counters otherwise we could
    still access a variable of a previous CPU in a non atomic way. */
-#define cpu_local_wrap_v(v)		\
+#define cpu_local_wrap_v(l)		\
 	({ local_t res__;		\
 	   preempt_disable(); 		\
-	   res__ = (v);			\
+	   res__ = (l);			\
 	   preempt_enable();		\
 	   res__; })
-#define cpu_local_wrap(v)		\
+#define cpu_local_wrap(l)		\
 	({ preempt_disable();		\
-	   v;				\
+	   l;				\
 	   preempt_enable(); })		\

-#define cpu_local_read(v)    cpu_local_wrap_v(local_read(&__get_cpu_var(v)))
-#define cpu_local_set(v, i)  cpu_local_wrap(local_set(&__get_cpu_var(v), (i)))
-#define cpu_local_inc(v)     cpu_local_wrap(local_inc(&__get_cpu_var(v)))
-#define cpu_local_dec(v)     cpu_local_wrap(local_dec(&__get_cpu_var(v)))
-#define cpu_local_add(i, v)  cpu_local_wrap(local_add((i), &__get_cpu_var(v)))
-#define cpu_local_sub(i, v)  cpu_local_wrap(local_sub((i), &__get_cpu_var(v)))
+#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
+#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
+#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var(l)))
+#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var(l)))
+#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
+#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

-#define __cpu_local_inc(v)	cpu_local_inc(v)
-#define __cpu_local_dec(v)	cpu_local_dec(v)
-#define __cpu_local_add(i, v)	cpu_local_add((i), (v))
-#define __cpu_local_sub(i, v)	cpu_local_sub((i), (v))
+#define __cpu_local_inc(l)	cpu_local_inc(l)
+#define __cpu_local_dec(l)	cpu_local_dec(l)
+#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
+#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))

 #endif /* _ARCH_I386_LOCAL_H */
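
A minimal usage sketch (not part of the patch), assuming a kernel build context where <asm/local.h> resolves to this header; the per-CPU counter name and the two helpers below are hypothetical. It shows the intended pattern: cpu_local_inc() as the preempt-safe fast path, and local_xchg() to drain the current CPU's counter.

#include <linux/percpu.h>
#include <asm/local.h>

/* Hypothetical per-CPU event counter; per-CPU storage starts out zeroed. */
static DEFINE_PER_CPU(local_t, evt_count);

/* Fast path: bump this CPU's counter without a LOCK-prefixed instruction. */
static void evt_note(void)
{
	cpu_local_inc(evt_count);
}

/* Read and clear the current CPU's counter (local_xchg keeps its lock prefix). */
static long evt_drain_this_cpu(void)
{
	local_t *l = &get_cpu_var(evt_count);
	long old = local_xchg(l, 0);

	put_cpu_var(evt_count);
	return old;
}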