author    Stuart Menefy <stuart.menefy@st.com>    2007-11-30 02:12:36 -0500
committer Paul Mundt <lethal@linux-sh.org>        2008-01-27 23:18:58 -0500
commit    1efe4ce3ca126da77e450d5a83f7201949d76f62 (patch)
tree      fbae9902aa4103a9e86d06f841d580f24682e7b3 /include/asm-sh/bitops-grb.h
parent    53ff09422e5e7a6d6198b767c8f494e43ec8e3ae (diff)
sh: GUSA atomic rollback support.

This implements kernel-level atomic rollback built on top of gUSA, as an
alternative non-IRQ based atomicity method. This is generally a faster
method for platforms that are lacking the LL/SC pairs that SH-4A and
later use, and is only supportable on legacy cores.

Signed-off-by: Stuart Menefy <stuart.menefy@st.com>
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
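
Every routine in the new header wraps its read-modify-write in the same
gUSA critical-section shape. An annotated sketch of that shape (an
editorial illustration, not part of the patch; `n` stands for the byte
size of the region, 6 or 14 in the code below):

	    .align 2
	    mova   1f,  r0     /* r0 = address of the end point (label 1) */
	    mov    r15, r1     /* r1 = saved stack pointer */
	    mov    #-n, r15    /* LOGIN: r15 = -(region size in bytes) */
	    ...                /* load / modify, then one committing store */
	1:  mov    r1,  r15    /* LOGOUT: restore sp, sequence complete */

While r15 holds the negative size, the exception-entry code treats the
region as atomic: if an interrupt arrives before the end point, the
rollback code restores r15 from r1 and rewinds the PC to r0 + r15, the
first instruction of the region, so the whole sequence reruns from the
load rather than resuming mid-way. The single committing store is thus
the point of no return.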
Diffstat (limited to 'include/asm-sh/bitops-grb.h')
-rw-r--r--	include/asm-sh/bitops-grb.h	169
1 file changed, 169 insertions(+), 0 deletions(-)
diff --git a/include/asm-sh/bitops-grb.h b/include/asm-sh/bitops-grb.h
new file mode 100644
index 000000000000..a5907b94395b
--- /dev/null
+++ b/include/asm-sh/bitops-grb.h
@@ -0,0 +1,169 @@
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,  %0      \n\t" /* load old value */
		"   or     %2,   %0      \n\t" /* or */
		"   mov.l  %0,   @%1     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = ~(1 << (nr & 0x1f));
	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,  %0      \n\t" /* load old value */
		"   and    %2,   %0      \n\t" /* and */
		"   mov.l  %0,   @%1     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,  %0      \n\t" /* load old value */
		"   xor    %2,   %0      \n\t" /* xor */
		"   mov.l  %0,   @%1     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14, r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%2,  %0      \n\t" /* load old value */
		"   mov    %0,   %1      \n\t"
		"   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,  %1      \n\t" /* retval = -1 */
		"   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   or     %3,   %0      \n\t"
		"   mov.l  %0,   @%2     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14, r15     \n\t" /* LOGIN */
		"   mov.l  @%2,  %0      \n\t" /* load old value */
		"   mov    %0,   %1      \n\t" /* %1 = *a */
		"   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,  %1      \n\t" /* retval = -1 */
		"   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   and    %4,   %0      \n\t"
		"   mov.l  %0,   @%2     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask),
		  "r" (not_mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2             \n\t"
		"   mova   1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14, r15     \n\t" /* LOGIN */
		"   mov.l  @%2,  %0      \n\t" /* load old value */
		"   mov    %0,   %1      \n\t" /* %1 = *a */
		"   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,  %1      \n\t" /* retval = -1 */
		"   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   xor    %3,   %0      \n\t"
		"   mov.l  %0,   @%2     \n\t" /* store new value */
		"1: mov    r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

#endif /* __ASM_SH_BITOPS_GRB_H */
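
For reference, these routines back the kernel's generic bitops interface,
so callers use them exactly like the IRQ-masking variants this method is
an alternative to. A minimal, hypothetical usage sketch (the `flags` word
and both function names are illustrative, not from this patch):

	/* Hypothetical example: use bit 0 of a word as an ownership flag. */
	static unsigned long flags;

	static int example_try_claim(void)
	{
		/* Atomically set bit 0; nonzero means it was already set. */
		if (test_and_set_bit(0, &flags))
			return 0;	/* already claimed elsewhere */
		return 1;		/* we flipped 0 -> 1, claim succeeded */
	}

	static void example_release(void)
	{
		clear_bit(0, &flags);	/* atomically clear bit 0 again */
	}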