Diffstat (limited to 'arch/arc/include/asm/bitops.h')
 arch/arc/include/asm/bitops.h | 60 +++++++++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 57 insertions(+), 3 deletions(-)
diff --git a/arch/arc/include/asm/bitops.h b/arch/arc/include/asm/bitops.h
index 0352fb8d21b9..8da87feec59a 100644
--- a/arch/arc/include/asm/bitops.h
+++ b/arch/arc/include/asm/bitops.h
@@ -22,7 +22,7 @@
 #include <asm/smp.h>
 #endif
 
-#if defined(CONFIG_ARC_HAS_LLSC)
+#ifdef CONFIG_ARC_HAS_LLSC
 
 /*
  * Hardware assisted Atomic-R-M-W
@@ -88,7 +88,7 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
 	return (old & (1 << nr)) != 0;				\
 }
 
-#else /* !CONFIG_ARC_HAS_LLSC */
+#elif !defined(CONFIG_ARC_PLAT_EZNPS)
 
 /*
  * Non hardware assisted Atomic-R-M-W
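
Taken together with the hunks below, this turns the two-way LLSC/non-LLSC split into a three-way one. A sketch of the resulting preprocessor structure of the atomic section (bodies elided, comments taken from the surrounding context):

	#ifdef CONFIG_ARC_HAS_LLSC
		/* hardware assisted Atomic-R-M-W (LLOCK/SCOND) */
	#elif !defined(CONFIG_ARC_PLAT_EZNPS)
		/* non hardware assisted Atomic-R-M-W */
	#else
		/* EZchip NPS: atomics via dedicated CTOP instructions */
	#endif
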
@@ -139,7 +139,55 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
 	return (old & (1UL << (nr & 0x1f))) != 0;	\
 }
 
-#endif /* CONFIG_ARC_HAS_LLSC */
+#else /* CONFIG_ARC_PLAT_EZNPS */
+
+#define BIT_OP(op, c_op, asm_op)					\
+static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
+{									\
+	m += nr >> 5;							\
+									\
+	nr = (1UL << (nr & 0x1f));					\
+	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\
+		nr = ~nr;						\
+									\
+	__asm__ __volatile__(						\
+	"	mov r2, %0\n"						\
+	"	mov r3, %1\n"						\
+	"	.word %2\n"						\
+	:								\
+	: "r"(nr), "r"(m), "i"(asm_op)					\
+	: "r2", "r3", "memory");					\
+}
+
+#define TEST_N_BIT_OP(op, c_op, asm_op)					\
+static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
+{									\
+	unsigned long old;						\
+									\
+	m += nr >> 5;							\
+									\
+	nr = old = (1UL << (nr & 0x1f));				\
+	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\
+		old = ~old;						\
+									\
+	/* Explicit full memory barrier needed before/after */		\
+	smp_mb();							\
+									\
+	__asm__ __volatile__(						\
+	"	mov r2, %0\n"						\
+	"	mov r3, %1\n"						\
+	"	.word %2\n"						\
+	"	mov %0, r2"						\
+	: "+r"(old)							\
+	: "r"(m), "i"(asm_op)						\
+	: "r2", "r3", "memory");					\
+									\
+	smp_mb();							\
+									\
+	return (old & nr) != 0;						\
+}
+
+#endif /* CONFIG_ARC_PLAT_EZNPS */
 
 /***************************************
  * Non atomic variants
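
For orientation, here is roughly what the new BIT_OP macro generates for clear_bit() once the BIT_OPS(clear, & ~, CTOP_INST_AAND_DI_R2_R2_R3) instantiation in the last hunk is expanded; the asm_op comparison is against a compile-time constant, so the compiler folds the branch, written out by hand in this sketch:

	static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
	{
		m += nr >> 5;			/* step to the 32-bit word holding the bit */

		nr = (1UL << (nr & 0x1f));
		nr = ~nr;			/* atomic-AND takes an inverted mask for clear */

		__asm__ __volatile__(
		"	mov r2, %0\n"		/* mask in r2 */
		"	mov r3, %1\n"		/* word address in r3 */
		"	.word %2\n"		/* raw EZchip CTOP atomic-AND opcode */
		:
		: "r"(nr), "r"(m), "i"(CTOP_INST_AAND_DI_R2_R2_R3)
		: "r2", "r3", "memory");
	}

The .word directive emits the CTOP opcode as a literal, presumably sidestepping the need for assembler support for the EZchip-specific instructions; the explicit moves pin the operands to r2/r3, the fixed registers named by those opcodes. In the TEST_N_BIT_OP variant, the smp_mb() pair before and after the opcode provides the full-barrier semantics expected of atomic RMW test_and_*() operations, as the inline comment notes.
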
@@ -181,9 +229,15 @@ static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long
 /* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
 __TEST_N_BIT_OP(op, c_op, asm_op)
 
+#ifndef CONFIG_ARC_PLAT_EZNPS
 BIT_OPS(set, |, bset)
 BIT_OPS(clear, & ~, bclr)
 BIT_OPS(change, ^, bxor)
+#else
+BIT_OPS(set, |, CTOP_INST_AOR_DI_R2_R2_R3)
+BIT_OPS(clear, & ~, CTOP_INST_AAND_DI_R2_R2_R3)
+BIT_OPS(change, ^, CTOP_INST_AXOR_DI_R2_R2_R3)
+#endif
 
 /*
  * This routine doesn't need to be atomic.
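
Since BIT_OPS() generates the standard set_bit()/clear_bit()/change_bit() and test_and_*() names in every branch of the ladder, call sites are unaffected by which variant gets built. A hypothetical usage sketch (demo_map and demo() are illustrative, not from this patch):

	#include <linux/bitops.h>

	static unsigned long demo_map[BITS_TO_LONGS(64)];	/* hypothetical 64-bit bitmap */

	static void demo(void)
	{
		set_bit(3, demo_map);				/* atomic set */
		if (test_and_clear_bit(3, demo_map))		/* atomic RMW, returns old bit */
			change_bit(5, demo_map);		/* atomic toggle */
	}
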