Diffstat (limited to 'include/asm-sh64/bitops.h')
-rw-r--r--  include/asm-sh64/bitops.h | 155 ---------------------------------------------
1 file changed, 0 insertions(+), 155 deletions(-)
diff --git a/include/asm-sh64/bitops.h b/include/asm-sh64/bitops.h
deleted file mode 100644
index 600c59efb4c2..000000000000
--- a/include/asm-sh64/bitops.h
+++ /dev/null
@@ -1,155 +0,0 @@
-#ifndef __ASM_SH64_BITOPS_H
-#define __ASM_SH64_BITOPS_H
-
-/*
- * This file is subject to the terms and conditions of the GNU General Public
- * License. See the file "COPYING" in the main directory of this archive
- * for more details.
- *
- * include/asm-sh64/bitops.h
- *
- * Copyright (C) 2000, 2001 Paolo Alberelli
- * Copyright (C) 2003 Paul Mundt
- */
-
-#ifdef __KERNEL__
-
-#ifndef _LINUX_BITOPS_H
-#error only <linux/bitops.h> can be included directly
-#endif
-
-#include <linux/compiler.h>
-#include <asm/system.h>
-/* For __swab32 */
-#include <asm/byteorder.h>
-
-static __inline__ void set_bit(int nr, volatile void * addr)
-{
-	int mask;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a |= mask;
-	local_irq_restore(flags);
-}
-
-/*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
-#define smp_mb__before_clear_bit()	barrier()
-#define smp_mb__after_clear_bit()	barrier()
-static inline void clear_bit(int nr, volatile unsigned long *a)
-{
-	int mask;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a &= ~mask;
-	local_irq_restore(flags);
-}
-
-static __inline__ void change_bit(int nr, volatile void * addr)
-{
-	int mask;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a ^= mask;
-	local_irq_restore(flags);
-}
-
-static __inline__ int test_and_set_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a |= mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
-
-static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a &= ~mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
-
-static __inline__ int test_and_change_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a ^= mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
-
-#include <asm-generic/bitops/non-atomic.h>
-
-static __inline__ unsigned long ffz(unsigned long word)
-{
-	unsigned long result, __d2, __d3;
-
-	__asm__("gettr tr0, %2\n\t"
-		"pta $+32, tr0\n\t"
-		"andi %1, 1, %3\n\t"
-		"beq %3, r63, tr0\n\t"
-		"pta $+4, tr0\n"
-		"0:\n\t"
-		"shlri.l %1, 1, %1\n\t"
-		"addi %0, 1, %0\n\t"
-		"andi %1, 1, %3\n\t"
-		"beqi %3, 1, tr0\n"
-		"1:\n\t"
-		"ptabs %2, tr0\n\t"
-		: "=r" (result), "=r" (word), "=r" (__d2), "=r" (__d3)
-		: "0" (0L), "1" (word));
-
-	return result;
-}
-
-#include <asm-generic/bitops/__ffs.h>
-#include <asm-generic/bitops/find.h>
-#include <asm-generic/bitops/hweight.h>
-#include <asm-generic/bitops/lock.h>
-#include <asm-generic/bitops/sched.h>
-#include <asm-generic/bitops/ffs.h>
-#include <asm-generic/bitops/ext2-non-atomic.h>
-#include <asm-generic/bitops/ext2-atomic.h>
-#include <asm-generic/bitops/minix.h>
-#include <asm-generic/bitops/fls.h>
-#include <asm-generic/bitops/fls64.h>
-
-#endif /* __KERNEL__ */
-
-#endif /* __ASM_SH64_BITOPS_H */
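
The atomic helpers in the deleted file all share one uniprocessor idiom: mask interrupts with local_irq_save(), perform a plain read-modify-write on the 32-bit word holding the bit, then restore interrupt state with local_irq_restore(), so the update cannot interleave with an interrupt handler on the same CPU. A hedged caller-side sketch of how these bitops are typically used; the bitmap and bit name below are illustrative, not from this file:

#include <linux/bitops.h>

#define MY_BUSY_BIT	0		/* illustrative bit index */

static unsigned long my_flags;		/* illustrative bitmap word */

static int my_try_claim(void)
{
	/* test_and_set_bit() returns the old bit value: 0 means we set it. */
	return !test_and_set_bit(MY_BUSY_BIT, &my_flags);
}

static void my_release(void)
{
	/* clear_bit() is not a compiler barrier; order prior stores first. */
	smp_mb__before_clear_bit();
	clear_bit(MY_BUSY_BIT, &my_flags);
}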
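The one non-trivial routine is ffz(), written in SH-5 assembly: it returns the bit index of the first zero bit in word, counting from the least significant end, by shifting right until a clear bit reaches bit 0. A minimal C sketch of the same semantics, not the kernel's implementation; as with the usual ffz() contract, the result is undefined for an all-ones word:

/* Hypothetical C equivalent of the ffz() assembly above. */
static inline unsigned long ffz_sketch(unsigned long word)
{
	unsigned long result = 0;

	/* Shift right until a zero bit lands in bit position 0. */
	while (word & 1) {
		word >>= 1;
		result++;
	}
	return result;
}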