Diffstat (limited to 'arch/ppc/kernel/bitops.c')
-rw-r--r--  arch/ppc/kernel/bitops.c  126
1 file changed, 0 insertions(+), 126 deletions(-)
diff --git a/arch/ppc/kernel/bitops.c b/arch/ppc/kernel/bitops.c
deleted file mode 100644
index 7f53d193968b..000000000000
--- a/arch/ppc/kernel/bitops.c
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (C) 1996 Paul Mackerras.
- */
-
-#include <linux/kernel.h>
-#include <linux/bitops.h>
-
-/*
- * If the bitops are not inlined in bitops.h, they are defined here.
- * -- paulus
- */
-#if !__INLINE_BITOPS
-void set_bit(int nr, volatile void * addr)
-{
-        unsigned long old;
-        unsigned long mask = 1 << (nr & 0x1f);
-        unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%3 \n\
-        or      %0,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-"       stwcx.  %0,0,%3 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc" );
-}
-
-void clear_bit(int nr, volatile void *addr)
-{
-        unsigned long old;
-        unsigned long mask = 1 << (nr & 0x1f);
-        unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%3 \n\
-        andc    %0,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-"       stwcx.  %0,0,%3 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc");
-}
-
-void change_bit(int nr, volatile void *addr)
-{
-        unsigned long old;
-        unsigned long mask = 1 << (nr & 0x1f);
-        unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%3 \n\
-        xor     %0,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-"       stwcx.  %0,0,%3 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc");
-}
-
-int test_and_set_bit(int nr, volatile void *addr)
-{
-        unsigned int old, t;
-        unsigned int mask = 1 << (nr & 0x1f);
-        volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%4 \n\
-        or      %1,%0,%3 \n"
-        PPC405_ERR77(0,%4)
-"       stwcx.  %1,0,%4 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=&r" (t), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc");
-
-        return (old & mask) != 0;
-}
-
-int test_and_clear_bit(int nr, volatile void *addr)
-{
-        unsigned int old, t;
-        unsigned int mask = 1 << (nr & 0x1f);
-        volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%4 \n\
-        andc    %1,%0,%3 \n"
-        PPC405_ERR77(0,%4)
-"       stwcx.  %1,0,%4 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=&r" (t), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc");
-
-        return (old & mask) != 0;
-}
-
-int test_and_change_bit(int nr, volatile void *addr)
-{
-        unsigned int old, t;
-        unsigned int mask = 1 << (nr & 0x1f);
-        volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
-
-        __asm__ __volatile__(SMP_WMB "\n\
-1:      lwarx   %0,0,%4 \n\
-        xor     %1,%0,%3 \n"
-        PPC405_ERR77(0,%4)
-"       stwcx.  %1,0,%4 \n\
-        bne     1b"
-        SMP_MB
-        : "=&r" (old), "=&r" (t), "=m" (*p)
-        : "r" (mask), "r" (p), "m" (*p)
-        : "cc");
-
-        return (old & mask) != 0;
-}
-#endif /* !__INLINE_BITOPS */
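
For reference, every deleted function follows one pattern: select the 32-bit word holding bit nr (nr >> 5), build a single-bit mask (1 << (nr & 0x1f)), then retry a lwarx (load-and-reserve) / stwcx. (store-conditional) sequence until the conditional store succeeds, with SMP_WMB/SMP_MB supplying the memory barriers and PPC405_ERR77 inserting a workaround for a PPC405 erratum before the stwcx. The sketch below is an illustration only, not kernel code: it expresses the same retry loop in portable C11 atomics (which default to sequentially consistent ordering, stronger than the barriers above), and the sketch_* names are hypothetical.

/*
 * Illustration only -- not from the kernel tree. A portable C11 sketch
 * of the retry loop the lwarx/stwcx. sequences above implement, with a
 * compare-and-swap loop standing in for load-reserve/store-conditional.
 * Assumes 32-bit words, matching the (nr >> 5) / (nr & 0x1f) indexing.
 */
#include <stdatomic.h>
#include <stdio.h>

static int sketch_test_and_set_bit(int nr, _Atomic unsigned int *addr)
{
        unsigned int mask = 1u << (nr & 0x1f);       /* bit within its 32-bit word */
        _Atomic unsigned int *p = addr + (nr >> 5);  /* word that holds bit nr */
        unsigned int old = atomic_load(p);

        /* Retry until the store succeeds, like the "bne 1b" after stwcx.;
         * on failure, old is refreshed with the current word value. */
        while (!atomic_compare_exchange_weak(p, &old, old | mask))
                ;
        return (old & mask) != 0;
}

static void sketch_set_bit(int nr, _Atomic unsigned int *addr)
{
        /* fetch_or collapses the whole loop when the target has a native atomic OR */
        (void)atomic_fetch_or(addr + (nr >> 5), 1u << (nr & 0x1f));
}

int main(void)
{
        _Atomic unsigned int bitmap[2] = {0, 0};

        sketch_set_bit(3, bitmap);
        printf("%d\n", sketch_test_and_set_bit(3, bitmap));  /* 1: bit was set */
        printf("%d\n", sketch_test_and_set_bit(40, bitmap)); /* 0: bit was clear */
        return 0;
}

On PowerPC a compiler typically lowers such a compare-exchange loop back to a lwarx/stwcx. sequence much like the one in the deleted file.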