Diffstat (limited to 'arch/tile/lib/atomic_32.c')
-rw-r--r--  arch/tile/lib/atomic_32.c  42
1 file changed, 21 insertions(+), 21 deletions(-)
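Note: the rename below moves these tile helpers to the fetch_-style naming, where each routine returns the value the word held before the update. As a minimal, self-contained sketch of that return-the-old-value convention (illustrative only, using C11 <stdatomic.h> rather than the arch/tile internals shown in the diff):

#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
	atomic_uint flags;

	atomic_init(&flags, 0x1);

	/* fetch_or returns the OLD value; the OR is applied atomically. */
	unsigned int old = atomic_fetch_or(&flags, 0x4);

	printf("old=%#x new=%#x\n", old, atomic_load(&flags));	/* old=0x1 new=0x5 */
	return 0;
}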
diff --git a/arch/tile/lib/atomic_32.c b/arch/tile/lib/atomic_32.c
index 298df1e9912a..5b6bd932c9c7 100644
--- a/arch/tile/lib/atomic_32.c
+++ b/arch/tile/lib/atomic_32.c
@@ -88,29 +88,29 @@ int _atomic_cmpxchg(int *v, int o, int n)
 }
 EXPORT_SYMBOL(_atomic_cmpxchg);
 
-unsigned long _atomic_or(volatile unsigned long *p, unsigned long mask)
+unsigned long _atomic_fetch_or(volatile unsigned long *p, unsigned long mask)
 {
-	return __atomic_or((int *)p, __atomic_setup(p), mask).val;
+	return __atomic_fetch_or((int *)p, __atomic_setup(p), mask).val;
 }
-EXPORT_SYMBOL(_atomic_or);
+EXPORT_SYMBOL(_atomic_fetch_or);
 
-unsigned long _atomic_and(volatile unsigned long *p, unsigned long mask)
+unsigned long _atomic_fetch_and(volatile unsigned long *p, unsigned long mask)
 {
-	return __atomic_and((int *)p, __atomic_setup(p), mask).val;
+	return __atomic_fetch_and((int *)p, __atomic_setup(p), mask).val;
 }
-EXPORT_SYMBOL(_atomic_and);
+EXPORT_SYMBOL(_atomic_fetch_and);
 
-unsigned long _atomic_andn(volatile unsigned long *p, unsigned long mask)
+unsigned long _atomic_fetch_andn(volatile unsigned long *p, unsigned long mask)
 {
-	return __atomic_andn((int *)p, __atomic_setup(p), mask).val;
+	return __atomic_fetch_andn((int *)p, __atomic_setup(p), mask).val;
 }
-EXPORT_SYMBOL(_atomic_andn);
+EXPORT_SYMBOL(_atomic_fetch_andn);
 
-unsigned long _atomic_xor(volatile unsigned long *p, unsigned long mask)
+unsigned long _atomic_fetch_xor(volatile unsigned long *p, unsigned long mask)
 {
-	return __atomic_xor((int *)p, __atomic_setup(p), mask).val;
+	return __atomic_fetch_xor((int *)p, __atomic_setup(p), mask).val;
 }
-EXPORT_SYMBOL(_atomic_xor);
+EXPORT_SYMBOL(_atomic_fetch_xor);
 
 
 long long _atomic64_xchg(long long *v, long long n)
@@ -142,23 +142,23 @@ long long _atomic64_cmpxchg(long long *v, long long o, long long n)
 }
 EXPORT_SYMBOL(_atomic64_cmpxchg);
 
-long long _atomic64_and(long long *v, long long n)
+long long _atomic64_fetch_and(long long *v, long long n)
 {
-	return __atomic64_and(v, __atomic_setup(v), n);
+	return __atomic64_fetch_and(v, __atomic_setup(v), n);
 }
-EXPORT_SYMBOL(_atomic64_and);
+EXPORT_SYMBOL(_atomic64_fetch_and);
 
-long long _atomic64_or(long long *v, long long n)
+long long _atomic64_fetch_or(long long *v, long long n)
 {
-	return __atomic64_or(v, __atomic_setup(v), n);
+	return __atomic64_fetch_or(v, __atomic_setup(v), n);
 }
-EXPORT_SYMBOL(_atomic64_or);
+EXPORT_SYMBOL(_atomic64_fetch_or);
 
-long long _atomic64_xor(long long *v, long long n)
+long long _atomic64_fetch_xor(long long *v, long long n)
 {
-	return __atomic64_xor(v, __atomic_setup(v), n);
+	return __atomic64_fetch_xor(v, __atomic_setup(v), n);
 }
-EXPORT_SYMBOL(_atomic64_xor);
+EXPORT_SYMBOL(_atomic64_fetch_xor);
 
 /*
  * If any of the atomic or futex routines hit a bad address (not in