author     Dmitry Vyukov <dvyukov@google.com>        2018-01-29 12:26:06 -0500
committer  Ingo Molnar <mingo@kernel.org>            2018-03-12 07:15:35 -0400
commit     a35353bb9eb1990a44a0d7585f99e9589bcdb682 (patch)
tree       1e9d221ae599ad730cb047c4e1b99b45bbaba011
parent     8bf705d130396e69c04cd8e6e010244ad2ce71f4 (diff)
locking/atomic, asm-generic: Add KASAN instrumentation to atomic operations
KASAN uses compiler instrumentation to intercept all memory accesses, but it
does not see memory accesses done in assembly code. One notable user of
assembly code is atomic operations. Frequently, for example, an atomic
reference decrement is the last access to an object and a good candidate for
a racy use-after-free.

Add manual KASAN checks to atomic operations.

Signed-off-by: Dmitry Vyukov <dvyukov@google.com>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Andrey Ryabinin <aryabinin@virtuozzo.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: kasan-dev@googlegroups.com
Cc: linux-mm@kvack.org
Link: http://lkml.kernel.org/r/2fa6e7f0210fd20fe404e5b67e6e9213af2b69a1.1517246437.git.dvyukov@google.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r--   include/asm-generic/atomic-instrumented.h   62
1 file changed, 62 insertions, 0 deletions
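The diff below makes each generic wrapper call kasan_check_read() or
kasan_check_write() on the atomic variable before handing off to the arch_*
implementation. As a rough sketch of the bug class the commit message
describes (not part of this patch; the struct and helper names here are
hypothetical), a refcounted object whose final atomic decrement races with
its free is now flagged at the atomic access itself:

#include <linux/atomic.h>
#include <linux/slab.h>

/* Hypothetical refcounted object, for illustration only. */
struct example_obj {
	atomic_t refs;
	/* ... payload ... */
};

static void example_obj_put(struct example_obj *obj)
{
	/*
	 * The instrumented atomic_dec_and_test() first runs
	 * kasan_check_write(&obj->refs, sizeof(obj->refs)) and only then
	 * arch_atomic_dec_and_test().  If a racing put has already freed
	 * @obj, KASAN reports the use-after-free here instead of the
	 * access vanishing into assembly code.
	 */
	if (atomic_dec_and_test(&obj->refs))
		kfree(obj);
}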
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index b966194d120a..82e080505982 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -2,44 +2,53 @@
 #define _LINUX_ATOMIC_INSTRUMENTED_H
 
 #include <linux/build_bug.h>
+#include <linux/kasan-checks.h>
 
 static __always_inline int atomic_read(const atomic_t *v)
 {
+	kasan_check_read(v, sizeof(*v));
 	return arch_atomic_read(v);
 }
 
 static __always_inline s64 atomic64_read(const atomic64_t *v)
 {
+	kasan_check_read(v, sizeof(*v));
 	return arch_atomic64_read(v);
 }
 
 static __always_inline void atomic_set(atomic_t *v, int i)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_set(v, i);
 }
 
 static __always_inline void atomic64_set(atomic64_t *v, s64 i)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_set(v, i);
 }
 
 static __always_inline int atomic_xchg(atomic_t *v, int i)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_xchg(v, i);
 }
 
 static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_xchg(v, i);
 }
 
 static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_cmpxchg(v, old, new);
 }
 
 static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_cmpxchg(v, old, new);
 }
 
@@ -47,6 +56,8 @@ static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
 #define atomic_try_cmpxchg atomic_try_cmpxchg
 static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
 {
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_read(old, sizeof(*old));
 	return arch_atomic_try_cmpxchg(v, old, new);
 }
 #endif
@@ -55,234 +66,281 @@ static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
 static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
 {
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_read(old, sizeof(*old));
 	return arch_atomic64_try_cmpxchg(v, old, new);
 }
 #endif
 
 static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
+	kasan_check_write(v, sizeof(*v));
 	return __arch_atomic_add_unless(v, a, u);
 }
 
 
 static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_add_unless(v, a, u);
 }
 
 static __always_inline void atomic_inc(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_inc(v);
 }
 
 static __always_inline void atomic64_inc(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_inc(v);
 }
 
 static __always_inline void atomic_dec(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_dec(v);
 }
 
 static __always_inline void atomic64_dec(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_dec(v);
 }
 
 static __always_inline void atomic_add(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_add(i, v);
 }
 
 static __always_inline void atomic64_add(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_add(i, v);
 }
 
 static __always_inline void atomic_sub(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_sub(i, v);
 }
 
 static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_sub(i, v);
 }
 
 static __always_inline void atomic_and(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_and(i, v);
 }
 
 static __always_inline void atomic64_and(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_and(i, v);
 }
 
 static __always_inline void atomic_or(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_or(i, v);
 }
 
 static __always_inline void atomic64_or(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_or(i, v);
 }
 
 static __always_inline void atomic_xor(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic_xor(i, v);
 }
 
 static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_xor(i, v);
 }
 
 static __always_inline int atomic_inc_return(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_inc_return(v);
 }
 
 static __always_inline s64 atomic64_inc_return(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_return(v);
 }
 
 static __always_inline int atomic_dec_return(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_dec_return(v);
 }
 
 static __always_inline s64 atomic64_dec_return(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_return(v);
 }
 
 static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_not_zero(v);
 }
 
 static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_if_positive(v);
 }
 
 static __always_inline bool atomic_dec_and_test(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_dec_and_test(v);
 }
 
 static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_and_test(v);
 }
 
 static __always_inline bool atomic_inc_and_test(atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_inc_and_test(v);
 }
 
 static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_and_test(v);
 }
 
 static __always_inline int atomic_add_return(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_add_return(i, v);
 }
 
 static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_add_return(i, v);
 }
 
 static __always_inline int atomic_sub_return(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_sub_return(i, v);
 }
 
 static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_sub_return(i, v);
 }
 
 static __always_inline int atomic_fetch_add(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_add(i, v);
 }
 
 static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_add(i, v);
 }
 
 static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_sub(i, v);
 }
 
 static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_sub(i, v);
 }
 
 static __always_inline int atomic_fetch_and(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_and(i, v);
 }
 
 static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_and(i, v);
 }
 
 static __always_inline int atomic_fetch_or(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_or(i, v);
 }
 
 static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_or(i, v);
 }
 
 static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_fetch_xor(i, v);
 }
 
 static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_xor(i, v);
 }
 
 static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_sub_and_test(i, v);
 }
 
 static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_sub_and_test(i, v);
 }
 
 static __always_inline bool atomic_add_negative(int i, atomic_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_add_negative(i, v);
 }
 
 static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
 {
+	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_add_negative(i, v);
 }
 
 static __always_inline unsigned long
 cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new, int size)
 {
+	kasan_check_write(ptr, size);
 	switch (size) {
 	case 1:
 		return arch_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
@@ -308,6 +366,7 @@ static __always_inline unsigned long
 sync_cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new,
 		  int size)
 {
+	kasan_check_write(ptr, size);
 	switch (size) {
 	case 1:
 		return arch_sync_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
@@ -334,6 +393,7 @@ static __always_inline unsigned long
 cmpxchg_local_size(volatile void *ptr, unsigned long old, unsigned long new,
 		   int size)
 {
+	kasan_check_write(ptr, size);
 	switch (size) {
 	case 1:
 		return arch_cmpxchg_local((u8 *)ptr, (u8)old, (u8)new);
@@ -359,6 +419,7 @@ cmpxchg_local_size(volatile void *ptr, unsigned long old, unsigned long new,
 static __always_inline u64
 cmpxchg64_size(volatile u64 *ptr, u64 old, u64 new)
 {
+	kasan_check_write(ptr, sizeof(*ptr));
 	return arch_cmpxchg64(ptr, old, new);
 }
 
@@ -371,6 +432,7 @@ cmpxchg64_size(volatile u64 *ptr, u64 old, u64 new)
 static __always_inline u64
 cmpxchg64_local_size(volatile u64 *ptr, u64 old, u64 new)
 {
+	kasan_check_write(ptr, sizeof(*ptr));
 	return arch_cmpxchg64_local(ptr, old, new);
 }
 