author | Mark Rutland <mark.rutland@arm.com> | 2019-04-09 07:13:13 -0400
committer | Will Deacon <will.deacon@arm.com> | 2019-04-16 11:28:00 -0400
commit | 131e135f7fd14b1de7a5eb26631076705c18073f (patch)
tree | 501c85de2d35307d42bbf74f68add9659cbad26f /arch/arm64/include/asm/barrier.h
parent | eea1bb2248691ed65c33daff94a253af44feb103 (diff)
arm64: instrument smp_{load_acquire,store_release}
Our __smp_store_release() and __smp_load_acquire() macros use inline
assembly, which is opaque to kasan. This means that kasan can't catch
erroneous use of these.
This patch adds kasan instrumentation to both.
Cc: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
[will: consistently use *p as argument to sizeof]
Signed-off-by: Will Deacon <will.deacon@arm.com>
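For illustration only (not part of this patch): the added kasan_check_read()/kasan_check_write() calls, declared in <linux/kasan-checks.h>, tell KASAN the address and size of the access the inline asm is about to perform. A buggy caller like the hypothetical kasan_demo() below (the function and buffer names are made up for this sketch) could then be reported:

/*
 * Hypothetical example, not from the patch: with the instrumented macros,
 * KASAN can report both out-of-bounds accesses below, which the opaque
 * inline asm previously hid from instrumentation.
 */
#include <linux/slab.h>
#include <asm/barrier.h>

static int kasan_demo(void)
{
	int *buf = kmalloc(4 * sizeof(int), GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	/* Out-of-bounds write: index 4 is one element past the allocation. */
	smp_store_release(&buf[4], 1);

	/* Out-of-bounds read through the acquire macro. */
	(void)smp_load_acquire(&buf[4]);

	kfree(buf);
	return 0;
}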
Diffstat (limited to 'arch/arm64/include/asm/barrier.h')
-rw-r--r-- | arch/arm64/include/asm/barrier.h | 24
1 file changed, 15 insertions, 9 deletions
diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index f66bb04fdf2d..85b6bedbcc68 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -20,6 +20,8 @@
 
 #ifndef __ASSEMBLY__
 
+#include <linux/kasan-checks.h>
+
 #define __nops(n) ".rept " #n "\nnop\n.endr\n"
 #define nops(n) asm volatile(__nops(n))
 
@@ -72,31 +74,33 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
 
 #define __smp_store_release(p, v) \
 do { \
+	typeof(p) __p = (p); \
 	union { typeof(*p) __val; char __c[1]; } __u = \
 		{ .__val = (__force typeof(*p)) (v) }; \
 	compiletime_assert_atomic_type(*p); \
+	kasan_check_write(__p, sizeof(*p)); \
 	switch (sizeof(*p)) { \
 	case 1: \
 		asm volatile ("stlrb %w1, %0" \
-				: "=Q" (*p) \
+				: "=Q" (*__p) \
 				: "r" (*(__u8 *)__u.__c) \
 				: "memory"); \
 		break; \
 	case 2: \
 		asm volatile ("stlrh %w1, %0" \
-				: "=Q" (*p) \
+				: "=Q" (*__p) \
 				: "r" (*(__u16 *)__u.__c) \
 				: "memory"); \
 		break; \
 	case 4: \
 		asm volatile ("stlr %w1, %0" \
-				: "=Q" (*p) \
+				: "=Q" (*__p) \
 				: "r" (*(__u32 *)__u.__c) \
 				: "memory"); \
 		break; \
 	case 8: \
 		asm volatile ("stlr %1, %0" \
-				: "=Q" (*p) \
+				: "=Q" (*__p) \
 				: "r" (*(__u64 *)__u.__c) \
 				: "memory"); \
 		break; \
@@ -106,27 +110,29 @@ do { \
 #define __smp_load_acquire(p) \
 ({ \
 	union { typeof(*p) __val; char __c[1]; } __u; \
+	typeof(p) __p = (p); \
 	compiletime_assert_atomic_type(*p); \
+	kasan_check_read(__p, sizeof(*p)); \
 	switch (sizeof(*p)) { \
 	case 1: \
 		asm volatile ("ldarb %w0, %1" \
 			: "=r" (*(__u8 *)__u.__c) \
-			: "Q" (*p) : "memory"); \
+			: "Q" (*__p) : "memory"); \
 		break; \
 	case 2: \
 		asm volatile ("ldarh %w0, %1" \
 			: "=r" (*(__u16 *)__u.__c) \
-			: "Q" (*p) : "memory"); \
+			: "Q" (*__p) : "memory"); \
 		break; \
 	case 4: \
 		asm volatile ("ldar %w0, %1" \
 			: "=r" (*(__u32 *)__u.__c) \
-			: "Q" (*p) : "memory"); \
+			: "Q" (*__p) : "memory"); \
 		break; \
 	case 8: \
 		asm volatile ("ldar %0, %1" \
 			: "=r" (*(__u64 *)__u.__c) \
-			: "Q" (*p) : "memory"); \
+			: "Q" (*__p) : "memory"); \
 		break; \
 	} \
 	__u.__val; \
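As a hedged usage note, a typical caller of these macros is the classic message-passing pattern below; the release/acquire pair orders the data write before the flag, and with this patch KASAN additionally validates the flag accesses themselves. The producer()/consumer() names and variables are illustrative only, not taken from the kernel.

/*
 * Illustrative sketch (not from this patch): smp_store_release() and
 * smp_load_acquire() pair up so that if the consumer observes flag == 1,
 * the earlier write to data is guaranteed to be visible as well.
 */
static int data;
static int flag;

static void producer(void)
{
	data = 42;
	/* Orders the store to data before the store to flag. */
	smp_store_release(&flag, 1);
}

static int consumer(void)
{
	/* Pairs with the release in producer(). */
	if (smp_load_acquire(&flag))
		return data;	/* guaranteed to observe 42 */
	return -1;
}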