author      Xishi Qiu <qiuxishi@huawei.com>                  2015-11-05 21:51:21 -0500
committer   Linus Torvalds <torvalds@linux-foundation.org>   2015-11-05 22:34:48 -0500
commit      10f702627e139e21465f4c9d44f63527bbca163c (patch)
tree        706cd924235c129fcf30bf766c038025fd227525 /mm
parent      e0d57714394f5e2ce4e2f9bbebf48e3c7a7fd3be (diff)
kasan: use IS_ALIGNED in memory_is_poisoned_8()
Use IS_ALIGNED() to determine whether the shadow spans two bytes. It
generates less code and is more readable. Also add some comments to the
shadow check functions.
Signed-off-by: Xishi Qiu <qiuxishi@huawei.com>
Acked-by: Andrey Ryabinin <aryabinin@virtuozzo.com>
Cc: Andrey Konovalov <adech.fo@gmail.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
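
For reference, each KASAN shadow byte tracks an 8-byte granule of memory (KASAN_SHADOW_SCALE_SIZE is 8 and KASAN_SHADOW_MASK is 7 in mm/kasan/kasan.h of this era). Under that assumption, the old test for the 8-byte case, ((addr + 7) & KASAN_SHADOW_MASK) >= 7, accepts exactly the same addresses as the new IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE) test, and the aligned form reduces to a single mask-and-compare, which is where the smaller code comes from. The sketch below is a standalone userspace check of that equivalence, not kernel code; the simplified IS_ALIGNED() macro only mirrors the kernel's semantics.

#include <assert.h>
#include <stdio.h>

/* Assumed values, mirroring mm/kasan/kasan.h: one shadow byte per 8 bytes. */
#define KASAN_SHADOW_SCALE_SIZE 8UL
#define KASAN_SHADOW_MASK       (KASAN_SHADOW_SCALE_SIZE - 1)

/* Simplified IS_ALIGNED(), same semantics as the kernel macro. */
#define IS_ALIGNED(x, a)        (((x) & ((a) - 1)) == 0)

int main(void)
{
        unsigned long addr;

        for (addr = 0; addr < 4096; addr++) {
                /* Old check: does the last byte land at shadow offset 7? */
                int old_check = ((addr + 7) & KASAN_SHADOW_MASK) >= 7;
                /* New check: does the access start on a granule boundary? */
                int new_check = IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE);

                /* Both are true exactly when addr..addr+7 sits in one granule. */
                assert(old_check == new_check);
        }
        printf("old and new 8-byte alignment checks agree\n");
        return 0;
}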
Diffstat (limited to 'mm')
-rw-r--r--   mm/kasan/kasan.c   24
1 file changed, 22 insertions, 2 deletions
diff --git a/mm/kasan/kasan.c b/mm/kasan/kasan.c
index 2b21ccd55cd4..d41b21bce6a0 100644
--- a/mm/kasan/kasan.c
+++ b/mm/kasan/kasan.c
@@ -86,6 +86,11 @@ static __always_inline bool memory_is_poisoned_2(unsigned long addr)
                 if (memory_is_poisoned_1(addr + 1))
                         return true;
 
+                /*
+                 * If single shadow byte covers 2-byte access, we don't
+                 * need to do anything more. Otherwise, test the first
+                 * shadow byte.
+                 */
                 if (likely(((addr + 1) & KASAN_SHADOW_MASK) != 0))
                         return false;
 
@@ -103,6 +108,11 @@ static __always_inline bool memory_is_poisoned_4(unsigned long addr)
                 if (memory_is_poisoned_1(addr + 3))
                         return true;
 
+                /*
+                 * If single shadow byte covers 4-byte access, we don't
+                 * need to do anything more. Otherwise, test the first
+                 * shadow byte.
+                 */
                 if (likely(((addr + 3) & KASAN_SHADOW_MASK) >= 3))
                         return false;
 
@@ -120,7 +130,12 @@ static __always_inline bool memory_is_poisoned_8(unsigned long addr)
                 if (memory_is_poisoned_1(addr + 7))
                         return true;
 
-                if (likely(((addr + 7) & KASAN_SHADOW_MASK) >= 7))
+                /*
+                 * If single shadow byte covers 8-byte access, we don't
+                 * need to do anything more. Otherwise, test the first
+                 * shadow byte.
+                 */
+                if (likely(IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE)))
                         return false;
 
                 return unlikely(*(u8 *)shadow_addr);
@@ -139,7 +154,12 @@ static __always_inline bool memory_is_poisoned_16(unsigned long addr)
                 if (unlikely(shadow_first_bytes))
                         return true;
 
-                if (likely(IS_ALIGNED(addr, 8)))
+                /*
+                 * If two shadow bytes covers 16-byte access, we don't
+                 * need to do anything more. Otherwise, test the last
+                 * shadow byte.
+                 */
+                if (likely(IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE)))
                         return false;
 
                 return memory_is_poisoned_1(addr + 15);
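
The memory_is_poisoned_16() hunk follows the same idea with two shadow bytes: the u16 load of shadow_first_bytes already covers the first two granules, so when addr is aligned to KASAN_SHADOW_SCALE_SIZE the 16-byte range fits entirely within those two granules and nothing more needs checking; an unaligned access spills into a third granule, whose byte is tested via memory_is_poisoned_1(addr + 15). A rough illustration, assuming the 8-byte granule size and replacing kasan_mem_to_shadow() with a plain division:

#include <stdio.h>

#define GRANULE 8UL     /* assumed KASAN shadow granule size */

/* How many shadow bytes does a 16-byte access starting at addr touch? */
static unsigned long shadow_bytes_for_16(unsigned long addr)
{
        unsigned long first = addr / GRANULE;
        unsigned long last = (addr + 15) / GRANULE;

        return last - first + 1;
}

int main(void)
{
        /* Aligned start: exactly the two granules covered by the u16 load. */
        printf("addr=0x40 -> %lu shadow bytes\n", shadow_bytes_for_16(0x40));
        /* Unaligned start: a third granule covers the tail byte. */
        printf("addr=0x44 -> %lu shadow bytes\n", shadow_bytes_for_16(0x44));
        return 0;
}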