aboutsummaryrefslogtreecommitdiffstats
path: root/arch/sparc/include
diff options
context:
space:
mode:
authorLinus Torvalds <torvalds@linux-foundation.org>2016-08-08 17:48:14 -0400
committerLinus Torvalds <torvalds@linux-foundation.org>2016-08-08 17:48:14 -0400
commit1eccfa090eaea22558570054bbdc147817e1df5e (patch)
treea0adfdb87319abef88f575ee34314649193b7e92 /arch/sparc/include
parent1bd4403d86a1c06cb6cc9ac87664a0c9d3413d51 (diff)
parented18adc1cdd00a5c55a20fbdaed4804660772281 (diff)
Merge tag 'usercopy-v4.8' of git://git.kernel.org/pub/scm/linux/kernel/git/kees/linux
Pull usercopy protection from Kees Cook: "This implements HARDENED_USERCOPY verification of copy_to_user and copy_from_user bounds checking for most architectures on SLAB and SLUB" * tag 'usercopy-v4.8' of git://git.kernel.org/pub/scm/linux/kernel/git/kees/linux: mm: SLUB hardened usercopy support mm: SLAB hardened usercopy support s390/uaccess: Enable hardened usercopy sparc/uaccess: Enable hardened usercopy powerpc/uaccess: Enable hardened usercopy ia64/uaccess: Enable hardened usercopy arm64/uaccess: Enable hardened usercopy ARM: uaccess: Enable hardened usercopy x86/uaccess: Enable hardened usercopy mm: Hardened usercopy mm: Implement stack frame object validation mm: Add is_migrate_cma_page
Diffstat (limited to 'arch/sparc/include')
-rw-r--r--arch/sparc/include/asm/uaccess_32.h14
-rw-r--r--arch/sparc/include/asm/uaccess_64.h11
2 files changed, 19 insertions, 6 deletions
diff --git a/arch/sparc/include/asm/uaccess_32.h b/arch/sparc/include/asm/uaccess_32.h
index 57aca2792d29..341a5a133f48 100644
--- a/arch/sparc/include/asm/uaccess_32.h
+++ b/arch/sparc/include/asm/uaccess_32.h
@@ -248,22 +248,28 @@ unsigned long __copy_user(void __user *to, const void __user *from, unsigned lon
248 248
249static inline unsigned long copy_to_user(void __user *to, const void *from, unsigned long n) 249static inline unsigned long copy_to_user(void __user *to, const void *from, unsigned long n)
250{ 250{
251 if (n && __access_ok((unsigned long) to, n)) 251 if (n && __access_ok((unsigned long) to, n)) {
252 if (!__builtin_constant_p(n))
253 check_object_size(from, n, true);
252 return __copy_user(to, (__force void __user *) from, n); 254 return __copy_user(to, (__force void __user *) from, n);
253 else 255 } else
254 return n; 256 return n;
255} 257}
256 258
257static inline unsigned long __copy_to_user(void __user *to, const void *from, unsigned long n) 259static inline unsigned long __copy_to_user(void __user *to, const void *from, unsigned long n)
258{ 260{
261 if (!__builtin_constant_p(n))
262 check_object_size(from, n, true);
259 return __copy_user(to, (__force void __user *) from, n); 263 return __copy_user(to, (__force void __user *) from, n);
260} 264}
261 265
262static inline unsigned long copy_from_user(void *to, const void __user *from, unsigned long n) 266static inline unsigned long copy_from_user(void *to, const void __user *from, unsigned long n)
263{ 267{
264 if (n && __access_ok((unsigned long) from, n)) 268 if (n && __access_ok((unsigned long) from, n)) {
269 if (!__builtin_constant_p(n))
270 check_object_size(to, n, false);
265 return __copy_user((__force void __user *) to, from, n); 271 return __copy_user((__force void __user *) to, from, n);
266 else 272 } else
267 return n; 273 return n;
268} 274}
269 275
diff --git a/arch/sparc/include/asm/uaccess_64.h b/arch/sparc/include/asm/uaccess_64.h
index e9a51d64974d..8bda94fab8e8 100644
--- a/arch/sparc/include/asm/uaccess_64.h
+++ b/arch/sparc/include/asm/uaccess_64.h
@@ -210,8 +210,12 @@ unsigned long copy_from_user_fixup(void *to, const void __user *from,
210static inline unsigned long __must_check 210static inline unsigned long __must_check
211copy_from_user(void *to, const void __user *from, unsigned long size) 211copy_from_user(void *to, const void __user *from, unsigned long size)
212{ 212{
213 unsigned long ret = ___copy_from_user(to, from, size); 213 unsigned long ret;
214 214
215 if (!__builtin_constant_p(size))
216 check_object_size(to, size, false);
217
218 ret = ___copy_from_user(to, from, size);
215 if (unlikely(ret)) 219 if (unlikely(ret))
216 ret = copy_from_user_fixup(to, from, size); 220 ret = copy_from_user_fixup(to, from, size);
217 221
@@ -227,8 +231,11 @@ unsigned long copy_to_user_fixup(void __user *to, const void *from,
227static inline unsigned long __must_check 231static inline unsigned long __must_check
228copy_to_user(void __user *to, const void *from, unsigned long size) 232copy_to_user(void __user *to, const void *from, unsigned long size)
229{ 233{
230 unsigned long ret = ___copy_to_user(to, from, size); 234 unsigned long ret;
231 235
236 if (!__builtin_constant_p(size))
237 check_object_size(from, size, true);
238 ret = ___copy_to_user(to, from, size);
232 if (unlikely(ret)) 239 if (unlikely(ret))
233 ret = copy_to_user_fixup(to, from, size); 240 ret = copy_to_user_fixup(to, from, size);
234 return ret; 241 return ret;