aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorWill Deacon <will.deacon@arm.com>2016-07-04 11:59:43 -0400
committerWill Deacon <will.deacon@arm.com>2016-10-19 10:37:23 -0400
commit1c5b51dfb7b4564008e0cadec5381a69e88b0d21 (patch)
treeccad07a3c552411526092333e165cca761df701c
parent91cb163e4d141c74e99639fbee7c2a6332c92901 (diff)
arm64: swp emulation: bound LL/SC retries before rescheduling
If a CPU does not implement a global monitor for certain memory types, then userspace can attempt a kernel DoS by issuing SWP instructions targeting the problematic memory (for example, a framebuffer mapped with non-cacheable attributes). The SWP emulation code protects against these sorts of attacks by checking for pending signals and potentially rescheduling when the STXR instruction fails during the emulation. Whilst this is good for avoiding livelock, it harms emulation of legitimate SWP instructions on CPUs where forward progress is not guaranteed if there are memory accesses to the same reservation granule (up to 2k) between the failing STXR and the retry of the LDXR. This patch solves the problem by retrying the STXR a bounded number of times (4) before breaking out of the LL/SC loop and looking for something else to do. Cc: <stable@vger.kernel.org> Fixes: bd35a4adc413 ("arm64: Port SWP/SWPB emulation support from arm") Reviewed-by: Mark Rutland <mark.rutland@arm.com> Signed-off-by: Will Deacon <will.deacon@arm.com>
-rw-r--r--arch/arm64/kernel/armv8_deprecated.c36
1 file changed, 22 insertions, 14 deletions
diff --git a/arch/arm64/kernel/armv8_deprecated.c b/arch/arm64/kernel/armv8_deprecated.c
index 42ffdb54e162..b0988bb1bf64 100644
--- a/arch/arm64/kernel/armv8_deprecated.c
+++ b/arch/arm64/kernel/armv8_deprecated.c
@@ -280,35 +280,43 @@ static void __init register_insn_emulation_sysctl(struct ctl_table *table)
280/* 280/*
281 * Error-checking SWP macros implemented using ldxr{b}/stxr{b} 281 * Error-checking SWP macros implemented using ldxr{b}/stxr{b}
282 */ 282 */
283#define __user_swpX_asm(data, addr, res, temp, B) \ 283
284/* Arbitrary constant to ensure forward-progress of the LL/SC loop */
285#define __SWP_LL_SC_LOOPS 4
286
287#define __user_swpX_asm(data, addr, res, temp, temp2, B) \
284 __asm__ __volatile__( \ 288 __asm__ __volatile__( \
289 " mov %w3, %w7\n" \
285 ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, \ 290 ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN, \
286 CONFIG_ARM64_PAN) \ 291 CONFIG_ARM64_PAN) \
287 "0: ldxr"B" %w2, [%3]\n" \ 292 "0: ldxr"B" %w2, [%4]\n" \
288 "1: stxr"B" %w0, %w1, [%3]\n" \ 293 "1: stxr"B" %w0, %w1, [%4]\n" \
289 " cbz %w0, 2f\n" \ 294 " cbz %w0, 2f\n" \
290 " mov %w0, %w4\n" \ 295 " sub %w3, %w3, #1\n" \
296 " cbnz %w3, 0b\n" \
297 " mov %w0, %w5\n" \
291 " b 3f\n" \ 298 " b 3f\n" \
292 "2:\n" \ 299 "2:\n" \
293 " mov %w1, %w2\n" \ 300 " mov %w1, %w2\n" \
294 "3:\n" \ 301 "3:\n" \
295 " .pushsection .fixup,\"ax\"\n" \ 302 " .pushsection .fixup,\"ax\"\n" \
296 " .align 2\n" \ 303 " .align 2\n" \
297 "4: mov %w0, %w5\n" \ 304 "4: mov %w0, %w6\n" \
298 " b 3b\n" \ 305 " b 3b\n" \
299 " .popsection" \ 306 " .popsection" \
300 _ASM_EXTABLE(0b, 4b) \ 307 _ASM_EXTABLE(0b, 4b) \
301 _ASM_EXTABLE(1b, 4b) \ 308 _ASM_EXTABLE(1b, 4b) \
302 ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, \ 309 ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN, \
303 CONFIG_ARM64_PAN) \ 310 CONFIG_ARM64_PAN) \
304 : "=&r" (res), "+r" (data), "=&r" (temp) \ 311 : "=&r" (res), "+r" (data), "=&r" (temp), "=&r" (temp2) \
305 : "r" (addr), "i" (-EAGAIN), "i" (-EFAULT) \ 312 : "r" (addr), "i" (-EAGAIN), "i" (-EFAULT), \
313 "i" (__SWP_LL_SC_LOOPS) \
306 : "memory") 314 : "memory")
307 315
308#define __user_swp_asm(data, addr, res, temp) \ 316#define __user_swp_asm(data, addr, res, temp, temp2) \
309 __user_swpX_asm(data, addr, res, temp, "") 317 __user_swpX_asm(data, addr, res, temp, temp2, "")
310#define __user_swpb_asm(data, addr, res, temp) \ 318#define __user_swpb_asm(data, addr, res, temp, temp2) \
311 __user_swpX_asm(data, addr, res, temp, "b") 319 __user_swpX_asm(data, addr, res, temp, temp2, "b")
312 320
313/* 321/*
314 * Bit 22 of the instruction encoding distinguishes between 322 * Bit 22 of the instruction encoding distinguishes between
@@ -328,12 +336,12 @@ static int emulate_swpX(unsigned int address, unsigned int *data,
328 } 336 }
329 337
330 while (1) { 338 while (1) {
331 unsigned long temp; 339 unsigned long temp, temp2;
332 340
333 if (type == TYPE_SWPB) 341 if (type == TYPE_SWPB)
334 __user_swpb_asm(*data, address, res, temp); 342 __user_swpb_asm(*data, address, res, temp, temp2);
335 else 343 else
336 __user_swp_asm(*data, address, res, temp); 344 __user_swp_asm(*data, address, res, temp, temp2);
337 345
338 if (likely(res != -EAGAIN) || signal_pending(current)) 346 if (likely(res != -EAGAIN) || signal_pending(current))
339 break; 347 break;