aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorArd Biesheuvel <ard.biesheuvel@linaro.org>2018-10-08 07:16:59 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2018-10-12 02:20:45 -0400
commitcc3cc48972371b3c2e94f16b6ac7bdad7fdfc93c (patch)
treea08b46acc09d09c546ff8e226344ee1b5f65f87e
parent22a8118d329334833cd30f2ceb36d28e8cae8a4f (diff)
crypto: arm64/aes-blk - ensure XTS mask is always loaded
Commit 2e5d2f33d1db ("crypto: arm64/aes-blk - improve XTS mask handling") optimized away some reloads of the XTS mask vector, but failed to take into account that calls into the XTS en/decrypt routines will take a slightly different code path if a single block of input is split across different buffers. So let's ensure that the first load occurs unconditionally, and move the reload to the end so it doesn't occur needlessly. Fixes: 2e5d2f33d1db ("crypto: arm64/aes-blk - improve XTS mask handling") Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--arch/arm64/crypto/aes-modes.S8
1 file changed, 4 insertions, 4 deletions
diff --git a/arch/arm64/crypto/aes-modes.S b/arch/arm64/crypto/aes-modes.S
index 039738ae23f6..67700045a0e0 100644
--- a/arch/arm64/crypto/aes-modes.S
+++ b/arch/arm64/crypto/aes-modes.S
@@ -359,18 +359,17 @@ AES_ENTRY(aes_xts_encrypt)
359 mov x29, sp 359 mov x29, sp
360 360
361 ld1 {v4.16b}, [x6] 361 ld1 {v4.16b}, [x6]
362 xts_load_mask v8
362 cbz w7, .Lxtsencnotfirst 363 cbz w7, .Lxtsencnotfirst
363 364
364 enc_prepare w3, x5, x8 365 enc_prepare w3, x5, x8
365 encrypt_block v4, w3, x5, x8, w7 /* first tweak */ 366 encrypt_block v4, w3, x5, x8, w7 /* first tweak */
366 enc_switch_key w3, x2, x8 367 enc_switch_key w3, x2, x8
367 xts_load_mask v8
368 b .LxtsencNx 368 b .LxtsencNx
369 369
370.Lxtsencnotfirst: 370.Lxtsencnotfirst:
371 enc_prepare w3, x2, x8 371 enc_prepare w3, x2, x8
372.LxtsencloopNx: 372.LxtsencloopNx:
373 xts_reload_mask v8
374 next_tweak v4, v4, v8 373 next_tweak v4, v4, v8
375.LxtsencNx: 374.LxtsencNx:
376 subs w4, w4, #4 375 subs w4, w4, #4
@@ -391,6 +390,7 @@ AES_ENTRY(aes_xts_encrypt)
391 st1 {v0.16b-v3.16b}, [x0], #64 390 st1 {v0.16b-v3.16b}, [x0], #64
392 mov v4.16b, v7.16b 391 mov v4.16b, v7.16b
393 cbz w4, .Lxtsencout 392 cbz w4, .Lxtsencout
393 xts_reload_mask v8
394 b .LxtsencloopNx 394 b .LxtsencloopNx
395.Lxtsenc1x: 395.Lxtsenc1x:
396 adds w4, w4, #4 396 adds w4, w4, #4
@@ -417,18 +417,17 @@ AES_ENTRY(aes_xts_decrypt)
417 mov x29, sp 417 mov x29, sp
418 418
419 ld1 {v4.16b}, [x6] 419 ld1 {v4.16b}, [x6]
420 xts_load_mask v8
420 cbz w7, .Lxtsdecnotfirst 421 cbz w7, .Lxtsdecnotfirst
421 422
422 enc_prepare w3, x5, x8 423 enc_prepare w3, x5, x8
423 encrypt_block v4, w3, x5, x8, w7 /* first tweak */ 424 encrypt_block v4, w3, x5, x8, w7 /* first tweak */
424 dec_prepare w3, x2, x8 425 dec_prepare w3, x2, x8
425 xts_load_mask v8
426 b .LxtsdecNx 426 b .LxtsdecNx
427 427
428.Lxtsdecnotfirst: 428.Lxtsdecnotfirst:
429 dec_prepare w3, x2, x8 429 dec_prepare w3, x2, x8
430.LxtsdecloopNx: 430.LxtsdecloopNx:
431 xts_reload_mask v8
432 next_tweak v4, v4, v8 431 next_tweak v4, v4, v8
433.LxtsdecNx: 432.LxtsdecNx:
434 subs w4, w4, #4 433 subs w4, w4, #4
@@ -449,6 +448,7 @@ AES_ENTRY(aes_xts_decrypt)
449 st1 {v0.16b-v3.16b}, [x0], #64 448 st1 {v0.16b-v3.16b}, [x0], #64
450 mov v4.16b, v7.16b 449 mov v4.16b, v7.16b
451 cbz w4, .Lxtsdecout 450 cbz w4, .Lxtsdecout
451 xts_reload_mask v8
452 b .LxtsdecloopNx 452 b .LxtsdecloopNx
453.Lxtsdec1x: 453.Lxtsdec1x:
454 adds w4, w4, #4 454 adds w4, w4, #4