author	Ard Biesheuvel <ard.biesheuvel@linaro.org>	2019-09-03 12:43:36 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2019-09-09 03:35:39 -0400
commit	2ed8b79098cf76287c519d781a14c7983ab7e4f7 (patch)
tree	bb726fc02c7a6dded7b90d77cb9f9aae7a438bbc /arch/arm
parent	c61b1607ed4fbbf2ba7c86f29768cff44a1a88f8 (diff)
crypto: arm/aes-neonbs - implement ciphertext stealing for XTS
Update the AES-XTS implementation based on NEON instructions so that it
can deal with inputs whose size is not a multiple of the cipher block
size. This is part of the original XTS specification, but was never
implemented before in the Linux kernel.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
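For readers unfamiliar with ciphertext stealing, the tail handling that the new __xts_crypt() code in the glue patch below performs can be sketched in isolation roughly as follows. This is a minimal illustration under stated assumptions, not the kernel implementation: block_enc() is a hypothetical stand-in for the single raw AES block encryption done through ctx->cts_tfm in the patch, the data is assumed to be processed in place, and names such as xts_cts_encrypt_tail and BLK are illustrative only, not kernel APIs.

/* Minimal sketch of the XTS ciphertext-stealing tail step; not kernel code. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLK 16

/* Hypothetical placeholder for crypto_cipher_encrypt_one(ctx->cts_tfm, ...);
 * a byte-wise XOR keeps the example self-contained but is NOT AES. */
static void block_enc(uint8_t blk[BLK])
{
	for (int i = 0; i < BLK; i++)
		blk[i] ^= 0xa5;
}

/*
 * data holds len bytes in place: the first len - (len % BLK) bytes are the
 * already-encrypted full blocks, the trailing len % BLK bytes are still
 * plaintext.  tweak is the tweak for the stolen block, i.e. the value the
 * bulk pass leaves behind in the IV.
 */
static void xts_cts_encrypt_tail(uint8_t *data, size_t len,
				 const uint8_t tweak[BLK])
{
	size_t tail = len % BLK;
	uint8_t *last = data + len - tail - BLK;  /* last full ciphertext block */
	uint8_t buf[2 * BLK];
	int i;

	if (tail == 0)
		return;                           /* nothing to steal */

	memcpy(buf, last, BLK);                   /* CC = C_{m-1} */
	memcpy(buf + BLK, buf, tail);             /* its head becomes C_m */
	memcpy(buf, last + BLK, tail);            /* PP = P_m || tail of CC */

	for (i = 0; i < BLK; i++)                 /* XEX: xor tweak, encrypt, */
		buf[i] ^= tweak[i];               /* xor tweak again */
	block_enc(buf);
	for (i = 0; i < BLK; i++)
		buf[i] ^= tweak[i];

	memcpy(last, buf, BLK + tail);            /* write C_{m-1} || C_m back */
}

int main(void)
{
	/* one "bulk-encrypted" block followed by a 4-byte plaintext tail */
	uint8_t data[20] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
			     'T', 'A', 'I', 'L' };
	uint8_t tweak[BLK] = { 0 };
	size_t i;

	xts_cts_encrypt_tail(data, sizeof(data), tweak);
	for (i = 0; i < sizeof(data); i++)
		printf("%02x", data[i]);
	printf("\n");
	return 0;
}

On decryption the same tail step runs with the block cipher inverted; the reorder_last_tweak argument added to the NEON routines makes the bulk pass swap the last two tweaks in that case, so the last full ciphertext block is processed with the later tweak and the earlier one is left in the IV for the tail step, as XTS decryption with ciphertext stealing requires.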
Diffstat (limited to 'arch/arm')
-rw-r--r--	arch/arm/crypto/aes-neonbs-core.S	16
-rw-r--r--	arch/arm/crypto/aes-neonbs-glue.c	69
2 files changed, 72 insertions, 13 deletions
diff --git a/arch/arm/crypto/aes-neonbs-core.S b/arch/arm/crypto/aes-neonbs-core.S
index bb75918e4984..cfaed4e67535 100644
--- a/arch/arm/crypto/aes-neonbs-core.S
+++ b/arch/arm/crypto/aes-neonbs-core.S
@@ -889,9 +889,9 @@ ENDPROC(aesbs_ctr_encrypt)
 
 	/*
 	 * aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[], int rounds,
-	 *		     int blocks, u8 iv[])
+	 *		     int blocks, u8 iv[], int reorder_last_tweak)
 	 * aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[], int rounds,
-	 *		     int blocks, u8 iv[])
+	 *		     int blocks, u8 iv[], int reorder_last_tweak)
 	 */
__xts_prepare8:
 	vld1.8		{q14}, [r7]		// load iv
@@ -944,17 +944,25 @@ __xts_prepare8:
 
 	vld1.8		{q7}, [r1]!
 	next_tweak	q14, q12, q15, q13
-	veor		q7, q7, q12
+THUMB(	itt		le		)
+	W(cmple)	r8, #0
+	ble		1f
+0:	veor		q7, q7, q12
 	vst1.8		{q12}, [r4, :128]
 
-0:	vst1.8		{q14}, [r7]		// store next iv
+	vst1.8		{q14}, [r7]		// store next iv
 	bx		lr
+
+1:	vswp		q12, q14
+	b		0b
 ENDPROC(__xts_prepare8)
 
 	.macro		__xts_crypt, do8, o0, o1, o2, o3, o4, o5, o6, o7
 	push		{r4-r8, lr}
 	mov		r5, sp			// preserve sp
 	ldrd		r6, r7, [sp, #24]	// get blocks and iv args
+	ldr		r8, [sp, #32]		// reorder final tweak?
+	rsb		r8, r8, #1
 	sub		ip, sp, #128		// make room for 8x tweak
 	bic		ip, ip, #0xf		// align sp to 16 bytes
 	mov		sp, ip
diff --git a/arch/arm/crypto/aes-neonbs-glue.c b/arch/arm/crypto/aes-neonbs-glue.c
index 9000d0796d5e..e85839a8aaeb 100644
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -12,6 +12,7 @@
 #include <crypto/ctr.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <linux/module.h>
 
@@ -37,9 +38,9 @@ asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
 				  int rounds, int blocks, u8 ctr[], u8 final[]);
 
 asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
-				  int rounds, int blocks, u8 iv[]);
+				  int rounds, int blocks, u8 iv[], int);
 asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
-				  int rounds, int blocks, u8 iv[]);
+				  int rounds, int blocks, u8 iv[], int);
 
 struct aesbs_ctx {
 	int	rounds;
@@ -53,6 +54,7 @@ struct aesbs_cbc_ctx {
 
 struct aesbs_xts_ctx {
 	struct aesbs_ctx	key;
+	struct crypto_cipher	*cts_tfm;
 	struct crypto_cipher	*tweak_tfm;
 };
 
@@ -291,6 +293,9 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 		return err;
 
 	key_len /= 2;
+	err = crypto_cipher_setkey(ctx->cts_tfm, in_key, key_len);
+	if (err)
+		return err;
 	err = crypto_cipher_setkey(ctx->tweak_tfm, in_key + key_len, key_len);
 	if (err)
 		return err;
@@ -302,7 +307,13 @@ static int xts_init(struct crypto_tfm *tfm)
 {
 	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
+	if (IS_ERR(ctx->cts_tfm))
+		return PTR_ERR(ctx->cts_tfm);
+
 	ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
+	if (IS_ERR(ctx->tweak_tfm))
+		crypto_free_cipher(ctx->cts_tfm);
 
 	return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
 }
@@ -312,17 +323,34 @@ static void xts_exit(struct crypto_tfm *tfm)
 	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	crypto_free_cipher(ctx->tweak_tfm);
+	crypto_free_cipher(ctx->cts_tfm);
 }
 
-static int __xts_crypt(struct skcipher_request *req,
+static int __xts_crypt(struct skcipher_request *req, bool encrypt,
 		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
-				  int rounds, int blocks, u8 iv[]))
+				  int rounds, int blocks, u8 iv[], int))
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int tail = req->cryptlen % AES_BLOCK_SIZE;
+	struct skcipher_request subreq;
+	u8 buf[2 * AES_BLOCK_SIZE];
 	struct skcipher_walk walk;
 	int err;
 
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(tail)) {
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      skcipher_request_flags(req),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   req->cryptlen - tail, req->iv);
+		req = &subreq;
+	}
+
 	err = skcipher_walk_virt(&walk, req, true);
 	if (err)
 		return err;
@@ -331,30 +359,53 @@ static int __xts_crypt(struct skcipher_request *req,
 
 	while (walk.nbytes >= AES_BLOCK_SIZE) {
 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
+		int reorder_last_tweak = !encrypt && tail > 0;
 
-		if (walk.nbytes < walk.total)
+		if (walk.nbytes < walk.total) {
 			blocks = round_down(blocks,
 					    walk.stride / AES_BLOCK_SIZE);
+			reorder_last_tweak = 0;
+		}
 
 		kernel_neon_begin();
 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
-		   ctx->key.rounds, blocks, walk.iv);
+		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk,
 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
 	}
 
-	return err;
+	if (err || likely(!tail))
+		return err;
+
+	/* handle ciphertext stealing */
+	scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
+				 AES_BLOCK_SIZE, 0);
+	memcpy(buf + AES_BLOCK_SIZE, buf, tail);
+	scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);
+
+	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
+
+	if (encrypt)
+		crypto_cipher_encrypt_one(ctx->cts_tfm, buf, buf);
+	else
+		crypto_cipher_decrypt_one(ctx->cts_tfm, buf, buf);
+
+	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
+
+	scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
+				 AES_BLOCK_SIZE + tail, 1);
+	return 0;
 }
 
 static int xts_encrypt(struct skcipher_request *req)
 {
-	return __xts_crypt(req, aesbs_xts_encrypt);
+	return __xts_crypt(req, true, aesbs_xts_encrypt);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
 {
-	return __xts_crypt(req, aesbs_xts_decrypt);
+	return __xts_crypt(req, false, aesbs_xts_decrypt);
 }
 
 static struct skcipher_alg aes_algs[] = { {