author		Ard Biesheuvel <ard.biesheuvel@linaro.org>	2017-07-24 06:28:13 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-08-03 21:27:21 -0400
commit		e211506979e205e5a00b0a9d321fb3cbb44ee9ea (patch)
tree		234859ed43fc55e8f187d155ccdd4efd9da0dd97
parent		5092fcf3490811a735ef44bd22d8b5ff1bd63926 (diff)
crypto: arm64/aes-blk - add a non-SIMD fallback for synchronous CTR
To accommodate systems that may disallow use of the NEON in kernel mode
in some circumstances, introduce a C fallback for synchronous AES in CTR
mode, and use it if may_use_simd() returns false.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
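The fallback is plain CTR mode driven by the scalar AES core: encrypt the
counter block, XOR the resulting keystream into the data, increment the
counter, and let the final block be partial. The standalone sketch below
shows that data flow in userspace C; toy_encrypt_block() is a made-up
stand-in for __aes_arm64_encrypt() (it is not AES), and ctr_inc() mirrors
what crypto_inc() does to the IV.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define BLOCK_SIZE 16

    /* Stand-in block cipher: NOT AES, only here to show the data flow. */
    static void toy_encrypt_block(uint8_t out[BLOCK_SIZE],
                                  const uint8_t in[BLOCK_SIZE])
    {
            for (int i = 0; i < BLOCK_SIZE; i++)
                    out[i] = (uint8_t)(in[i] ^ (0xA5 + i));
    }

    /* Big-endian increment of the counter block, as crypto_inc() does. */
    static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
    {
            for (int i = BLOCK_SIZE - 1; i >= 0; i--)
                    if (++ctr[i] != 0)
                            break;
    }

    /* CTR mode: keystream = E(counter); dst = src XOR keystream. */
    static void ctr_crypt(uint8_t *dst, const uint8_t *src, size_t len,
                          uint8_t iv[BLOCK_SIZE])
    {
            uint8_t buf[BLOCK_SIZE];

            while (len > 0) {
                    size_t n = len < BLOCK_SIZE ? len : BLOCK_SIZE;

                    toy_encrypt_block(buf, iv);       /* encrypt the counter */
                    for (size_t i = 0; i < n; i++)
                            dst[i] = src[i] ^ buf[i]; /* XOR in the keystream */
                    ctr_inc(iv);

                    dst += n;
                    src += n;
                    len -= n; /* last block may be partial, as in the patch */
            }
    }

    int main(void)
    {
            uint8_t iv[BLOCK_SIZE] = { 0 }, iv2[BLOCK_SIZE] = { 0 };
            uint8_t msg[20] = "ctr mode demo msg!!";
            uint8_t ct[20], pt[20];

            ctr_crypt(ct, msg, sizeof(msg), iv);  /* encrypt */
            ctr_crypt(pt, ct, sizeof(ct), iv2);   /* decrypt: same operation */
            printf("round trip %s\n",
                   memcmp(msg, pt, sizeof(msg)) ? "failed" : "ok");
            return 0;
    }

Encryption and decryption are the same operation in CTR mode, which is why
the patch wires .decrypt to the same ctr_encrypt_sync() handler.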
-rw-r--r--	arch/arm64/crypto/Kconfig            |  6 ++--
-rw-r--r--	arch/arm64/crypto/aes-ctr-fallback.h | 53 ++++++
-rw-r--r--	arch/arm64/crypto/aes-glue.c         | 59 ++++--
3 files changed, 101 insertions(+), 17 deletions(-)
diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig
index ba637765c19a..a068dcbe2518 100644
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -64,15 +64,17 @@ config CRYPTO_AES_ARM64_CE_CCM
 
 config CRYPTO_AES_ARM64_CE_BLK
 	tristate "AES in ECB/CBC/CTR/XTS modes using ARMv8 Crypto Extensions"
-	depends on ARM64 && KERNEL_MODE_NEON
+	depends on KERNEL_MODE_NEON
 	select CRYPTO_BLKCIPHER
 	select CRYPTO_AES_ARM64_CE
+	select CRYPTO_AES_ARM64
 	select CRYPTO_SIMD
 
 config CRYPTO_AES_ARM64_NEON_BLK
 	tristate "AES in ECB/CBC/CTR/XTS modes using NEON instructions"
-	depends on ARM64 && KERNEL_MODE_NEON
+	depends on KERNEL_MODE_NEON
 	select CRYPTO_BLKCIPHER
+	select CRYPTO_AES_ARM64
 	select CRYPTO_AES
 	select CRYPTO_SIMD
 
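Both block-mode options now select CRYPTO_AES_ARM64, the scalar AES
implementation whose __aes_arm64_encrypt() routine backs the non-SIMD
fallback added below.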
diff --git a/arch/arm64/crypto/aes-ctr-fallback.h b/arch/arm64/crypto/aes-ctr-fallback.h
new file mode 100644
index 000000000000..c9285717b6b5
--- /dev/null
+++ b/arch/arm64/crypto/aes-ctr-fallback.h
@@ -0,0 +1,53 @@
+/*
+ * Fallback for sync aes(ctr) in contexts where kernel mode NEON
+ * is not allowed
+ *
+ * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <crypto/aes.h>
+#include <crypto/internal/skcipher.h>
+
+asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
+
+static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
+					   struct skcipher_request *req)
+{
+	struct skcipher_walk walk;
+	u8 buf[AES_BLOCK_SIZE];
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, true);
+
+	while (walk.nbytes > 0) {
+		u8 *dst = walk.dst.virt.addr;
+		u8 *src = walk.src.virt.addr;
+		int nbytes = walk.nbytes;
+		int tail = 0;
+
+		if (nbytes < walk.total) {
+			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
+			tail = walk.nbytes % AES_BLOCK_SIZE;
+		}
+
+		do {
+			int bsize = min(nbytes, AES_BLOCK_SIZE);
+
+			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
+					    6 + ctx->key_length / 4);
+			crypto_xor_cpy(dst, src, buf, bsize);
+			crypto_inc(walk.iv, AES_BLOCK_SIZE);
+
+			dst += AES_BLOCK_SIZE;
+			src += AES_BLOCK_SIZE;
+			nbytes -= AES_BLOCK_SIZE;
+		} while (nbytes > 0);
+
+		err = skcipher_walk_done(&walk, tail);
+	}
+	return err;
+}
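A worked example of the tail handling in aes_ctr_encrypt_fallback():
suppose the walk presents 24 bytes of a 36-byte request. Since nbytes is
less than walk.total, nbytes is rounded down to 16 and tail becomes 8; one
full block is processed and skcipher_walk_done() re-presents the
unprocessed 8 bytes. On the final step walk.nbytes equals walk.total (20),
the round_down is skipped, and min(nbytes, AES_BLOCK_SIZE) lets
crypto_xor_cpy() handle the trailing 4-byte partial block.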
diff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c
index 0da30e3b0e4b..998ba519a026 100644
--- a/arch/arm64/crypto/aes-glue.c
+++ b/arch/arm64/crypto/aes-glue.c
@@ -10,6 +10,7 @@
 
 #include <asm/neon.h>
 #include <asm/hwcap.h>
+#include <asm/simd.h>
 #include <crypto/aes.h>
 #include <crypto/internal/hash.h>
 #include <crypto/internal/simd.h>
@@ -19,6 +20,7 @@
 #include <crypto/xts.h>
 
 #include "aes-ce-setkey.h"
+#include "aes-ctr-fallback.h"
 
 #ifdef USE_V8_CRYPTO_EXTENSIONS
 #define MODE	"ce"
@@ -249,6 +251,17 @@ static int ctr_encrypt(struct skcipher_request *req)
 	return err;
 }
 
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	if (!may_use_simd())
+		return aes_ctr_encrypt_fallback(ctx, req);
+
+	return ctr_encrypt(req);
+}
+
 static int xts_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
@@ -355,8 +368,8 @@ static struct skcipher_alg aes_algs[] = { {
 	.ivsize		= AES_BLOCK_SIZE,
 	.chunksize	= AES_BLOCK_SIZE,
 	.setkey		= skcipher_aes_setkey,
-	.encrypt	= ctr_encrypt,
-	.decrypt	= ctr_encrypt,
+	.encrypt	= ctr_encrypt_sync,
+	.decrypt	= ctr_encrypt_sync,
 }, {
 	.base = {
 		.cra_name		= "__xts(aes)",
@@ -458,11 +471,35 @@ static int mac_init(struct shash_desc *desc)
 	return 0;
 }
 
+static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
+			  u8 dg[], int enc_before, int enc_after)
+{
+	int rounds = 6 + ctx->key_length / 4;
+
+	if (may_use_simd()) {
+		kernel_neon_begin();
+		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
+			       enc_after);
+		kernel_neon_end();
+	} else {
+		if (enc_before)
+			__aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);
+
+		while (blocks--) {
+			crypto_xor(dg, in, AES_BLOCK_SIZE);
+			in += AES_BLOCK_SIZE;
+
+			if (blocks || enc_after)
+				__aes_arm64_encrypt(ctx->key_enc, dg, dg,
+						    rounds);
+		}
+	}
+}
+
 static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
 {
 	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
-	int rounds = 6 + tctx->key.key_length / 4;
 
 	while (len > 0) {
 		unsigned int l;
@@ -474,10 +511,8 @@ static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
 
 		len %= AES_BLOCK_SIZE;
 
-		kernel_neon_begin();
-		aes_mac_update(p, tctx->key.key_enc, rounds, blocks,
-			       ctx->dg, (ctx->len != 0), (len != 0));
-		kernel_neon_end();
+		mac_do_update(&tctx->key, p, blocks, ctx->dg,
+			      (ctx->len != 0), (len != 0));
 
 		p += blocks * AES_BLOCK_SIZE;
 
@@ -505,11 +540,8 @@ static int cbcmac_final(struct shash_desc *desc, u8 *out)
 {
 	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
-	int rounds = 6 + tctx->key.key_length / 4;
 
-	kernel_neon_begin();
-	aes_mac_update(NULL, tctx->key.key_enc, rounds, 0, ctx->dg, 1, 0);
-	kernel_neon_end();
+	mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);
 
 	memcpy(out, ctx->dg, AES_BLOCK_SIZE);
 
@@ -520,7 +552,6 @@ static int cmac_final(struct shash_desc *desc, u8 *out)
 {
 	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
-	int rounds = 6 + tctx->key.key_length / 4;
 	u8 *consts = tctx->consts;
 
 	if (ctx->len != AES_BLOCK_SIZE) {
@@ -528,9 +559,7 @@ static int cmac_final(struct shash_desc *desc, u8 *out)
 		consts += AES_BLOCK_SIZE;
 	}
 
-	kernel_neon_begin();
-	aes_mac_update(consts, tctx->key.key_enc, rounds, 1, ctx->dg, 0, 1);
-	kernel_neon_end();
+	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);
 
 	memcpy(out, ctx->dg, AES_BLOCK_SIZE);
 
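For context, an illustrative usage sketch (not part of the patch): callers
that need a synchronous CTR transform mask out asynchronous
implementations, and with this change the resulting cipher keeps working
even when may_use_simd() returns false and the C fallback path is taken.

    #include <crypto/skcipher.h>

    /* Illustrative only: obtain a synchronous ctr(aes) transform by
     * masking out asynchronous implementations.
     */
    static struct crypto_skcipher *get_sync_ctr_aes(void)
    {
            return crypto_alloc_skcipher("ctr(aes)", 0, CRYPTO_ALG_ASYNC);
    }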