Diffstat (limited to 'arch/x86/crypto/aesni-intel_glue.c')
 arch/x86/crypto/aesni-intel_glue.c | 130 +++++++++++++++++++++++++++++++++---
 1 file changed, 123 insertions(+), 7 deletions(-)
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 49c552c060e9..2cb3dcc4490a 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -18,6 +18,7 @@
 #include <crypto/algapi.h>
 #include <crypto/aes.h>
 #include <crypto/cryptd.h>
+#include <crypto/ctr.h>
 #include <asm/i387.h>
 #include <asm/aes.h>
 
@@ -58,6 +59,8 @@ asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                               const u8 *in, unsigned int len, u8 *iv);
 asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                               const u8 *in, unsigned int len, u8 *iv);
+asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
+                              const u8 *in, unsigned int len, u8 *iv);
 
 static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
 {
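The new aesni_ctr_enc() declared above is implemented in the companion assembly file (not part of this diff). As a rough, hedged plain-C reference of what it computes per full block -- encrypt the counter block, XOR the keystream into the data, then bump the counter as a big-endian integer -- here is a self-contained sketch. None of these names exist in the kernel; toy_block_enc() is only a stand-in cipher so the sketch compiles on its own, it is NOT AES.

/* Plain-C reference model of CTR over whole 16-byte blocks. */
#include <stdint.h>
#include <stddef.h>

#define BLOCK_SIZE 16

typedef void (*block_enc_fn)(const void *key, uint8_t out[BLOCK_SIZE],
                             const uint8_t in[BLOCK_SIZE]);

/* Increment the counter block as one big-endian integer (crypto_inc-style). */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
        for (int i = BLOCK_SIZE - 1; i >= 0; i--)
                if (++ctr[i])
                        break;
}

/* CTR over full blocks only; any sub-block tail is handled separately. */
static void ctr_enc_blocks(block_enc_fn enc, const void *key, uint8_t *dst,
                           const uint8_t *src, size_t len,
                           uint8_t ctr[BLOCK_SIZE])
{
        uint8_t ks[BLOCK_SIZE];

        for (; len >= BLOCK_SIZE; len -= BLOCK_SIZE) {
                enc(key, ks, ctr);                    /* keystream = E_K(counter) */
                for (int i = 0; i < BLOCK_SIZE; i++)
                        dst[i] = src[i] ^ ks[i];      /* ciphertext = plaintext ^ keystream */
                ctr_inc(ctr);
                src += BLOCK_SIZE;
                dst += BLOCK_SIZE;
        }
}

/* Stand-in "cipher": XORs each byte with the first key byte.  Not AES. */
static void toy_block_enc(const void *key, uint8_t out[BLOCK_SIZE],
                          const uint8_t in[BLOCK_SIZE])
{
        for (int i = 0; i < BLOCK_SIZE; i++)
                out[i] = in[i] ^ ((const uint8_t *)key)[0];
}

int main(void)
{
        uint8_t key = 0xAA, ctr[BLOCK_SIZE] = { 0 };
        uint8_t msg[32] = "counter mode demo", out[32];

        ctr_enc_blocks(toy_block_enc, &key, out, msg, sizeof(msg), ctr);
        return 0;
}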
@@ -321,6 +324,72 @@ static struct crypto_alg blk_cbc_alg = {
         },
 };
 
+static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
+                            struct blkcipher_walk *walk)
+{
+        u8 *ctrblk = walk->iv;
+        u8 keystream[AES_BLOCK_SIZE];
+        u8 *src = walk->src.virt.addr;
+        u8 *dst = walk->dst.virt.addr;
+        unsigned int nbytes = walk->nbytes;
+
+        aesni_enc(ctx, keystream, ctrblk);
+        crypto_xor(keystream, src, nbytes);
+        memcpy(dst, keystream, nbytes);
+        crypto_inc(ctrblk, AES_BLOCK_SIZE);
+}
+
+static int ctr_crypt(struct blkcipher_desc *desc,
+                     struct scatterlist *dst, struct scatterlist *src,
+                     unsigned int nbytes)
+{
+        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
+        struct blkcipher_walk walk;
+        int err;
+
+        blkcipher_walk_init(&walk, dst, src, nbytes);
+        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+
+        kernel_fpu_begin();
+        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+                aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
+                              nbytes & AES_BLOCK_MASK, walk.iv);
+                nbytes &= AES_BLOCK_SIZE - 1;
+                err = blkcipher_walk_done(desc, &walk, nbytes);
+        }
+        if (walk.nbytes) {
+                ctr_crypt_final(ctx, &walk);
+                err = blkcipher_walk_done(desc, &walk, 0);
+        }
+        kernel_fpu_end();
+
+        return err;
+}
+
+static struct crypto_alg blk_ctr_alg = {
+        .cra_name               = "__ctr-aes-aesni",
+        .cra_driver_name        = "__driver-ctr-aes-aesni",
+        .cra_priority           = 0,
+        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
+        .cra_blocksize          = 1,
+        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
+        .cra_alignmask          = 0,
+        .cra_type               = &crypto_blkcipher_type,
+        .cra_module             = THIS_MODULE,
+        .cra_list               = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
+        .cra_u = {
+                .blkcipher = {
+                        .min_keysize    = AES_MIN_KEY_SIZE,
+                        .max_keysize    = AES_MAX_KEY_SIZE,
+                        .ivsize         = AES_BLOCK_SIZE,
+                        .setkey         = aes_set_key,
+                        .encrypt        = ctr_crypt,
+                        .decrypt        = ctr_crypt,
+                },
+        },
+};
+
 static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                         unsigned int key_len)
 {
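ctr_crypt() above hands whole blocks to the assembly routine and leaves the sub-block tail to ctr_crypt_final(). A small standalone illustration of that split, assuming AES_BLOCK_MASK is ~(AES_BLOCK_SIZE - 1) as defined elsewhere in this file:

/* How a request length is split into full-block bytes and a tail. */
#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))

int main(void)
{
        unsigned int nbytes = 100;
        unsigned int full = nbytes & AES_BLOCK_MASK;       /* 96: fed to aesni_ctr_enc() */
        unsigned int tail = nbytes & (AES_BLOCK_SIZE - 1); /* 4:  left for ctr_crypt_final() */

        printf("%u bytes = %u full-block bytes + %u tail bytes\n",
               nbytes, full, tail);
        return 0;
}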
@@ -467,13 +536,11 @@ static struct crypto_alg ablk_cbc_alg = {
         },
 };
 
-#ifdef HAS_CTR
 static int ablk_ctr_init(struct crypto_tfm *tfm)
 {
         struct cryptd_ablkcipher *cryptd_tfm;
 
-        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
-                                             0, 0);
+        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
         if (IS_ERR(cryptd_tfm))
                 return PTR_ERR(cryptd_tfm);
         ablk_init_common(tfm, cryptd_tfm);
@@ -500,11 +567,50 @@ static struct crypto_alg ablk_ctr_alg = {
                         .ivsize         = AES_BLOCK_SIZE,
                         .setkey         = ablk_set_key,
                         .encrypt        = ablk_encrypt,
-                        .decrypt        = ablk_decrypt,
+                        .decrypt        = ablk_encrypt,
                         .geniv          = "chainiv",
                 },
         },
 };
+
+#ifdef HAS_CTR
+static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
+{
+        struct cryptd_ablkcipher *cryptd_tfm;
+
+        cryptd_tfm = cryptd_alloc_ablkcipher(
+                "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
+        if (IS_ERR(cryptd_tfm))
+                return PTR_ERR(cryptd_tfm);
+        ablk_init_common(tfm, cryptd_tfm);
+        return 0;
+}
+
+static struct crypto_alg ablk_rfc3686_ctr_alg = {
+        .cra_name               = "rfc3686(ctr(aes))",
+        .cra_driver_name        = "rfc3686-ctr-aes-aesni",
+        .cra_priority           = 400,
+        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+        .cra_blocksize          = 1,
+        .cra_ctxsize            = sizeof(struct async_aes_ctx),
+        .cra_alignmask          = 0,
+        .cra_type               = &crypto_ablkcipher_type,
+        .cra_module             = THIS_MODULE,
+        .cra_list               = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
+        .cra_init               = ablk_rfc3686_ctr_init,
+        .cra_exit               = ablk_exit,
+        .cra_u = {
+                .ablkcipher = {
+                        .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
+                        .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
+                        .ivsize      = CTR_RFC3686_IV_SIZE,
+                        .setkey      = ablk_set_key,
+                        .encrypt     = ablk_encrypt,
+                        .decrypt     = ablk_decrypt,
+                        .geniv       = "seqiv",
+                },
+        },
+};
 #endif
 
 #ifdef HAS_LRW
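The new rfc3686-ctr-aes-aesni algorithm drives plain CTR through the rfc3686 template: the last four key bytes become a per-key nonce, and each request's initial counter block is nonce || 8-byte IV || a 32-bit big-endian block counter starting at 1, per RFC 3686. A self-contained sketch of that layout, with local RFC3686_* constants standing in for the CTR_RFC3686_* values from <crypto/ctr.h>:

/* Build the RFC 3686 initial counter block: nonce | IV | BE32 counter = 1. */
#include <stdint.h>
#include <string.h>

#define RFC3686_NONCE_SIZE 4
#define RFC3686_IV_SIZE    8
#define RFC3686_BLOCK_SIZE 16

static void rfc3686_counter_block(uint8_t ctrblk[RFC3686_BLOCK_SIZE],
                                  const uint8_t nonce[RFC3686_NONCE_SIZE],
                                  const uint8_t iv[RFC3686_IV_SIZE])
{
        memcpy(ctrblk, nonce, RFC3686_NONCE_SIZE);
        memcpy(ctrblk + RFC3686_NONCE_SIZE, iv, RFC3686_IV_SIZE);
        /* initial block counter is 1, stored big-endian */
        ctrblk[12] = 0;
        ctrblk[13] = 0;
        ctrblk[14] = 0;
        ctrblk[15] = 1;
}

int main(void)
{
        uint8_t ctr[RFC3686_BLOCK_SIZE];
        const uint8_t nonce[RFC3686_NONCE_SIZE] = { 0, 1, 2, 3 };
        const uint8_t iv[RFC3686_IV_SIZE] = { 0 };

        rfc3686_counter_block(ctr, nonce, iv);
        return ctr[15] == 1 ? 0 : 1;
}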
@@ -640,13 +746,17 @@ static int __init aesni_init(void)
                 goto blk_ecb_err;
         if ((err = crypto_register_alg(&blk_cbc_alg)))
                 goto blk_cbc_err;
+        if ((err = crypto_register_alg(&blk_ctr_alg)))
+                goto blk_ctr_err;
         if ((err = crypto_register_alg(&ablk_ecb_alg)))
                 goto ablk_ecb_err;
         if ((err = crypto_register_alg(&ablk_cbc_alg)))
                 goto ablk_cbc_err;
-#ifdef HAS_CTR
         if ((err = crypto_register_alg(&ablk_ctr_alg)))
                 goto ablk_ctr_err;
+#ifdef HAS_CTR
+        if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
+                goto ablk_rfc3686_ctr_err;
 #endif
 #ifdef HAS_LRW
         if ((err = crypto_register_alg(&ablk_lrw_alg)))
@@ -675,13 +785,17 @@ ablk_pcbc_err:
 ablk_lrw_err:
 #endif
 #ifdef HAS_CTR
+        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
+ablk_rfc3686_ctr_err:
+#endif
         crypto_unregister_alg(&ablk_ctr_alg);
 ablk_ctr_err:
-#endif
         crypto_unregister_alg(&ablk_cbc_alg);
 ablk_cbc_err:
         crypto_unregister_alg(&ablk_ecb_alg);
 ablk_ecb_err:
+        crypto_unregister_alg(&blk_ctr_alg);
+blk_ctr_err:
         crypto_unregister_alg(&blk_cbc_alg);
 blk_cbc_err:
         crypto_unregister_alg(&blk_ecb_alg);
@@ -705,10 +819,12 @@ static void __exit aesni_exit(void)
         crypto_unregister_alg(&ablk_lrw_alg);
 #endif
 #ifdef HAS_CTR
-        crypto_unregister_alg(&ablk_ctr_alg);
+        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
 #endif
+        crypto_unregister_alg(&ablk_ctr_alg);
         crypto_unregister_alg(&ablk_cbc_alg);
         crypto_unregister_alg(&ablk_ecb_alg);
+        crypto_unregister_alg(&blk_ctr_alg);
         crypto_unregister_alg(&blk_cbc_alg);
         crypto_unregister_alg(&blk_ecb_alg);
         crypto_unregister_alg(&__aesni_alg);
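The init and exit hunks above keep registration and unregistration in mirror order, using the usual kernel goto-unwind idiom: each error label undoes everything registered before the failing step. A generic, hypothetical sketch of the pattern (step() and undo() are placeholders, not kernel APIs):

/* Register-in-order, unwind-in-reverse via fall-through error labels. */
#include <stdio.h>

static int step(const char *name) { printf("register %s\n", name); return 0; }
static void undo(const char *name) { printf("unregister %s\n", name); }

static int init_all(void)
{
        int err;

        if ((err = step("a")))
                goto a_err;
        if ((err = step("b")))
                goto b_err;
        if ((err = step("c")))
                goto c_err;
        return 0;

c_err:
        undo("b");      /* falls through: also undoes "a" below */
b_err:
        undo("a");
a_err:
        return err;
}

int main(void) { return init_all(); }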