Diffstat (limited to 'arch/x86/crypto/aesni-intel_glue.c')
-rw-r--r--	arch/x86/crypto/aesni-intel_glue.c	793
1 file changed, 318 insertions(+), 475 deletions(-)
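
The patch below drops the many standalone struct crypto_alg definitions and their goto-based registration/unregistration chains, and instead collects every algorithm into one aesni_algs[] array that is registered and torn down with a single call each. The following is a minimal sketch of that bulk-registration pattern, not part of the patch: the demo_* names and the no-op cipher callbacks are illustrative placeholders; crypto_register_algs()/crypto_unregister_algs(), ARRAY_SIZE() and the struct crypto_alg fields are the real kernel interfaces the patch uses.

/*
 * Illustrative sketch only: register an array of algorithms in one call,
 * mirroring the conversion done in the patch below.
 */
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/string.h>

static int demo_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	return 0;	/* accept any key of the advertised size (demo only) */
}

static void demo_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	memcpy(dst, src, 16);	/* identity "cipher", for illustration only */
}

static struct crypto_alg demo_algs[] = { {
	.cra_name = "demo-a",
	.cra_driver_name = "demo-a-generic",
	.cra_priority = 100,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = 16,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = 16,
			.cia_max_keysize = 16,
			.cia_setkey = demo_setkey,
			.cia_encrypt = demo_crypt,
			.cia_decrypt = demo_crypt,
		},
	},
}, {
	.cra_name = "demo-b",
	.cra_driver_name = "demo-b-generic",
	.cra_priority = 100,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = 16,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = 16,
			.cia_max_keysize = 16,
			.cia_setkey = demo_setkey,
			.cia_encrypt = demo_crypt,
			.cia_decrypt = demo_crypt,
		},
	},
} };

static int __init demo_init(void)
{
	int i;

	/* Kernels of this era expect the caller to initialise cra_list
	 * before registration, exactly as the patch does for aesni_algs. */
	for (i = 0; i < ARRAY_SIZE(demo_algs); i++)
		INIT_LIST_HEAD(&demo_algs[i].cra_list);

	/* One call registers every entry in the array... */
	return crypto_register_algs(demo_algs, ARRAY_SIZE(demo_algs));
}

static void __exit demo_exit(void)
{
	/* ...and one call unregisters them all again. */
	crypto_unregister_algs(demo_algs, ARRAY_SIZE(demo_algs));
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");
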
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index c799352e24fc..ac7f5cd019e8 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -222,27 +222,6 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 	}
 }
 
-static struct crypto_alg aesni_alg = {
-	.cra_name = "aes",
-	.cra_driver_name = "aes-aesni",
-	.cra_priority = 300,
-	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
-	.cra_alignmask = 0,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(aesni_alg.cra_list),
-	.cra_u = {
-		.cipher = {
-			.cia_min_keysize = AES_MIN_KEY_SIZE,
-			.cia_max_keysize = AES_MAX_KEY_SIZE,
-			.cia_setkey = aes_set_key,
-			.cia_encrypt = aes_encrypt,
-			.cia_decrypt = aes_decrypt
-		}
-	}
-};
-
 static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
@@ -257,27 +236,6 @@ static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 	aesni_dec(ctx, dst, src);
 }
 
-static struct crypto_alg __aesni_alg = {
-	.cra_name = "__aes-aesni",
-	.cra_driver_name = "__driver-aes-aesni",
-	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
-	.cra_alignmask = 0,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(__aesni_alg.cra_list),
-	.cra_u = {
-		.cipher = {
-			.cia_min_keysize = AES_MIN_KEY_SIZE,
-			.cia_max_keysize = AES_MAX_KEY_SIZE,
-			.cia_setkey = aes_set_key,
-			.cia_encrypt = __aes_encrypt,
-			.cia_decrypt = __aes_decrypt
-		}
-	}
-};
-
 static int ecb_encrypt(struct blkcipher_desc *desc,
 		       struct scatterlist *dst, struct scatterlist *src,
 		       unsigned int nbytes)
@@ -326,28 +284,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc,
 	return err;
 }
 
-static struct crypto_alg blk_ecb_alg = {
-	.cra_name = "__ecb-aes-aesni",
-	.cra_driver_name = "__driver-ecb-aes-aesni",
-	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
-	.cra_alignmask = 0,
-	.cra_type = &crypto_blkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.setkey = aes_set_key,
-			.encrypt = ecb_encrypt,
-			.decrypt = ecb_decrypt,
-		},
-	},
-};
-
 static int cbc_encrypt(struct blkcipher_desc *desc,
 		       struct scatterlist *dst, struct scatterlist *src,
 		       unsigned int nbytes)
@@ -396,28 +332,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 	return err;
 }
 
-static struct crypto_alg blk_cbc_alg = {
-	.cra_name = "__cbc-aes-aesni",
-	.cra_driver_name = "__driver-cbc-aes-aesni",
-	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
-	.cra_alignmask = 0,
-	.cra_type = &crypto_blkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.setkey = aes_set_key,
-			.encrypt = cbc_encrypt,
-			.decrypt = cbc_decrypt,
-		},
-	},
-};
-
 #ifdef CONFIG_X86_64
 static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
 			    struct blkcipher_walk *walk)
@@ -461,29 +375,6 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 
 	return err;
 }
-
-static struct crypto_alg blk_ctr_alg = {
-	.cra_name = "__ctr-aes-aesni",
-	.cra_driver_name = "__driver-ctr-aes-aesni",
-	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
-	.cra_alignmask = 0,
-	.cra_type = &crypto_blkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = aes_set_key,
-			.encrypt = ctr_crypt,
-			.decrypt = ctr_crypt,
-		},
-	},
-};
 #endif
 
 static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
@@ -551,281 +442,65 @@ static void ablk_exit(struct crypto_tfm *tfm)
 	cryptd_free_ablkcipher(ctx->cryptd_tfm);
 }
 
-static void ablk_init_common(struct crypto_tfm *tfm,
-			     struct cryptd_ablkcipher *cryptd_tfm)
+static int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
 {
 	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, 0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
 
 	ctx->cryptd_tfm = cryptd_tfm;
 	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
 		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
+
+	return 0;
 }
 
 static int ablk_ecb_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
 }
 
-static struct crypto_alg ablk_ecb_alg = {
-	.cra_name = "ecb(aes)",
-	.cra_driver_name = "ecb-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
-	.cra_init = ablk_ecb_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-		},
-	},
-};
-
 static int ablk_cbc_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
 }
 
-static struct crypto_alg ablk_cbc_alg = {
-	.cra_name = "cbc(aes)",
-	.cra_driver_name = "cbc-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
-	.cra_init = ablk_cbc_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-		},
-	},
-};
-
 #ifdef CONFIG_X86_64
 static int ablk_ctr_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
 }
 
-static struct crypto_alg ablk_ctr_alg = {
-	.cra_name = "ctr(aes)",
-	.cra_driver_name = "ctr-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
-	.cra_init = ablk_ctr_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_encrypt,
-			.geniv = "chainiv",
-		},
-	},
-};
-
 #ifdef HAS_CTR
 static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher(
-		"rfc3686(__driver-ctr-aes-aesni)", 0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "rfc3686(__driver-ctr-aes-aesni)");
 }
-
-static struct crypto_alg ablk_rfc3686_ctr_alg = {
-	.cra_name = "rfc3686(ctr(aes))",
-	.cra_driver_name = "rfc3686-ctr-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
-	.cra_init = ablk_rfc3686_ctr_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
-			.ivsize = CTR_RFC3686_IV_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-			.geniv = "seqiv",
-		},
-	},
-};
 #endif
 #endif
 
 #ifdef HAS_LRW
 static int ablk_lrw_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
-					     0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "fpu(lrw(__driver-aes-aesni))");
 }
-
-static struct crypto_alg ablk_lrw_alg = {
-	.cra_name = "lrw(aes)",
-	.cra_driver_name = "lrw-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
-	.cra_init = ablk_lrw_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-		},
-	},
-};
 #endif
 
 #ifdef HAS_PCBC
 static int ablk_pcbc_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
-					     0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
 }
-
-static struct crypto_alg ablk_pcbc_alg = {
-	.cra_name = "pcbc(aes)",
-	.cra_driver_name = "pcbc-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
-	.cra_init = ablk_pcbc_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-		},
-	},
-};
 #endif
 
 #ifdef HAS_XTS
 static int ablk_xts_init(struct crypto_tfm *tfm)
 {
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
-					     0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ablk_init_common(tfm, cryptd_tfm);
-	return 0;
+	return ablk_init_common(tfm, "fpu(xts(__driver-aes-aesni))");
 }
-
-static struct crypto_alg ablk_xts_alg = {
-	.cra_name = "xts(aes)",
-	.cra_driver_name = "xts-aes-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct async_aes_ctx),
-	.cra_alignmask = 0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
-	.cra_init = ablk_xts_init,
-	.cra_exit = ablk_exit,
-	.cra_u = {
-		.ablkcipher = {
-			.min_keysize = 2 * AES_MIN_KEY_SIZE,
-			.max_keysize = 2 * AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.setkey = ablk_set_key,
-			.encrypt = ablk_encrypt,
-			.decrypt = ablk_decrypt,
-		},
-	},
-};
 #endif
 
 #ifdef CONFIG_X86_64
@@ -1050,32 +725,6 @@ static int rfc4106_decrypt(struct aead_request *req)
 	}
 }
 
-static struct crypto_alg rfc4106_alg = {
-	.cra_name = "rfc4106(gcm(aes))",
-	.cra_driver_name = "rfc4106-gcm-aesni",
-	.cra_priority = 400,
-	.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
-	.cra_alignmask = 0,
-	.cra_type = &crypto_nivaead_type,
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(rfc4106_alg.cra_list),
-	.cra_init = rfc4106_init,
-	.cra_exit = rfc4106_exit,
-	.cra_u = {
-		.aead = {
-			.setkey = rfc4106_set_key,
-			.setauthsize = rfc4106_set_authsize,
-			.encrypt = rfc4106_encrypt,
-			.decrypt = rfc4106_decrypt,
-			.geniv = "seqiv",
-			.ivsize = 8,
-			.maxauthsize = 16,
-		},
-	},
-};
-
 static int __driver_rfc4106_encrypt(struct aead_request *req)
 {
 	u8 one_entry_in_sg = 0;
@@ -1233,26 +882,316 @@ static int __driver_rfc4106_decrypt(struct aead_request *req)
 	}
 	return retval;
 }
+#endif
 
-static struct crypto_alg __rfc4106_alg = {
+static struct crypto_alg aesni_algs[] = { {
+	.cra_name = "aes",
+	.cra_driver_name = "aes-aesni",
+	.cra_priority = 300,
+	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+			AESNI_ALIGN - 1,
+	.cra_alignmask = 0,
+	.cra_module = THIS_MODULE,
+	.cra_u = {
+		.cipher = {
+			.cia_min_keysize = AES_MIN_KEY_SIZE,
+			.cia_max_keysize = AES_MAX_KEY_SIZE,
+			.cia_setkey = aes_set_key,
+			.cia_encrypt = aes_encrypt,
+			.cia_decrypt = aes_decrypt
+		}
+	}
+}, {
+	.cra_name = "__aes-aesni",
+	.cra_driver_name = "__driver-aes-aesni",
+	.cra_priority = 0,
+	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+			AESNI_ALIGN - 1,
+	.cra_alignmask = 0,
+	.cra_module = THIS_MODULE,
+	.cra_u = {
+		.cipher = {
+			.cia_min_keysize = AES_MIN_KEY_SIZE,
+			.cia_max_keysize = AES_MAX_KEY_SIZE,
+			.cia_setkey = aes_set_key,
+			.cia_encrypt = __aes_encrypt,
+			.cia_decrypt = __aes_decrypt
+		}
+	}
+}, {
+	.cra_name = "__ecb-aes-aesni",
+	.cra_driver_name = "__driver-ecb-aes-aesni",
+	.cra_priority = 0,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+			AESNI_ALIGN - 1,
+	.cra_alignmask = 0,
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = aes_set_key,
+			.encrypt = ecb_encrypt,
+			.decrypt = ecb_decrypt,
+		},
+	},
+}, {
+	.cra_name = "__cbc-aes-aesni",
+	.cra_driver_name = "__driver-cbc-aes-aesni",
+	.cra_priority = 0,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+			AESNI_ALIGN - 1,
+	.cra_alignmask = 0,
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = aes_set_key,
+			.encrypt = cbc_encrypt,
+			.decrypt = cbc_decrypt,
+		},
+	},
+}, {
+	.cra_name = "ecb(aes)",
+	.cra_driver_name = "ecb-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_ecb_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+		},
+	},
+}, {
+	.cra_name = "cbc(aes)",
+	.cra_driver_name = "cbc-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_cbc_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+		},
+	},
+#ifdef CONFIG_X86_64
+}, {
+	.cra_name = "__ctr-aes-aesni",
+	.cra_driver_name = "__driver-ctr-aes-aesni",
+	.cra_priority = 0,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+			AESNI_ALIGN - 1,
+	.cra_alignmask = 0,
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = aes_set_key,
+			.encrypt = ctr_crypt,
+			.decrypt = ctr_crypt,
+		},
+	},
+}, {
+	.cra_name = "ctr(aes)",
+	.cra_driver_name = "ctr-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_ctr_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_encrypt,
+			.geniv = "chainiv",
+		},
+	},
+}, {
 	.cra_name = "__gcm-aes-aesni",
 	.cra_driver_name = "__driver-gcm-aes-aesni",
 	.cra_priority = 0,
 	.cra_flags = CRYPTO_ALG_TYPE_AEAD,
 	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
+	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
+			AESNI_ALIGN,
 	.cra_alignmask = 0,
 	.cra_type = &crypto_aead_type,
 	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(__rfc4106_alg.cra_list),
 	.cra_u = {
 		.aead = {
 			.encrypt = __driver_rfc4106_encrypt,
 			.decrypt = __driver_rfc4106_decrypt,
 		},
 	},
-};
+}, {
+	.cra_name = "rfc4106(gcm(aes))",
+	.cra_driver_name = "rfc4106-gcm-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
+			AESNI_ALIGN,
+	.cra_alignmask = 0,
+	.cra_type = &crypto_nivaead_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = rfc4106_init,
+	.cra_exit = rfc4106_exit,
+	.cra_u = {
+		.aead = {
+			.setkey = rfc4106_set_key,
+			.setauthsize = rfc4106_set_authsize,
+			.encrypt = rfc4106_encrypt,
+			.decrypt = rfc4106_decrypt,
+			.geniv = "seqiv",
+			.ivsize = 8,
+			.maxauthsize = 16,
+		},
+	},
+#ifdef HAS_CTR
+}, {
+	.cra_name = "rfc3686(ctr(aes))",
+	.cra_driver_name = "rfc3686-ctr-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_rfc3686_ctr_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE +
+				       CTR_RFC3686_NONCE_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE +
+				       CTR_RFC3686_NONCE_SIZE,
+			.ivsize = CTR_RFC3686_IV_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+			.geniv = "seqiv",
+		},
+	},
+#endif
+#endif
+#ifdef HAS_LRW
+}, {
+	.cra_name = "lrw(aes)",
+	.cra_driver_name = "lrw-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_lrw_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+		},
+	},
+#endif
+#ifdef HAS_PCBC
+}, {
+	.cra_name = "pcbc(aes)",
+	.cra_driver_name = "pcbc-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_pcbc_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+		},
+	},
 #endif
+#ifdef HAS_XTS
+}, {
+	.cra_name = "xts(aes)",
+	.cra_driver_name = "xts-aes-aesni",
+	.cra_priority = 400,
+	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct async_aes_ctx),
+	.cra_alignmask = 0,
+	.cra_type = &crypto_ablkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_init = ablk_xts_init,
+	.cra_exit = ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize = 2 * AES_MIN_KEY_SIZE,
+			.max_keysize = 2 * AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ablk_set_key,
+			.encrypt = ablk_encrypt,
+			.decrypt = ablk_decrypt,
+		},
+	},
+#endif
+} };
 
 
 static const struct x86_cpu_id aesni_cpu_id[] = {
@@ -1263,120 +1202,24 @@ MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
 
 static int __init aesni_init(void)
 {
-	int err;
+	int err, i;
 
 	if (!x86_match_cpu(aesni_cpu_id))
 		return -ENODEV;
 
-	if ((err = crypto_fpu_init()))
-		goto fpu_err;
-	if ((err = crypto_register_alg(&aesni_alg)))
-		goto aes_err;
-	if ((err = crypto_register_alg(&__aesni_alg)))
-		goto __aes_err;
-	if ((err = crypto_register_alg(&blk_ecb_alg)))
-		goto blk_ecb_err;
-	if ((err = crypto_register_alg(&blk_cbc_alg)))
-		goto blk_cbc_err;
-	if ((err = crypto_register_alg(&ablk_ecb_alg)))
-		goto ablk_ecb_err;
-	if ((err = crypto_register_alg(&ablk_cbc_alg)))
-		goto ablk_cbc_err;
-#ifdef CONFIG_X86_64
-	if ((err = crypto_register_alg(&blk_ctr_alg)))
-		goto blk_ctr_err;
-	if ((err = crypto_register_alg(&ablk_ctr_alg)))
-		goto ablk_ctr_err;
-	if ((err = crypto_register_alg(&__rfc4106_alg)))
-		goto __aead_gcm_err;
-	if ((err = crypto_register_alg(&rfc4106_alg)))
-		goto aead_gcm_err;
-#ifdef HAS_CTR
-	if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
-		goto ablk_rfc3686_ctr_err;
-#endif
-#endif
-#ifdef HAS_LRW
-	if ((err = crypto_register_alg(&ablk_lrw_alg)))
-		goto ablk_lrw_err;
-#endif
-#ifdef HAS_PCBC
-	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
-		goto ablk_pcbc_err;
-#endif
-#ifdef HAS_XTS
-	if ((err = crypto_register_alg(&ablk_xts_alg)))
-		goto ablk_xts_err;
-#endif
-	return err;
+	err = crypto_fpu_init();
+	if (err)
+		return err;
 
-#ifdef HAS_XTS
-ablk_xts_err:
-#endif
-#ifdef HAS_PCBC
-	crypto_unregister_alg(&ablk_pcbc_alg);
-ablk_pcbc_err:
-#endif
-#ifdef HAS_LRW
-	crypto_unregister_alg(&ablk_lrw_alg);
-ablk_lrw_err:
-#endif
-#ifdef CONFIG_X86_64
-#ifdef HAS_CTR
-	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
-ablk_rfc3686_ctr_err:
-#endif
-	crypto_unregister_alg(&rfc4106_alg);
-aead_gcm_err:
-	crypto_unregister_alg(&__rfc4106_alg);
-__aead_gcm_err:
-	crypto_unregister_alg(&ablk_ctr_alg);
-ablk_ctr_err:
-	crypto_unregister_alg(&blk_ctr_alg);
-blk_ctr_err:
-#endif
-	crypto_unregister_alg(&ablk_cbc_alg);
-ablk_cbc_err:
-	crypto_unregister_alg(&ablk_ecb_alg);
-ablk_ecb_err:
-	crypto_unregister_alg(&blk_cbc_alg);
-blk_cbc_err:
-	crypto_unregister_alg(&blk_ecb_alg);
-blk_ecb_err:
-	crypto_unregister_alg(&__aesni_alg);
-__aes_err:
-	crypto_unregister_alg(&aesni_alg);
-aes_err:
-fpu_err:
-	return err;
+	for (i = 0; i < ARRAY_SIZE(aesni_algs); i++)
+		INIT_LIST_HEAD(&aesni_algs[i].cra_list);
+
+	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
 }
 
 static void __exit aesni_exit(void)
 {
-#ifdef HAS_XTS
-	crypto_unregister_alg(&ablk_xts_alg);
-#endif
-#ifdef HAS_PCBC
-	crypto_unregister_alg(&ablk_pcbc_alg);
-#endif
-#ifdef HAS_LRW
-	crypto_unregister_alg(&ablk_lrw_alg);
-#endif
-#ifdef CONFIG_X86_64
-#ifdef HAS_CTR
-	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
-#endif
-	crypto_unregister_alg(&rfc4106_alg);
-	crypto_unregister_alg(&__rfc4106_alg);
-	crypto_unregister_alg(&ablk_ctr_alg);
-	crypto_unregister_alg(&blk_ctr_alg);
-#endif
-	crypto_unregister_alg(&ablk_cbc_alg);
-	crypto_unregister_alg(&ablk_ecb_alg);
-	crypto_unregister_alg(&blk_cbc_alg);
-	crypto_unregister_alg(&blk_ecb_alg);
-	crypto_unregister_alg(&__aesni_alg);
-	crypto_unregister_alg(&aesni_alg);
+	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
 
 	crypto_fpu_exit();
 }