 arch/x86/crypto/aesni-intel_glue.c | 267
 crypto/Kconfig                     |   5
 2 files changed, 271 insertions(+), 1 deletion(-)
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 02af0af65497..4e663398f77f 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -21,6 +21,22 @@
 #include <asm/i387.h>
 #include <asm/aes.h>
 
+#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
+#define HAS_CTR
+#endif
+
+#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
+#define HAS_LRW
+#endif
+
+#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
+#define HAS_PCBC
+#endif
+
+#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
+#define HAS_XTS
+#endif
+
 struct async_aes_ctx {
 	struct cryptd_ablkcipher *cryptd_tfm;
 };
@@ -137,6 +153,41 @@ static struct crypto_alg aesni_alg = {
 	}
 };
 
+static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+
+	aesni_enc(ctx, dst, src);
+}
+
+static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+
+	aesni_dec(ctx, dst, src);
+}
+
+static struct crypto_alg __aesni_alg = {
+	.cra_name		= "__aes-aesni",
+	.cra_driver_name	= "__driver-aes-aesni",
+	.cra_priority		= 0,
+	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
+	.cra_alignmask		= 0,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
+	.cra_u	= {
+		.cipher	= {
+			.cia_min_keysize	= AES_MIN_KEY_SIZE,
+			.cia_max_keysize	= AES_MAX_KEY_SIZE,
+			.cia_setkey		= aes_set_key,
+			.cia_encrypt		= __aes_encrypt,
+			.cia_decrypt		= __aes_decrypt
+		}
+	}
+};
+
 static int ecb_encrypt(struct blkcipher_desc *desc,
 		       struct scatterlist *dst, struct scatterlist *src,
 		       unsigned int nbytes)
@@ -277,8 +328,16 @@ static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
 			unsigned int key_len)
 {
 	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
+	int err;
 
-	return crypto_ablkcipher_setkey(&ctx->cryptd_tfm->base, key, key_len);
+	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
+				    & CRYPTO_TFM_REQ_MASK);
+	err = crypto_ablkcipher_setkey(child, key, key_len);
+	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
+				    & CRYPTO_TFM_RES_MASK);
+	return err;
 }
 
 static int ablk_encrypt(struct ablkcipher_request *req)
@@ -411,6 +470,163 @@ static struct crypto_alg ablk_cbc_alg = {
 	},
 };
 
+#ifdef HAS_CTR
+static int ablk_ctr_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
+					     0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_ctr_alg = {
+	.cra_name		= "ctr(aes)",
+	.cra_driver_name	= "ctr-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= 1,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
+	.cra_init		= ablk_ctr_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+			.geniv		= "chainiv",
+		},
+	},
+};
+#endif
+
+#ifdef HAS_LRW
+static int ablk_lrw_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
+					     0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_lrw_alg = {
+	.cra_name		= "lrw(aes)",
+	.cra_driver_name	= "lrw-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
+	.cra_init		= ablk_lrw_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+		},
+	},
+};
+#endif
+
+#ifdef HAS_PCBC
+static int ablk_pcbc_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
+					     0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_pcbc_alg = {
+	.cra_name		= "pcbc(aes)",
+	.cra_driver_name	= "pcbc-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
+	.cra_init		= ablk_pcbc_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+		},
+	},
+};
+#endif
+
+#ifdef HAS_XTS
+static int ablk_xts_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
+					     0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_xts_alg = {
+	.cra_name		= "xts(aes)",
+	.cra_driver_name	= "xts-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
+	.cra_init		= ablk_xts_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
+			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+		},
+	},
+};
+#endif
+
 static int __init aesni_init(void)
 {
 	int err;
@@ -421,6 +637,8 @@ static int __init aesni_init(void)
 	}
 	if ((err = crypto_register_alg(&aesni_alg)))
 		goto aes_err;
+	if ((err = crypto_register_alg(&__aesni_alg)))
+		goto __aes_err;
 	if ((err = crypto_register_alg(&blk_ecb_alg)))
 		goto blk_ecb_err;
 	if ((err = crypto_register_alg(&blk_cbc_alg)))
@@ -429,9 +647,41 @@ static int __init aesni_init(void)
 		goto ablk_ecb_err;
 	if ((err = crypto_register_alg(&ablk_cbc_alg)))
 		goto ablk_cbc_err;
+#ifdef HAS_CTR
+	if ((err = crypto_register_alg(&ablk_ctr_alg)))
+		goto ablk_ctr_err;
+#endif
+#ifdef HAS_LRW
+	if ((err = crypto_register_alg(&ablk_lrw_alg)))
+		goto ablk_lrw_err;
+#endif
+#ifdef HAS_PCBC
+	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
+		goto ablk_pcbc_err;
+#endif
+#ifdef HAS_XTS
+	if ((err = crypto_register_alg(&ablk_xts_alg)))
+		goto ablk_xts_err;
+#endif
 
 	return err;
 
+#ifdef HAS_XTS
+ablk_xts_err:
+#endif
+#ifdef HAS_PCBC
+	crypto_unregister_alg(&ablk_pcbc_alg);
+ablk_pcbc_err:
+#endif
+#ifdef HAS_LRW
+	crypto_unregister_alg(&ablk_lrw_alg);
+ablk_lrw_err:
+#endif
+#ifdef HAS_CTR
+	crypto_unregister_alg(&ablk_ctr_alg);
+ablk_ctr_err:
+#endif
+	crypto_unregister_alg(&ablk_cbc_alg);
 ablk_cbc_err:
 	crypto_unregister_alg(&ablk_ecb_alg);
 ablk_ecb_err:
@@ -439,6 +689,8 @@ ablk_ecb_err:
 blk_cbc_err:
 	crypto_unregister_alg(&blk_ecb_alg);
 blk_ecb_err:
+	crypto_unregister_alg(&__aesni_alg);
+__aes_err:
 	crypto_unregister_alg(&aesni_alg);
 aes_err:
 	return err;
@@ -446,10 +698,23 @@ aes_err:
 
 static void __exit aesni_exit(void)
 {
+#ifdef HAS_XTS
+	crypto_unregister_alg(&ablk_xts_alg);
+#endif
+#ifdef HAS_PCBC
+	crypto_unregister_alg(&ablk_pcbc_alg);
+#endif
+#ifdef HAS_LRW
+	crypto_unregister_alg(&ablk_lrw_alg);
+#endif
+#ifdef HAS_CTR
+	crypto_unregister_alg(&ablk_ctr_alg);
+#endif
 	crypto_unregister_alg(&ablk_cbc_alg);
 	crypto_unregister_alg(&ablk_ecb_alg);
 	crypto_unregister_alg(&blk_cbc_alg);
 	crypto_unregister_alg(&blk_ecb_alg);
+	crypto_unregister_alg(&__aesni_alg);
 	crypto_unregister_alg(&aesni_alg);
 }
 
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 66ff22a36ed9..4dfdd03e708f 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -491,6 +491,7 @@ config CRYPTO_AES_NI_INTEL
 	select CRYPTO_AES_X86_64
 	select CRYPTO_CRYPTD
 	select CRYPTO_ALGAPI
+	select CRYPTO_FPU
 	help
 	  Use Intel AES-NI instructions for AES algorithm.
 
@@ -510,6 +511,10 @@ config CRYPTO_AES_NI_INTEL
 
 	  See <http://csrc.nist.gov/encryption/aes/> for more information.
 
+	  In addition to AES cipher algorithm support, acceleration
+	  for some popular block cipher modes is supported too,
+	  including ECB, CBC, CTR, LRW, PCBC, and XTS.
+
 config CRYPTO_ANUBIS
 	tristate "Anubis cipher algorithm"
 	select CRYPTO_ALGAPI
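
Usage note (editorial, not part of the patch): once these algorithms are registered, a kernel-side caller reaches them through the ordinary ablkcipher API by requesting the generic name, e.g. "ctr(aes)"; with a priority of 400 the AES-NI backed implementations are preferred over the generic C ones. The following is a minimal sketch assuming the ablkcipher interface of kernels from this era; the function name aesni_ctr_demo and its parameters are hypothetical, and a real caller would also handle asynchronous completion (-EINPROGRESS/-EBUSY) instead of passing a NULL callback.

/* Editorial sketch only -- not part of the patch above. */
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>

static int aesni_ctr_demo(const u8 *key, unsigned int keylen, u8 *iv,
			  struct scatterlist *sg, unsigned int nbytes)
{
	/* "ctr(aes)" resolves to ctr-aes-aesni when its priority wins. */
	struct crypto_ablkcipher *tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
	struct ablkcipher_request *req;
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* No completion callback registered; see the note above. */
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					NULL, NULL);
	/* Encrypt nbytes in place (src == dst). */
	ablkcipher_request_set_crypt(req, sg, sg, nbytes, iv);
	err = crypto_ablkcipher_encrypt(req);

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}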