-rw-r--r--   crypto/cryptd.c           206
-rw-r--r--   include/crypto/cryptd.h    24
2 files changed, 227 insertions, 3 deletions
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index ef71318976c7..e46d21ae26bc 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -3,6 +3,13 @@
  *
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  *
+ * Added AEAD support to cryptd.
+ *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
+ *             Adrian Hoban <adrian.hoban@intel.com>
+ *             Gabriele Paoloni <gabriele.paoloni@intel.com>
+ *             Aidan O'Mahony (aidan.o.mahony@intel.com)
+ *    Copyright (c) 2010, Intel Corporation.
+ *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
  * Software Foundation; either version 2 of the License, or (at your option)
@@ -12,6 +19,7 @@
 
 #include <crypto/algapi.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/aead.h>
 #include <crypto/cryptd.h>
 #include <crypto/crypto_wq.h>
 #include <linux/err.h>
@@ -44,6 +52,11 @@ struct hashd_instance_ctx {
         struct cryptd_queue *queue;
 };
 
+struct aead_instance_ctx {
+        struct crypto_aead_spawn aead_spawn;
+        struct cryptd_queue *queue;
+};
+
 struct cryptd_blkcipher_ctx {
         struct crypto_blkcipher *child;
 };
@@ -61,6 +74,14 @@ struct cryptd_hash_request_ctx {
         struct shash_desc desc;
 };
 
+struct cryptd_aead_ctx {
+        struct crypto_aead *child;
+};
+
+struct cryptd_aead_request_ctx {
+        crypto_completion_t complete;
+};
+
 static void cryptd_queue_worker(struct work_struct *work);
 
 static int cryptd_init_queue(struct cryptd_queue *queue,
@@ -601,6 +622,144 @@ out_put_alg:
         return err;
 }
 
+static void cryptd_aead_crypt(struct aead_request *req,
+                              struct crypto_aead *child,
+                              int err,
+                              int (*crypt)(struct aead_request *req))
+{
+        struct cryptd_aead_request_ctx *rctx;
+        rctx = aead_request_ctx(req);
+
+        if (unlikely(err == -EINPROGRESS))
+                goto out;
+        aead_request_set_tfm(req, child);
+        err = crypt(req);
+        req->base.complete = rctx->complete;
+out:
+        local_bh_disable();
+        rctx->complete(&req->base, err);
+        local_bh_enable();
+}
+
+static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
+{
+        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
+        struct crypto_aead *child = ctx->child;
+        struct aead_request *req;
+
+        req = container_of(areq, struct aead_request, base);
+        cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
+}
+
+static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
+{
+        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
+        struct crypto_aead *child = ctx->child;
+        struct aead_request *req;
+
+        req = container_of(areq, struct aead_request, base);
+        cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
+}
+
+static int cryptd_aead_enqueue(struct aead_request *req,
+                               crypto_completion_t complete)
+{
+        struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
+        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+        struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
+
+        rctx->complete = req->base.complete;
+        req->base.complete = complete;
+        return cryptd_enqueue_request(queue, &req->base);
+}
+
+static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
+{
+        return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
+}
+
+static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
+{
+        return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
+}
+
+static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
+{
+        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
+        struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
+        struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
+        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct crypto_aead *cipher;
+
+        cipher = crypto_spawn_aead(spawn);
+        if (IS_ERR(cipher))
+                return PTR_ERR(cipher);
+
+        crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
+        ctx->child = cipher;
+        tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
+        return 0;
+}
+
+static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
+{
+        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+        crypto_free_aead(ctx->child);
+}
+
+static int cryptd_create_aead(struct crypto_template *tmpl,
+                              struct rtattr **tb,
+                              struct cryptd_queue *queue)
+{
+        struct aead_instance_ctx *ctx;
+        struct crypto_instance *inst;
+        struct crypto_alg *alg;
+        int err;
+
+        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD,
+                                  CRYPTO_ALG_TYPE_MASK);
+        if (IS_ERR(alg))
+                return PTR_ERR(alg);
+
+        inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
+        err = PTR_ERR(inst);
+        if (IS_ERR(inst))
+                goto out_put_alg;
+
+        ctx = crypto_instance_ctx(inst);
+        ctx->queue = queue;
+
+        err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
+                                CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+        if (err)
+                goto out_free_inst;
+
+        inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
+        inst->alg.cra_type = alg->cra_type;
+        inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
+        inst->alg.cra_init = cryptd_aead_init_tfm;
+        inst->alg.cra_exit = cryptd_aead_exit_tfm;
+        inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
+        inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
+        inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
+        inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
+        inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
+        inst->alg.cra_aead.encrypt = cryptd_aead_encrypt_enqueue;
+        inst->alg.cra_aead.decrypt = cryptd_aead_decrypt_enqueue;
+        inst->alg.cra_aead.givencrypt = alg->cra_aead.givencrypt;
+        inst->alg.cra_aead.givdecrypt = alg->cra_aead.givdecrypt;
+
+        err = crypto_register_instance(tmpl, inst);
+        if (err) {
+                crypto_drop_spawn(&ctx->aead_spawn.base);
+out_free_inst:
+                kfree(inst);
+        }
+out_put_alg:
+        crypto_mod_put(alg);
+        return err;
+}
+
 static struct cryptd_queue queue;
 
 static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
@@ -616,6 +775,8 @@ static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
                 return cryptd_create_blkcipher(tmpl, tb, &queue);
         case CRYPTO_ALG_TYPE_DIGEST:
                 return cryptd_create_hash(tmpl, tb, &queue);
+        case CRYPTO_ALG_TYPE_AEAD:
+                return cryptd_create_aead(tmpl, tb, &queue);
         }
 
         return -EINVAL;
@@ -625,16 +786,21 @@ static void cryptd_free(struct crypto_instance *inst)
 {
         struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
         struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
+        struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
 
         switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
         case CRYPTO_ALG_TYPE_AHASH:
                 crypto_drop_shash(&hctx->spawn);
                 kfree(ahash_instance(inst));
                 return;
+        case CRYPTO_ALG_TYPE_AEAD:
+                crypto_drop_spawn(&aead_ctx->aead_spawn.base);
+                kfree(inst);
+                return;
+        default:
+                crypto_drop_spawn(&ctx->spawn);
+                kfree(inst);
         }
-
-        crypto_drop_spawn(&ctx->spawn);
-        kfree(inst);
 }
 
 static struct crypto_template cryptd_tmpl = {
@@ -724,6 +890,40 @@ void cryptd_free_ahash(struct cryptd_ahash *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ahash);
 
+struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
+                                      u32 type, u32 mask)
+{
+        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+        struct crypto_aead *tfm;
+
+        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+                return ERR_PTR(-EINVAL);
+        tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
+        if (IS_ERR(tfm))
+                return ERR_CAST(tfm);
+        if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+                crypto_free_aead(tfm);
+                return ERR_PTR(-EINVAL);
+        }
+        return __cryptd_aead_cast(tfm);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
+
+struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
+{
+        struct cryptd_aead_ctx *ctx;
+        ctx = crypto_aead_ctx(&tfm->base);
+        return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_aead_child);
+
+void cryptd_free_aead(struct cryptd_aead *tfm)
+{
+        crypto_free_aead(&tfm->base);
+}
+EXPORT_SYMBOL_GPL(cryptd_free_aead);
+
 static int __init cryptd_init(void)
 {
         int err;
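
For reference, a minimal consumer sketch of the new "cryptd(...)" AEAD template (not part of the patch: the cryptd_demo_* names are made up, and it assumes an underlying gcm(aes) implementation is available so that "cryptd(gcm(aes))" can be instantiated). It submits one encryption and waits for the completion that cryptd's workqueue worker delivers; aead_request_set_assoc() reflects the AEAD API of this kernel generation.

#include <linux/crypto.h>
#include <linux/completion.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

struct cryptd_demo_result {
        struct completion done;
        int err;
};

/* Completion callback invoked from cryptd's worker in process context. */
static void cryptd_demo_complete(struct crypto_async_request *req, int err)
{
        struct cryptd_demo_result *res = req->data;

        if (err == -EINPROGRESS)        /* backlog notification, keep waiting */
                return;
        res->err = err;
        complete(&res->done);
}

static int cryptd_demo_encrypt(u8 *buf, unsigned int len, u8 *assoc,
                               unsigned int alen, u8 *iv,
                               u8 *key, unsigned int keylen)
{
        struct cryptd_demo_result res;
        struct scatterlist sg, asg;
        struct crypto_aead *tfm;
        struct aead_request *req;
        int err;

        tfm = crypto_alloc_aead("cryptd(gcm(aes))", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_aead_setkey(tfm, key, keylen);
        if (!err)
                err = crypto_aead_setauthsize(tfm, 16);
        if (err)
                goto out_free_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        init_completion(&res.done);
        /* buf must have room for the 16-byte tag appended in place. */
        sg_init_one(&sg, buf, len + 16);
        sg_init_one(&asg, assoc, alen);

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  cryptd_demo_complete, &res);
        aead_request_set_assoc(req, &asg, alen);
        aead_request_set_crypt(req, &sg, &sg, len, iv);

        err = crypto_aead_encrypt(req);
        if (err == -EINPROGRESS || err == -EBUSY) {
                /* cryptd queued the request; wait for the worker to finish it. */
                wait_for_completion(&res.done);
                err = res.err;
        }

        aead_request_free(req);
out_free_tfm:
        crypto_free_aead(tfm);
        return err;
}

The point of the template is visible here: crypto_aead_encrypt() returns -EINPROGRESS almost immediately, and the actual cipher work plus the completion callback run later from cryptd_queue_worker() in process context.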
diff --git a/include/crypto/cryptd.h b/include/crypto/cryptd.h
index 1c96b255017c..ba98918bbd9b 100644
--- a/include/crypto/cryptd.h
+++ b/include/crypto/cryptd.h
@@ -1,5 +1,12 @@
 /*
  * Software async crypto daemon
+ *
+ * Added AEAD support to cryptd.
+ *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
+ *             Adrian Hoban <adrian.hoban@intel.com>
+ *             Gabriele Paoloni <gabriele.paoloni@intel.com>
+ *             Aidan O'Mahony (aidan.o.mahony@intel.com)
+ *    Copyright (c) 2010, Intel Corporation.
  */
 
 #ifndef _CRYPTO_CRYPT_H
@@ -42,4 +49,21 @@ struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
 struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
 void cryptd_free_ahash(struct cryptd_ahash *tfm);
 
+struct cryptd_aead {
+        struct crypto_aead base;
+};
+
+static inline struct cryptd_aead *__cryptd_aead_cast(
+        struct crypto_aead *tfm)
+{
+        return (struct cryptd_aead *)tfm;
+}
+
+struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
+                                      u32 type, u32 mask);
+
+struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
+
+void cryptd_free_aead(struct cryptd_aead *tfm);
+
 #endif

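The exported helpers (cryptd_alloc_aead(), cryptd_aead_child(), cryptd_free_aead()) are aimed at drivers whose fast implementation cannot run in every context, for example SIMD code that must not touch the FPU from interrupt context. A hedged sketch of that pattern follows; the "__driver-accel-gcm-aes" algorithm name, the accel_unit_usable() predicate and all accel_* identifiers are placeholders, not anything defined by this patch.

#include <crypto/cryptd.h>
#include <linux/crypto.h>
#include <linux/string.h>
#include <linux/err.h>

struct accel_aead_ctx {
        struct cryptd_aead *cryptd_tfm;
};

/* Placeholder for a real capability test such as irq_fpu_usable() on x86. */
static bool accel_unit_usable(void)
{
        return true;
}

static int accel_aead_init(struct crypto_tfm *tfm)
{
        struct accel_aead_ctx *ctx = crypto_tfm_ctx(tfm);
        struct cryptd_aead *cryptd_tfm;

        /* "__driver-accel-gcm-aes" stands in for the driver's internal,
         * non-sleeping AEAD implementation registered elsewhere. */
        cryptd_tfm = cryptd_alloc_aead("__driver-accel-gcm-aes", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        /* Reserve room to re-issue the request against the cryptd tfm. */
        tfm->crt_aead.reqsize = sizeof(struct aead_request) +
                                crypto_aead_reqsize(&cryptd_tfm->base);
        return 0;
}

static void accel_aead_exit(struct crypto_tfm *tfm)
{
        struct accel_aead_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_aead(ctx->cryptd_tfm);
}

static int accel_aead_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct accel_aead_ctx *ctx = crypto_aead_ctx(tfm);
        struct aead_request *subreq = aead_request_ctx(req);
        struct crypto_aead *child;

        memcpy(subreq, req, sizeof(*req));

        if (!accel_unit_usable()) {
                /* Defer to cryptd: the request is queued and executed
                 * later from cryptd's workqueue in process context. */
                aead_request_set_tfm(subreq, &ctx->cryptd_tfm->base);
        } else {
                /* Fast path: call the wrapped implementation directly. */
                child = cryptd_aead_child(ctx->cryptd_tfm);
                aead_request_set_tfm(subreq, child);
        }

        return crypto_aead_encrypt(subreq);
}

When the placeholder predicate fails, the request is simply re-targeted at the cryptd instance, so the underlying implementation obtained via cryptd_aead_child() only ever runs in a context where it is safe to do so.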