author     Megha Dey <megha.dey@linux.intel.com>     2016-06-21 21:21:46 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>  2016-06-23 06:29:55 -0400
commit     331bf739c4f9992a73547d20bd8f2378b97d386a (patch)
tree       eff59e08cb974478786c2f0906e1b302679d1bbe
parent     820573ebd60d85afb8bb07fa3547ebbf842c59d4 (diff)
crypto: sha1-mb - async implementation for sha1-mb
Herbert wants the sha1-mb algorithm to have an async implementation:
https://lkml.org/lkml/2016/4/5/286. Currently, sha1-mb uses an async
interface for the outer algorithm and a sync interface for the inner
algorithm. This patch introduces an async interface for the inner
algorithm as well.

Signed-off-by: Megha Dey <megha.dey@linux.intel.com>
Signed-off-by: Tim Chen <tim.c.chen@linux.intel.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--  arch/x86/crypto/sha-mb/sha1_mb.c | 182
-rw-r--r--  crypto/mcryptd.c                 | 132
-rw-r--r--  include/crypto/internal/hash.h   |  12
-rw-r--r--  include/crypto/mcryptd.h         |   8
4 files changed, 165 insertions(+), 169 deletions(-)
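Before the per-file diffs, a minimal sketch of the pattern the patch moves to. The helper below is hypothetical and only illustrates the calls that appear in mcryptd_hash_init() further down: the inner per-request context changes from a struct shash_desc to a struct ahash_request, so the inner algorithm is driven through the asynchronous ahash API and its completion is routed back through the outer request's callback.

/*
 * Illustrative sketch only, not part of the patch.  The function name is
 * made up; the calls and fields (rctx->areq, rctx->complete) mirror
 * mcryptd_hash_init() in the diff below.
 */
#include <crypto/hash.h>
#include <crypto/mcryptd.h>

static int mcryptd_inner_init_sketch(struct mcryptd_hash_request_ctx *rctx,
				     struct crypto_ahash *child,
				     struct crypto_async_request *req_async)
{
	struct ahash_request *desc = &rctx->areq;	/* was: &rctx->desc */

	/* Bind the inner request to the child ahash transform. */
	ahash_request_set_tfm(desc, child);

	/* Route completion back through the outer request's callback. */
	ahash_request_set_callback(desc, CRYPTO_TFM_REQ_MAY_SLEEP,
				   rctx->complete, req_async);

	/* The async init replaces the former crypto_shash_init(desc). */
	return crypto_ahash_init(desc);
}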
diff --git a/arch/x86/crypto/sha-mb/sha1_mb.c b/arch/x86/crypto/sha-mb/sha1_mb.c
index 0a464919542c..669cc37268e1 100644
--- a/arch/x86/crypto/sha-mb/sha1_mb.c
+++ b/arch/x86/crypto/sha-mb/sha1_mb.c
@@ -80,10 +80,10 @@ struct sha1_mb_ctx {
 static inline struct mcryptd_hash_request_ctx
 		*cast_hash_to_mcryptd_ctx(struct sha1_hash_ctx *hash_ctx)
 {
-	struct shash_desc *desc;
+	struct ahash_request *areq;
 
-	desc = container_of((void *) hash_ctx, struct shash_desc, __ctx);
-	return container_of(desc, struct mcryptd_hash_request_ctx, desc);
+	areq = container_of((void *) hash_ctx, struct ahash_request, __ctx);
+	return container_of(areq, struct mcryptd_hash_request_ctx, areq);
 }
 
 static inline struct ahash_request
@@ -93,7 +93,7 @@ static inline struct ahash_request
 }
 
 static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,
-			 struct shash_desc *desc)
+			 struct ahash_request *areq)
 {
 	rctx->flag = HASH_UPDATE;
 }
@@ -375,9 +375,9 @@ static struct sha1_hash_ctx *sha1_ctx_mgr_flush(struct sha1_ctx_mgr *mgr)
 	}
 }
 
-static int sha1_mb_init(struct shash_desc *desc)
+static int sha1_mb_init(struct ahash_request *areq)
 {
-	struct sha1_hash_ctx *sctx = shash_desc_ctx(desc);
+	struct sha1_hash_ctx *sctx = ahash_request_ctx(areq);
 
 	hash_ctx_init(sctx);
 	sctx->job.result_digest[0] = SHA1_H0;
@@ -395,7 +395,7 @@ static int sha1_mb_init(struct shash_desc *desc)
 static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx)
 {
 	int i;
-	struct sha1_hash_ctx *sctx = shash_desc_ctx(&rctx->desc);
+	struct sha1_hash_ctx *sctx = ahash_request_ctx(&rctx->areq);
 	__be32 *dst = (__be32 *) rctx->out;
 
 	for (i = 0; i < 5; ++i)
@@ -427,7 +427,7 @@ static int sha_finish_walk(struct mcryptd_hash_request_ctx **ret_rctx,
 
 		}
 		sha_ctx = (struct sha1_hash_ctx *)
-				shash_desc_ctx(&rctx->desc);
+				ahash_request_ctx(&rctx->areq);
 		kernel_fpu_begin();
 		sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx,
 				rctx->walk.data, nbytes, flag);
@@ -519,11 +519,10 @@ static void sha1_mb_add_list(struct mcryptd_hash_request_ctx *rctx,
 	mcryptd_arm_flusher(cstate, delay);
 }
 
-static int sha1_mb_update(struct shash_desc *desc, const u8 *data,
-			  unsigned int len)
+static int sha1_mb_update(struct ahash_request *areq)
 {
 	struct mcryptd_hash_request_ctx *rctx =
-		container_of(desc, struct mcryptd_hash_request_ctx, desc);
+		container_of(areq, struct mcryptd_hash_request_ctx, areq);
 	struct mcryptd_alg_cstate *cstate =
 				this_cpu_ptr(sha1_mb_alg_state.alg_cstate);
 
@@ -539,7 +538,7 @@ static int sha1_mb_update(struct shash_desc *desc, const u8 *data,
 	}
 
 	/* need to init context */
-	req_ctx_init(rctx, desc);
+	req_ctx_init(rctx, areq);
 
 	nbytes = crypto_ahash_walk_first(req, &rctx->walk);
 
@@ -552,7 +551,7 @@ static int sha1_mb_update(struct shash_desc *desc, const u8 *data,
 		rctx->flag |= HASH_DONE;
 
 	/* submit */
-	sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(desc);
+	sha_ctx = (struct sha1_hash_ctx *) ahash_request_ctx(areq);
 	sha1_mb_add_list(rctx, cstate);
 	kernel_fpu_begin();
 	sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data,
@@ -579,11 +578,10 @@ done:
 	return ret;
 }
 
-static int sha1_mb_finup(struct shash_desc *desc, const u8 *data,
-			 unsigned int len, u8 *out)
+static int sha1_mb_finup(struct ahash_request *areq)
 {
 	struct mcryptd_hash_request_ctx *rctx =
-		container_of(desc, struct mcryptd_hash_request_ctx, desc);
+		container_of(areq, struct mcryptd_hash_request_ctx, areq);
 	struct mcryptd_alg_cstate *cstate =
 				this_cpu_ptr(sha1_mb_alg_state.alg_cstate);
 
@@ -598,7 +596,7 @@ static int sha1_mb_finup(struct shash_desc *desc, const u8 *data,
 	}
 
 	/* need to init context */
-	req_ctx_init(rctx, desc);
+	req_ctx_init(rctx, areq);
 
 	nbytes = crypto_ahash_walk_first(req, &rctx->walk);
 
@@ -611,11 +609,10 @@ static int sha1_mb_finup(struct shash_desc *desc, const u8 *data,
 		rctx->flag |= HASH_DONE;
 		flag = HASH_LAST;
 	}
-	rctx->out = out;
 
 	/* submit */
 	rctx->flag |= HASH_FINAL;
-	sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(desc);
+	sha_ctx = (struct sha1_hash_ctx *) ahash_request_ctx(areq);
 	sha1_mb_add_list(rctx, cstate);
 
 	kernel_fpu_begin();
@@ -641,10 +638,10 @@ done:
 	return ret;
 }
 
-static int sha1_mb_final(struct shash_desc *desc, u8 *out)
+static int sha1_mb_final(struct ahash_request *areq)
 {
 	struct mcryptd_hash_request_ctx *rctx =
-		container_of(desc, struct mcryptd_hash_request_ctx, desc);
+		container_of(areq, struct mcryptd_hash_request_ctx, areq);
 	struct mcryptd_alg_cstate *cstate =
 				this_cpu_ptr(sha1_mb_alg_state.alg_cstate);
 
@@ -659,12 +656,11 @@ static int sha1_mb_final(struct shash_desc *desc, u8 *out)
 	}
 
 	/* need to init context */
-	req_ctx_init(rctx, desc);
+	req_ctx_init(rctx, areq);
 
-	rctx->out = out;
 	rctx->flag |= HASH_DONE | HASH_FINAL;
 
-	sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(desc);
+	sha_ctx = (struct sha1_hash_ctx *) ahash_request_ctx(areq);
 	/* flag HASH_FINAL and 0 data size */
 	sha1_mb_add_list(rctx, cstate);
 	kernel_fpu_begin();
@@ -691,48 +687,98 @@ done:
 	return ret;
 }
 
-static int sha1_mb_export(struct shash_desc *desc, void *out)
+static int sha1_mb_export(struct ahash_request *areq, void *out)
 {
-	struct sha1_hash_ctx *sctx = shash_desc_ctx(desc);
+	struct sha1_hash_ctx *sctx = ahash_request_ctx(areq);
 
 	memcpy(out, sctx, sizeof(*sctx));
 
 	return 0;
 }
 
-static int sha1_mb_import(struct shash_desc *desc, const void *in)
+static int sha1_mb_import(struct ahash_request *areq, const void *in)
 {
-	struct sha1_hash_ctx *sctx = shash_desc_ctx(desc);
+	struct sha1_hash_ctx *sctx = ahash_request_ctx(areq);
 
 	memcpy(sctx, in, sizeof(*sctx));
 
 	return 0;
 }
 
+static int sha1_mb_async_init_tfm(struct crypto_tfm *tfm)
+{
+	struct mcryptd_ahash *mcryptd_tfm;
+	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct mcryptd_hash_ctx *mctx;
 
-static struct shash_alg sha1_mb_shash_alg = {
-	.digestsize	= SHA1_DIGEST_SIZE,
+	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb",
+						CRYPTO_ALG_INTERNAL,
+						CRYPTO_ALG_INTERNAL);
+	if (IS_ERR(mcryptd_tfm))
+		return PTR_ERR(mcryptd_tfm);
+	mctx = crypto_ahash_ctx(&mcryptd_tfm->base);
+	mctx->alg_state = &sha1_mb_alg_state;
+	ctx->mcryptd_tfm = mcryptd_tfm;
+	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+				sizeof(struct ahash_request) +
+				crypto_ahash_reqsize(&mcryptd_tfm->base));
+
+	return 0;
+}
+
+static void sha1_mb_async_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	mcryptd_free_ahash(ctx->mcryptd_tfm);
+}
+
+static int sha1_mb_areq_init_tfm(struct crypto_tfm *tfm)
+{
+	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+				sizeof(struct ahash_request) +
+				sizeof(struct sha1_hash_ctx));
+
+	return 0;
+}
+
+static void sha1_mb_areq_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	mcryptd_free_ahash(ctx->mcryptd_tfm);
+}
+
+static struct ahash_alg sha1_mb_areq_alg = {
 	.init		= sha1_mb_init,
 	.update		= sha1_mb_update,
 	.final		= sha1_mb_final,
 	.finup		= sha1_mb_finup,
 	.export		= sha1_mb_export,
 	.import		= sha1_mb_import,
-	.descsize	= sizeof(struct sha1_hash_ctx),
-	.statesize	= sizeof(struct sha1_hash_ctx),
-	.base		= {
-		.cra_name	 = "__sha1-mb",
-		.cra_driver_name = "__intel_sha1-mb",
-		.cra_priority	 = 100,
-		/*
-		 * use ASYNC flag as some buffers in multi-buffer
-		 * algo may not have completed before hashing thread sleep
-		 */
-		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_ASYNC |
-				   CRYPTO_ALG_INTERNAL,
-		.cra_blocksize	 = SHA1_BLOCK_SIZE,
-		.cra_module	 = THIS_MODULE,
-		.cra_list	 = LIST_HEAD_INIT(sha1_mb_shash_alg.base.cra_list),
+	.halg		= {
+		.digestsize	= SHA1_DIGEST_SIZE,
+		.statesize	= sizeof(struct sha1_hash_ctx),
+		.base		= {
+			.cra_name	 = "__sha1-mb",
+			.cra_driver_name = "__intel_sha1-mb",
+			.cra_priority	 = 100,
+			/*
+			 * use ASYNC flag as some buffers in multi-buffer
+			 * algo may not have completed before hashing thread
+			 * sleep
+			 */
+			.cra_flags	= CRYPTO_ALG_TYPE_AHASH |
+						CRYPTO_ALG_ASYNC |
+						CRYPTO_ALG_INTERNAL,
+			.cra_blocksize	= SHA1_BLOCK_SIZE,
+			.cra_module	= THIS_MODULE,
+			.cra_list	= LIST_HEAD_INIT
+				(sha1_mb_areq_alg.halg.base.cra_list),
+			.cra_init	= sha1_mb_areq_init_tfm,
+			.cra_exit	= sha1_mb_areq_exit_tfm,
+			.cra_ctxsize	= sizeof(struct sha1_hash_ctx),
+		}
 	}
 };
 
@@ -817,46 +863,20 @@ static int sha1_mb_async_import(struct ahash_request *req, const void *in)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 	struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm;
-	struct crypto_shash *child = mcryptd_ahash_child(mcryptd_tfm);
+	struct crypto_ahash *child = mcryptd_ahash_child(mcryptd_tfm);
 	struct mcryptd_hash_request_ctx *rctx;
-	struct shash_desc *desc;
+	struct ahash_request *areq;
 
 	memcpy(mcryptd_req, req, sizeof(*req));
 	ahash_request_set_tfm(mcryptd_req, &mcryptd_tfm->base);
 	rctx = ahash_request_ctx(mcryptd_req);
-	desc = &rctx->desc;
-	desc->tfm = child;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	return crypto_ahash_import(mcryptd_req, in);
-}
-
-static int sha1_mb_async_init_tfm(struct crypto_tfm *tfm)
-{
-	struct mcryptd_ahash *mcryptd_tfm;
-	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct mcryptd_hash_ctx *mctx;
+	areq = &rctx->areq;
 
-	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb",
-					CRYPTO_ALG_INTERNAL,
-					CRYPTO_ALG_INTERNAL);
-	if (IS_ERR(mcryptd_tfm))
-		return PTR_ERR(mcryptd_tfm);
-	mctx = crypto_ahash_ctx(&mcryptd_tfm->base);
-	mctx->alg_state = &sha1_mb_alg_state;
-	ctx->mcryptd_tfm = mcryptd_tfm;
-	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
-				sizeof(struct ahash_request) +
-				crypto_ahash_reqsize(&mcryptd_tfm->base));
-
-	return 0;
-}
-
-static void sha1_mb_async_exit_tfm(struct crypto_tfm *tfm)
-{
-	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
+	ahash_request_set_tfm(areq, child);
+	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_SLEEP,
+				   rctx->complete, req);
 
-	mcryptd_free_ahash(ctx->mcryptd_tfm);
+	return crypto_ahash_import(mcryptd_req, in);
 }
 
 static struct ahash_alg sha1_mb_async_alg = {
@@ -965,7 +985,7 @@ static int __init sha1_mb_mod_init(void)
 	}
 	sha1_mb_alg_state.flusher = &sha1_mb_flusher;
 
-	err = crypto_register_shash(&sha1_mb_shash_alg);
+	err = crypto_register_ahash(&sha1_mb_areq_alg);
 	if (err)
 		goto err2;
 	err = crypto_register_ahash(&sha1_mb_async_alg);
@@ -975,7 +995,7 @@ static int __init sha1_mb_mod_init(void)
 
 	return 0;
 err1:
-	crypto_unregister_shash(&sha1_mb_shash_alg);
+	crypto_unregister_ahash(&sha1_mb_areq_alg);
 err2:
 	for_each_possible_cpu(cpu) {
 		cpu_state = per_cpu_ptr(sha1_mb_alg_state.alg_cstate, cpu);
@@ -991,7 +1011,7 @@ static void __exit sha1_mb_mod_fini(void)
 	struct mcryptd_alg_cstate *cpu_state;
 
 	crypto_unregister_ahash(&sha1_mb_async_alg);
-	crypto_unregister_shash(&sha1_mb_shash_alg);
+	crypto_unregister_ahash(&sha1_mb_areq_alg);
 	for_each_possible_cpu(cpu) {
 		cpu_state = per_cpu_ptr(sha1_mb_alg_state.alg_cstate, cpu);
 		kfree(cpu_state->mgr);
diff --git a/crypto/mcryptd.c b/crypto/mcryptd.c
index c4eb9da49d4f..86fb59b109a9 100644
--- a/crypto/mcryptd.c
+++ b/crypto/mcryptd.c
@@ -41,7 +41,7 @@ struct mcryptd_flush_list {
 static struct mcryptd_flush_list __percpu *mcryptd_flist;
 
 struct hashd_instance_ctx {
-	struct crypto_shash_spawn spawn;
+	struct crypto_ahash_spawn spawn;
 	struct mcryptd_queue *queue;
 };
 
@@ -272,18 +272,18 @@ static int mcryptd_hash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
-	struct crypto_shash_spawn *spawn = &ictx->spawn;
+	struct crypto_ahash_spawn *spawn = &ictx->spawn;
 	struct mcryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct crypto_shash *hash;
+	struct crypto_ahash *hash;
 
-	hash = crypto_spawn_shash(spawn);
+	hash = crypto_spawn_ahash(spawn);
 	if (IS_ERR(hash))
 		return PTR_ERR(hash);
 
 	ctx->child = hash;
 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 				 sizeof(struct mcryptd_hash_request_ctx) +
-				 crypto_shash_descsize(hash));
+				 crypto_ahash_reqsize(hash));
 	return 0;
 }
 
@@ -291,21 +291,21 @@ static void mcryptd_hash_exit_tfm(struct crypto_tfm *tfm)
 {
 	struct mcryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_shash(ctx->child);
+	crypto_free_ahash(ctx->child);
 }
 
 static int mcryptd_hash_setkey(struct crypto_ahash *parent,
 				   const u8 *key, unsigned int keylen)
 {
 	struct mcryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
-	struct crypto_shash *child = ctx->child;
+	struct crypto_ahash *child = ctx->child;
 	int err;
 
-	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
+	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_ahash_set_flags(child, crypto_ahash_get_flags(parent) &
 				      CRYPTO_TFM_REQ_MASK);
-	err = crypto_shash_setkey(child, key, keylen);
-	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
+	err = crypto_ahash_setkey(child, key, keylen);
+	crypto_ahash_set_flags(parent, crypto_ahash_get_flags(child) &
 				       CRYPTO_TFM_RES_MASK);
 	return err;
 }
@@ -331,20 +331,20 @@ static int mcryptd_hash_enqueue(struct ahash_request *req,
 static void mcryptd_hash_init(struct crypto_async_request *req_async, int err)
 {
 	struct mcryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_shash *child = ctx->child;
+	struct crypto_ahash *child = ctx->child;
 	struct ahash_request *req = ahash_request_cast(req_async);
 	struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-	struct shash_desc *desc = &rctx->desc;
+	struct ahash_request *desc = &rctx->areq;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc->tfm = child;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	ahash_request_set_tfm(desc, child);
+	ahash_request_set_callback(desc, CRYPTO_TFM_REQ_MAY_SLEEP,
+						rctx->complete, req_async);
 
-	err = crypto_shash_init(desc);
-
-	req->base.complete = rctx->complete;
+	rctx->out = req->result;
+	err = crypto_ahash_init(desc);
 
 out:
 	local_bh_disable();
@@ -365,7 +365,8 @@ static void mcryptd_hash_update(struct crypto_async_request *req_async, int err)
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	err = shash_ahash_mcryptd_update(req, &rctx->desc);
+	rctx->out = req->result;
+	err = ahash_mcryptd_update(&rctx->areq);
 	if (err) {
 		req->base.complete = rctx->complete;
 		goto out;
@@ -391,7 +392,8 @@ static void mcryptd_hash_final(struct crypto_async_request *req_async, int err)
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	err = shash_ahash_mcryptd_final(req, &rctx->desc);
+	rctx->out = req->result;
+	err = ahash_mcryptd_final(&rctx->areq);
 	if (err) {
 		req->base.complete = rctx->complete;
 		goto out;
@@ -416,8 +418,8 @@ static void mcryptd_hash_finup(struct crypto_async_request *req_async, int err)
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
-
-	err = shash_ahash_mcryptd_finup(req, &rctx->desc);
+	rctx->out = req->result;
+	err = ahash_mcryptd_finup(&rctx->areq);
 
 	if (err) {
 		req->base.complete = rctx->complete;
@@ -439,25 +441,21 @@ static int mcryptd_hash_finup_enqueue(struct ahash_request *req)
 static void mcryptd_hash_digest(struct crypto_async_request *req_async, int err)
 {
 	struct mcryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_shash *child = ctx->child;
+	struct crypto_ahash *child = ctx->child;
 	struct ahash_request *req = ahash_request_cast(req_async);
 	struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-	struct shash_desc *desc = &rctx->desc;
+	struct ahash_request *desc = &rctx->areq;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc->tfm = child;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; /* check this again */
-
-	err = shash_ahash_mcryptd_digest(req, desc);
+	ahash_request_set_tfm(desc, child);
+	ahash_request_set_callback(desc, CRYPTO_TFM_REQ_MAY_SLEEP,
+						rctx->complete, req_async);
 
-	if (err) {
-		req->base.complete = rctx->complete;
-		goto out;
-	}
+	rctx->out = req->result;
+	err = ahash_mcryptd_digest(desc);
 
-	return;
 out:
 	local_bh_disable();
 	rctx->complete(&req->base, err);
@@ -473,14 +471,14 @@ static int mcryptd_hash_export(struct ahash_request *req, void *out)
 {
 	struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
 
-	return crypto_shash_export(&rctx->desc, out);
+	return crypto_ahash_export(&rctx->areq, out);
 }
 
 static int mcryptd_hash_import(struct ahash_request *req, const void *in)
 {
 	struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
 
-	return crypto_shash_import(&rctx->desc, in);
+	return crypto_ahash_import(&rctx->areq, in);
 }
 
 static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
@@ -488,7 +486,7 @@ static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
 {
 	struct hashd_instance_ctx *ctx;
 	struct ahash_instance *inst;
-	struct shash_alg *salg;
+	struct hash_alg_common *halg;
 	struct crypto_alg *alg;
 	u32 type = 0;
 	u32 mask = 0;
@@ -496,11 +494,11 @@ static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
 
 	mcryptd_check_internal(tb, &type, &mask);
 
-	salg = shash_attr_alg(tb[1], type, mask);
-	if (IS_ERR(salg))
-		return PTR_ERR(salg);
+	halg = ahash_attr_alg(tb[1], type, mask);
+	if (IS_ERR(halg))
+		return PTR_ERR(halg);
 
-	alg = &salg->base;
+	alg = &halg->base;
 	pr_debug("crypto: mcryptd hash alg: %s\n", alg->cra_name);
 	inst = mcryptd_alloc_instance(alg, ahash_instance_headroom(),
 					sizeof(*ctx));
@@ -511,7 +509,7 @@ static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
 	ctx = ahash_instance_ctx(inst);
 	ctx->queue = queue;
 
-	err = crypto_init_shash_spawn(&ctx->spawn, salg,
+	err = crypto_init_ahash_spawn(&ctx->spawn, halg,
 				      ahash_crypto_instance(inst));
 	if (err)
 		goto out_free_inst;
@@ -521,8 +519,8 @@ static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
 		type |= CRYPTO_ALG_INTERNAL;
 	inst->alg.halg.base.cra_flags = type;
 
-	inst->alg.halg.digestsize = salg->digestsize;
-	inst->alg.halg.statesize = salg->statesize;
+	inst->alg.halg.digestsize = halg->digestsize;
+	inst->alg.halg.statesize = halg->statesize;
 	inst->alg.halg.base.cra_ctxsize = sizeof(struct mcryptd_hash_ctx);
 
 	inst->alg.halg.base.cra_init = mcryptd_hash_init_tfm;
@@ -539,7 +537,7 @@ static int mcryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
 
 	err = ahash_register_instance(tmpl, inst);
 	if (err) {
-		crypto_drop_shash(&ctx->spawn);
+		crypto_drop_ahash(&ctx->spawn);
 out_free_inst:
 		kfree(inst);
 	}
@@ -575,7 +573,7 @@ static void mcryptd_free(struct crypto_instance *inst)
 
 	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
 	case CRYPTO_ALG_TYPE_AHASH:
-		crypto_drop_shash(&hctx->spawn);
+		crypto_drop_ahash(&hctx->spawn);
 		kfree(ahash_instance(inst));
 		return;
 	default:
@@ -612,55 +610,38 @@ struct mcryptd_ahash *mcryptd_alloc_ahash(const char *alg_name,
 }
 EXPORT_SYMBOL_GPL(mcryptd_alloc_ahash);
 
-int shash_ahash_mcryptd_digest(struct ahash_request *req,
-			       struct shash_desc *desc)
+int ahash_mcryptd_digest(struct ahash_request *desc)
 {
 	int err;
 
-	err = crypto_shash_init(desc) ?:
-	      shash_ahash_mcryptd_finup(req, desc);
+	err = crypto_ahash_init(desc) ?:
+	      ahash_mcryptd_finup(desc);
 
 	return err;
 }
-EXPORT_SYMBOL_GPL(shash_ahash_mcryptd_digest);
 
-int shash_ahash_mcryptd_update(struct ahash_request *req,
-			       struct shash_desc *desc)
+int ahash_mcryptd_update(struct ahash_request *desc)
 {
-	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-
 	/* alignment is to be done by multi-buffer crypto algorithm if needed */
 
-	return shash->update(desc, NULL, 0);
+	return crypto_ahash_update(desc);
 }
-EXPORT_SYMBOL_GPL(shash_ahash_mcryptd_update);
 
-int shash_ahash_mcryptd_finup(struct ahash_request *req,
-			      struct shash_desc *desc)
+int ahash_mcryptd_finup(struct ahash_request *desc)
 {
-	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-
 	/* alignment is to be done by multi-buffer crypto algorithm if needed */
 
-	return shash->finup(desc, NULL, 0, req->result);
+	return crypto_ahash_finup(desc);
 }
-EXPORT_SYMBOL_GPL(shash_ahash_mcryptd_finup);
 
-int shash_ahash_mcryptd_final(struct ahash_request *req,
-			      struct shash_desc *desc)
+int ahash_mcryptd_final(struct ahash_request *desc)
 {
-	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-
 	/* alignment is to be done by multi-buffer crypto algorithm if needed */
 
-	return shash->final(desc, req->result);
+	return crypto_ahash_final(desc);
 }
-EXPORT_SYMBOL_GPL(shash_ahash_mcryptd_final);
 
-struct crypto_shash *mcryptd_ahash_child(struct mcryptd_ahash *tfm)
+struct crypto_ahash *mcryptd_ahash_child(struct mcryptd_ahash *tfm)
 {
 	struct mcryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
@@ -668,12 +649,12 @@ struct crypto_shash *mcryptd_ahash_child(struct mcryptd_ahash *tfm)
 }
 EXPORT_SYMBOL_GPL(mcryptd_ahash_child);
 
-struct shash_desc *mcryptd_shash_desc(struct ahash_request *req)
+struct ahash_request *mcryptd_ahash_desc(struct ahash_request *req)
 {
 	struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-	return &rctx->desc;
+	return &rctx->areq;
 }
-EXPORT_SYMBOL_GPL(mcryptd_shash_desc);
+EXPORT_SYMBOL_GPL(mcryptd_ahash_desc);
 
 void mcryptd_free_ahash(struct mcryptd_ahash *tfm)
 {
@@ -681,7 +662,6 @@ void mcryptd_free_ahash(struct mcryptd_ahash *tfm)
 }
 EXPORT_SYMBOL_GPL(mcryptd_free_ahash);
 
-
 static int __init mcryptd_init(void)
 {
 	int err, cpu;
diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h
index 49dae16f8929..1d4f365d8f03 100644
--- a/include/crypto/internal/hash.h
+++ b/include/crypto/internal/hash.h
@@ -114,14 +114,10 @@ int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc);
 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc);
 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc);
 
-int shash_ahash_mcryptd_update(struct ahash_request *req,
-			       struct shash_desc *desc);
-int shash_ahash_mcryptd_final(struct ahash_request *req,
-			      struct shash_desc *desc);
-int shash_ahash_mcryptd_finup(struct ahash_request *req,
-			      struct shash_desc *desc);
-int shash_ahash_mcryptd_digest(struct ahash_request *req,
-			       struct shash_desc *desc);
+int ahash_mcryptd_update(struct ahash_request *desc);
+int ahash_mcryptd_final(struct ahash_request *desc);
+int ahash_mcryptd_finup(struct ahash_request *desc);
+int ahash_mcryptd_digest(struct ahash_request *desc);
 
 int crypto_init_shash_ops_async(struct crypto_tfm *tfm);
 
diff --git a/include/crypto/mcryptd.h b/include/crypto/mcryptd.h
index c23ee1f7ee80..4a53c0d38cd2 100644
--- a/include/crypto/mcryptd.h
+++ b/include/crypto/mcryptd.h
@@ -39,7 +39,7 @@ struct mcryptd_instance_ctx {
 };
 
 struct mcryptd_hash_ctx {
-	struct crypto_shash *child;
+	struct crypto_ahash *child;
 	struct mcryptd_alg_state *alg_state;
 };
 
@@ -59,13 +59,13 @@ struct mcryptd_hash_request_ctx {
 	struct crypto_hash_walk walk;
 	u8 *out;
 	int flag;
-	struct shash_desc desc;
+	struct ahash_request areq;
 };
 
 struct mcryptd_ahash *mcryptd_alloc_ahash(const char *alg_name,
 					u32 type, u32 mask);
-struct crypto_shash *mcryptd_ahash_child(struct mcryptd_ahash *tfm);
-struct shash_desc *mcryptd_shash_desc(struct ahash_request *req);
+struct crypto_ahash *mcryptd_ahash_child(struct mcryptd_ahash *tfm);
+struct ahash_request *mcryptd_ahash_desc(struct ahash_request *req);
 void mcryptd_free_ahash(struct mcryptd_ahash *tfm);
 void mcryptd_flusher(struct work_struct *work);
 