author     Herbert Xu <herbert@gondor.apana.org.au>  2016-06-29 06:04:05 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>  2016-07-01 11:45:10 -0400
commit     678adecd117f953cd21088064d95ceffac79a2a6 (patch)
tree       591733cf3244d64376db5e4551ff84dc2130c7af /drivers/crypto
parent     2d20ce070d3b78f0974408ef648223967d0efb0a (diff)
crypto: sahara - Use skcipher for fallback
This patch replaces use of the obsolete ablkcipher interface with skcipher. It also removes shash_fallback, which is totally unused.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
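For context, the fallback idiom this patch converts to can be sketched in isolation as below. This is a minimal illustration of the skcipher-fallback pattern, not sahara code: the example_* names and the bare AES-128 key check are hypothetical stand-ins for the driver's real context struct and hardware-capability test.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>

/* Hypothetical context; mirrors the shape of sahara_ctx after this patch. */
struct example_ctx {
	int keylen;
	struct crypto_skcipher *fallback;
};

/* tfm init: allocate an async software implementation of the same algorithm. */
static int example_cra_init(struct crypto_tfm *tfm)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(tfm), 0,
					      CRYPTO_ALG_ASYNC |
					      CRYPTO_ALG_NEED_FALLBACK);
	return PTR_ERR_OR_ZERO(ctx->fallback);
}

/* tfm exit: if init succeeded, fallback is valid, so no NULL check is needed. */
static void example_cra_exit(struct crypto_tfm *tfm)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

/* Request path: forward unsupported key sizes through an on-stack subrequest. */
static int example_ecb_encrypt(struct ablkcipher_request *req)
{
	struct example_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	int err;

	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);

		skcipher_request_set_tfm(subreq, ctx->fallback);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->nbytes, req->info);
		err = crypto_skcipher_encrypt(subreq);
		skcipher_request_zero(subreq);	/* wipe subrequest state */
		return err;
	}

	/* ... hardware path ... */
	return 0;
}

The key difference from the old ablkcipher code is visible in the diff below: instead of temporarily swapping the fallback tfm into the caller's request and restoring it afterwards, the skcipher version builds a separate on-stack subrequest, so the caller's request is never modified.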
Diffstat (limited to 'drivers/crypto')
-rw-r--r--  drivers/crypto/sahara.c  112
1 file changed, 50 insertions(+), 62 deletions(-)
diff --git a/drivers/crypto/sahara.c b/drivers/crypto/sahara.c
index c3f3d89e4831..0c49956ee0ce 100644
--- a/drivers/crypto/sahara.c
+++ b/drivers/crypto/sahara.c
@@ -14,10 +14,9 @@
  * Based on omap-aes.c and tegra-aes.c
  */
 
-#include <crypto/algapi.h>
 #include <crypto/aes.h>
-#include <crypto/hash.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/sha.h>
 
@@ -150,10 +149,7 @@ struct sahara_ctx {
 	/* AES-specific context */
 	int keylen;
 	u8 key[AES_KEYSIZE_128];
-	struct crypto_ablkcipher *fallback;
-
-	/* SHA-specific context */
-	struct crypto_shash *shash_fallback;
+	struct crypto_skcipher *fallback;
 };
 
 struct sahara_aes_reqctx {
@@ -620,25 +616,21 @@ static int sahara_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
 		return 0;
 	}
 
-	if (keylen != AES_KEYSIZE_128 &&
-	    keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
+	if (keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
 		return -EINVAL;
 
 	/*
 	 * The requested key size is not supported by HW, do a fallback.
 	 */
-	ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
-	ctx->fallback->base.crt_flags |=
-		(tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
+						 CRYPTO_TFM_REQ_MASK);
 
-	ret = crypto_ablkcipher_setkey(ctx->fallback, key, keylen);
-	if (ret) {
-		struct crypto_tfm *tfm_aux = crypto_ablkcipher_tfm(tfm);
+	ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);
 
-		tfm_aux->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-		tfm_aux->crt_flags |=
-			(ctx->fallback->base.crt_flags & CRYPTO_TFM_RES_MASK);
-	}
+	tfm->base.crt_flags &= ~CRYPTO_TFM_RES_MASK;
+	tfm->base.crt_flags |= crypto_skcipher_get_flags(ctx->fallback) &
+			       CRYPTO_TFM_RES_MASK;
 	return ret;
 }
 
@@ -670,16 +662,20 @@ static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
 
 static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
 {
-	struct crypto_tfm *tfm =
-		crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
 		crypto_ablkcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		ablkcipher_request_set_tfm(req, ctx->fallback);
-		err = crypto_ablkcipher_encrypt(req);
-		ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(tfm));
+		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
+
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		skcipher_request_set_callback(subreq, req->base.flags,
+					      NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+					   req->nbytes, req->info);
+		err = crypto_skcipher_encrypt(subreq);
+		skcipher_request_zero(subreq);
 		return err;
 	}
 
@@ -688,16 +684,20 @@ static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
 
 static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
 {
-	struct crypto_tfm *tfm =
-		crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
 		crypto_ablkcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		ablkcipher_request_set_tfm(req, ctx->fallback);
-		err = crypto_ablkcipher_decrypt(req);
-		ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(tfm));
+		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
+
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		skcipher_request_set_callback(subreq, req->base.flags,
+					      NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+					   req->nbytes, req->info);
+		err = crypto_skcipher_decrypt(subreq);
+		skcipher_request_zero(subreq);
 		return err;
 	}
 
@@ -706,16 +706,20 @@ static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
 
 static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
 {
-	struct crypto_tfm *tfm =
-		crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
 		crypto_ablkcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		ablkcipher_request_set_tfm(req, ctx->fallback);
-		err = crypto_ablkcipher_encrypt(req);
-		ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(tfm));
+		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
+
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		skcipher_request_set_callback(subreq, req->base.flags,
+					      NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+					   req->nbytes, req->info);
+		err = crypto_skcipher_encrypt(subreq);
+		skcipher_request_zero(subreq);
 		return err;
 	}
 
@@ -724,16 +728,20 @@ static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
 
 static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
 {
-	struct crypto_tfm *tfm =
-		crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
 		crypto_ablkcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		ablkcipher_request_set_tfm(req, ctx->fallback);
-		err = crypto_ablkcipher_decrypt(req);
-		ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(tfm));
+		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
+
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		skcipher_request_set_callback(subreq, req->base.flags,
+					      NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+					   req->nbytes, req->info);
+		err = crypto_skcipher_decrypt(subreq);
+		skcipher_request_zero(subreq);
 		return err;
 	}
 
@@ -745,8 +753,9 @@ static int sahara_aes_cra_init(struct crypto_tfm *tfm)
 	const char *name = crypto_tfm_alg_name(tfm);
 	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	ctx->fallback = crypto_alloc_ablkcipher(name, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ctx->fallback = crypto_alloc_skcipher(name, 0,
+					      CRYPTO_ALG_ASYNC |
+					      CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ctx->fallback)) {
 		pr_err("Error allocating fallback algo %s\n", name);
 		return PTR_ERR(ctx->fallback);
@@ -761,9 +770,7 @@ static void sahara_aes_cra_exit(struct crypto_tfm *tfm)
 {
 	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	if (ctx->fallback)
-		crypto_free_ablkcipher(ctx->fallback);
-	ctx->fallback = NULL;
+	crypto_free_skcipher(ctx->fallback);
 }
 
 static u32 sahara_sha_init_hdr(struct sahara_dev *dev,
@@ -1180,15 +1187,6 @@ static int sahara_sha_import(struct ahash_request *req, const void *in)
 
 static int sahara_sha_cra_init(struct crypto_tfm *tfm)
 {
-	const char *name = crypto_tfm_alg_name(tfm);
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	ctx->shash_fallback = crypto_alloc_shash(name, 0,
-					CRYPTO_ALG_NEED_FALLBACK);
-	if (IS_ERR(ctx->shash_fallback)) {
-		pr_err("Error allocating fallback algo %s\n", name);
-		return PTR_ERR(ctx->shash_fallback);
-	}
 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 				 sizeof(struct sahara_sha_reqctx) +
 				 SHA_BUFFER_LEN + SHA256_BLOCK_SIZE);
@@ -1196,14 +1194,6 @@ static int sahara_sha_cra_init(struct crypto_tfm *tfm)
 	return 0;
 }
 
-static void sahara_sha_cra_exit(struct crypto_tfm *tfm)
-{
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	crypto_free_shash(ctx->shash_fallback);
-	ctx->shash_fallback = NULL;
-}
-
 static struct crypto_alg aes_algs[] = {
 {
 	.cra_name		= "ecb(aes)",
@@ -1272,7 +1262,6 @@ static struct ahash_alg sha_v3_algs[] = {
 		.cra_alignmask		= 0,
 		.cra_module		= THIS_MODULE,
 		.cra_init		= sahara_sha_cra_init,
-		.cra_exit		= sahara_sha_cra_exit,
 	}
 },
 };
@@ -1300,7 +1289,6 @@ static struct ahash_alg sha_v4_algs[] = {
 		.cra_alignmask		= 0,
 		.cra_module		= THIS_MODULE,
 		.cra_init		= sahara_sha_cra_init,
-		.cra_exit		= sahara_sha_cra_exit,
 	}
 },
 };