author	Lionel Debieve <lionel.debieve@st.com>	2019-04-24 09:34:52 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2019-05-03 02:03:29 -0400
commit	29aed438e8702096a2dda3c5aed88e9176656be7 (patch)
tree	7796a9d1a1928903a141e247b54a40b0a32bb093 /drivers/crypto
parent	7ee27f5a3f8020124017624c010e9a8473bfbb14 (diff)
crypto: stm32/cryp - remove request mutex protection
The mutex is badly used between the threaded IRQ handler and the driver. It must be removed, as the crypto framework must ensure that requests are serialized, which makes the driver-side locking unnecessary. Rework the req/areq handling to avoid a crash during finalize by fixing the NULL pointer issue.

Signed-off-by: Lionel Debieve <lionel.debieve@st.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
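For context, below is a minimal, self-contained C sketch of the req/areq bookkeeping this patch introduces. The struct and function names (fake_cryp, fake_prepare_req, fake_finish_req) are hypothetical stand-ins, not the driver's real symbols; the sketch only models the pattern visible in the hunks below: the prepare path sets the active request pointer and resets the other to NULL, and the finish path finalizes whichever one is active, with serialization assumed to be provided by the crypto engine rather than a mutex.

/* Simplified, hypothetical model of the req/areq bookkeeping after this
 * patch; not the actual driver code. */
#include <stddef.h>
#include <stdio.h>

struct fake_cipher_req { size_t nbytes; };
struct fake_aead_req   { size_t cryptlen; };

struct fake_cryp {
	struct fake_cipher_req *req;   /* active cipher request, or NULL */
	struct fake_aead_req   *areq;  /* active AEAD request, or NULL   */
};

/* Prepare path: exactly one request pointer is active at a time; the other
 * is reset to NULL so a later finalize cannot dereference stale state. */
static void fake_prepare_req(struct fake_cryp *cryp,
			     struct fake_cipher_req *req,
			     struct fake_aead_req *areq)
{
	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
	} else {
		cryp->areq = areq;
		cryp->req = NULL;
	}
}

/* Finish path: finalize whichever request type is active. No mutex is
 * taken because requests are assumed to be serialized by the caller
 * (the crypto engine in the real driver). */
static void fake_finish_req(struct fake_cryp *cryp, int err)
{
	if (cryp->areq)
		printf("finalize AEAD request (err=%d)\n", err);
	else
		printf("finalize cipher request (err=%d)\n", err);
}

int main(void)
{
	struct fake_cryp cryp = { 0 };
	struct fake_cipher_req creq = { .nbytes = 16 };

	fake_prepare_req(&cryp, &creq, NULL);
	fake_finish_req(&cryp, 0);
	return 0;
}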
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/stm32/stm32-cryp.c	26
1 file changed, 7 insertions(+), 19 deletions(-)
diff --git a/drivers/crypto/stm32/stm32-cryp.c b/drivers/crypto/stm32/stm32-cryp.c
index cfcb640c20d0..eb525669d4eb 100644
--- a/drivers/crypto/stm32/stm32-cryp.c
+++ b/drivers/crypto/stm32/stm32-cryp.c
@@ -137,7 +137,6 @@ struct stm32_cryp {
 
 	struct crypto_engine *engine;
 
-	struct mutex lock; /* protects req / areq */
 	struct ablkcipher_request *req;
 	struct aead_request *areq;
 
@@ -645,18 +644,13 @@ static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
 	pm_runtime_mark_last_busy(cryp->dev);
 	pm_runtime_put_autosuspend(cryp->dev);
 
-	if (is_gcm(cryp) || is_ccm(cryp)) {
+	if (is_gcm(cryp) || is_ccm(cryp))
 		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
-		cryp->areq = NULL;
-	} else {
+	else
 		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
 						   err);
-		cryp->req = NULL;
-	}
 
 	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
-
-	mutex_unlock(&cryp->lock);
 }
 
 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
@@ -933,8 +927,6 @@ static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
 	if (!cryp)
 		return -ENODEV;
 
-	mutex_lock(&cryp->lock);
-
 	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
 	rctx->mode &= FLG_MODE_MASK;
 
@@ -946,6 +938,7 @@ static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
 
 	if (req) {
 		cryp->req = req;
+		cryp->areq = NULL;
 		cryp->total_in = req->nbytes;
 		cryp->total_out = cryp->total_in;
 	} else {
@@ -971,6 +964,7 @@ static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
 	 * <---------- total_out ----------------->
 	 */
 	cryp->areq = areq;
+	cryp->req = NULL;
 	cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
 	cryp->total_in = areq->assoclen + areq->cryptlen;
 	if (is_encrypt(cryp))
@@ -992,19 +986,19 @@ static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
 	if (cryp->in_sg_len < 0) {
 		dev_err(cryp->dev, "Cannot get in_sg_len\n");
 		ret = cryp->in_sg_len;
-		goto out;
+		return ret;
 	}
 
 	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
 	if (cryp->out_sg_len < 0) {
 		dev_err(cryp->dev, "Cannot get out_sg_len\n");
 		ret = cryp->out_sg_len;
-		goto out;
+		return ret;
 	}
 
 	ret = stm32_cryp_copy_sgs(cryp);
 	if (ret)
-		goto out;
+		return ret;
 
 	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
 	scatterwalk_start(&cryp->out_walk, cryp->out_sg);
@@ -1016,10 +1010,6 @@ static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
 	}
 
 	ret = stm32_cryp_hw_init(cryp);
-out:
-	if (ret)
-		mutex_unlock(&cryp->lock);
-
 	return ret;
 }
 
@@ -1959,8 +1949,6 @@ static int stm32_cryp_probe(struct platform_device *pdev)
 
 	cryp->dev = dev;
 
-	mutex_init(&cryp->lock);
-
 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
 	cryp->regs = devm_ioremap_resource(dev, res);
 	if (IS_ERR(cryp->regs))