summaryrefslogtreecommitdiffstats
path: root/drivers/crypto/inside-secure
diff options
context:
space:
mode:
authorAntoine Tenart <antoine.tenart@bootlin.com>2019-05-27 10:51:05 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2019-06-06 02:38:56 -0400
commit583d7e195f48753c5da8cc75a8ed332cad7287b0 (patch)
treec8c287d4c6d5b224a765b33acd97fd76df2cccab /drivers/crypto/inside-secure
parent082ec2d48467b61aa89783e954645ec441714c4e (diff)
crypto: inside-secure - fix use of the SG list
Replace sg_nents_for_len by sg_nents when DMA mapping/unmapping buffers and when looping over the SG entries. This fixes cases where the SG entries aren't used fully, which would in such cases lead to using fewer SG entries than needed (and thus the engine wouldn't have access to the full input data and the result would be wrong). Signed-off-by: Antoine Tenart <antoine.tenart@bootlin.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto/inside-secure')
-rw-r--r--drivers/crypto/inside-secure/safexcel_cipher.c39
-rw-r--r--drivers/crypto/inside-secure/safexcel_hash.c3
2 files changed, 12 insertions, 30 deletions
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index cedfb121c278..6e193baccec7 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -369,16 +369,10 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
369 safexcel_complete(priv, ring); 369 safexcel_complete(priv, ring);
370 370
371 if (src == dst) { 371 if (src == dst) {
372 dma_unmap_sg(priv->dev, src, 372 dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_BIDIRECTIONAL);
373 sg_nents_for_len(src, cryptlen),
374 DMA_BIDIRECTIONAL);
375 } else { 373 } else {
376 dma_unmap_sg(priv->dev, src, 374 dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_TO_DEVICE);
377 sg_nents_for_len(src, cryptlen), 375 dma_unmap_sg(priv->dev, dst, sg_nents(dst), DMA_FROM_DEVICE);
378 DMA_TO_DEVICE);
379 dma_unmap_sg(priv->dev, dst,
380 sg_nents_for_len(dst, cryptlen),
381 DMA_FROM_DEVICE);
382 } 376 }
383 377
384 *should_complete = true; 378 *should_complete = true;
@@ -403,26 +397,21 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
403 int i, ret = 0; 397 int i, ret = 0;
404 398
405 if (src == dst) { 399 if (src == dst) {
406 nr_src = dma_map_sg(priv->dev, src, 400 nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
407 sg_nents_for_len(src, totlen),
408 DMA_BIDIRECTIONAL); 401 DMA_BIDIRECTIONAL);
409 nr_dst = nr_src; 402 nr_dst = nr_src;
410 if (!nr_src) 403 if (!nr_src)
411 return -EINVAL; 404 return -EINVAL;
412 } else { 405 } else {
413 nr_src = dma_map_sg(priv->dev, src, 406 nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
414 sg_nents_for_len(src, totlen),
415 DMA_TO_DEVICE); 407 DMA_TO_DEVICE);
416 if (!nr_src) 408 if (!nr_src)
417 return -EINVAL; 409 return -EINVAL;
418 410
419 nr_dst = dma_map_sg(priv->dev, dst, 411 nr_dst = dma_map_sg(priv->dev, dst, sg_nents(dst),
420 sg_nents_for_len(dst, totlen),
421 DMA_FROM_DEVICE); 412 DMA_FROM_DEVICE);
422 if (!nr_dst) { 413 if (!nr_dst) {
423 dma_unmap_sg(priv->dev, src, 414 dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
424 sg_nents_for_len(src, totlen),
425 DMA_TO_DEVICE);
426 return -EINVAL; 415 return -EINVAL;
427 } 416 }
428 } 417 }
@@ -472,7 +461,7 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
472 461
473 /* result descriptors */ 462 /* result descriptors */
474 for_each_sg(dst, sg, nr_dst, i) { 463 for_each_sg(dst, sg, nr_dst, i) {
475 bool first = !i, last = (i == nr_dst - 1); 464 bool first = !i, last = sg_is_last(sg);
476 u32 len = sg_dma_len(sg); 465 u32 len = sg_dma_len(sg);
477 466
478 rdesc = safexcel_add_rdesc(priv, ring, first, last, 467 rdesc = safexcel_add_rdesc(priv, ring, first, last,
@@ -501,16 +490,10 @@ cdesc_rollback:
501 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr); 490 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
502 491
503 if (src == dst) { 492 if (src == dst) {
504 dma_unmap_sg(priv->dev, src, 493 dma_unmap_sg(priv->dev, src, nr_src, DMA_BIDIRECTIONAL);
505 sg_nents_for_len(src, totlen),
506 DMA_BIDIRECTIONAL);
507 } else { 494 } else {
508 dma_unmap_sg(priv->dev, src, 495 dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
509 sg_nents_for_len(src, totlen), 496 dma_unmap_sg(priv->dev, dst, nr_dst, DMA_FROM_DEVICE);
510 DMA_TO_DEVICE);
511 dma_unmap_sg(priv->dev, dst,
512 sg_nents_for_len(dst, totlen),
513 DMA_FROM_DEVICE);
514 } 497 }
515 498
516 return ret; 499 return ret;
diff --git a/drivers/crypto/inside-secure/safexcel_hash.c b/drivers/crypto/inside-secure/safexcel_hash.c
index 20950744ea4e..a80a5e757b1f 100644
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -273,8 +273,7 @@ static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
273 } 273 }
274 274
275 /* Now handle the current ahash request buffer(s) */ 275 /* Now handle the current ahash request buffer(s) */
276 req->nents = dma_map_sg(priv->dev, areq->src, 276 req->nents = dma_map_sg(priv->dev, areq->src, sg_nents(areq->src),
277 sg_nents_for_len(areq->src, areq->nbytes),
278 DMA_TO_DEVICE); 277 DMA_TO_DEVICE);
279 if (!req->nents) { 278 if (!req->nents) {
280 ret = -ENOMEM; 279 ret = -ENOMEM;