about · summary · refs · log · tree · commit · diff · stats
path: root/drivers/crypto/atmel-aes.c
diff options
context:
space:
mode:
authorCyrille Pitchen <cyrille.pitchen@atmel.com>2015-12-17 11:48:39 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2015-12-23 05:19:51 -0500
commitccbf72980b22b04f0b7afb2b82eb699ee7da635c (patch)
tree9cb456f013e05a2a284886ee1f1d07c0150ca262 /drivers/crypto/atmel-aes.c
parentcdfab4a7e3f16224e3a52dfe990a9bd870363690 (diff)
crypto: atmel-aes - make crypto request queue management more generic
This patch changes atmel_aes_handle_queue() to make it more generic. The function argument is now a pointer to struct crypto_async_request, which is the common base of struct ablkcipher_request and struct aead_request. Also this patch introduces struct atmel_aes_base_ctx which will be the common base of all the transformation contexts. Hence the very same queue will be used to manage both block cipher and AEAD requests (such as gcm and authenc implemented in further patches). Signed-off-by: Cyrille Pitchen <cyrille.pitchen@atmel.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto/atmel-aes.c')
-rw-r--r-- drivers/crypto/atmel-aes.c | 75
1 file changed, 49 insertions, 26 deletions
diff --git a/drivers/crypto/atmel-aes.c b/drivers/crypto/atmel-aes.c
index ccb8a8322bf7..48407a7f6f61 100644
--- a/drivers/crypto/atmel-aes.c
+++ b/drivers/crypto/atmel-aes.c
@@ -78,8 +78,13 @@ struct atmel_aes_caps {
78 78
79struct atmel_aes_dev; 79struct atmel_aes_dev;
80 80
81struct atmel_aes_ctx { 81
82typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);
83
84
85struct atmel_aes_base_ctx {
82 struct atmel_aes_dev *dd; 86 struct atmel_aes_dev *dd;
87 atmel_aes_fn_t start;
83 88
84 int keylen; 89 int keylen;
85 u32 key[AES_KEYSIZE_256 / sizeof(u32)]; 90 u32 key[AES_KEYSIZE_256 / sizeof(u32)];
@@ -87,6 +92,10 @@ struct atmel_aes_ctx {
87 u16 block_size; 92 u16 block_size;
88}; 93};
89 94
95struct atmel_aes_ctx {
96 struct atmel_aes_base_ctx base;
97};
98
90struct atmel_aes_reqctx { 99struct atmel_aes_reqctx {
91 unsigned long mode; 100 unsigned long mode;
92}; 101};
@@ -101,7 +110,9 @@ struct atmel_aes_dev {
101 unsigned long phys_base; 110 unsigned long phys_base;
102 void __iomem *io_base; 111 void __iomem *io_base;
103 112
104 struct atmel_aes_ctx *ctx; 113 struct crypto_async_request *areq;
114 struct atmel_aes_base_ctx *ctx;
115
105 struct device *dev; 116 struct device *dev;
106 struct clk *iclk; 117 struct clk *iclk;
107 int irq; 118 int irq;
@@ -115,7 +126,6 @@ struct atmel_aes_dev {
115 struct tasklet_struct done_task; 126 struct tasklet_struct done_task;
116 struct tasklet_struct queue_task; 127 struct tasklet_struct queue_task;
117 128
118 struct ablkcipher_request *req;
119 size_t total; 129 size_t total;
120 130
121 struct scatterlist *in_sg; 131 struct scatterlist *in_sg;
@@ -236,7 +246,7 @@ static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
236 atmel_aes_write(dd, offset, *value); 246 atmel_aes_write(dd, offset, *value);
237} 247}
238 248
239static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_ctx *ctx) 249static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
240{ 250{
241 struct atmel_aes_dev *aes_dd = NULL; 251 struct atmel_aes_dev *aes_dd = NULL;
242 struct atmel_aes_dev *tmp; 252 struct atmel_aes_dev *tmp;
@@ -298,7 +308,7 @@ static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
298 308
299static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err) 309static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err)
300{ 310{
301 struct ablkcipher_request *req = dd->req; 311 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
302 312
303 clk_disable_unprepare(dd->iclk); 313 clk_disable_unprepare(dd->iclk);
304 dd->flags &= ~AES_FLAGS_BUSY; 314 dd->flags &= ~AES_FLAGS_BUSY;
@@ -396,6 +406,8 @@ static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd,
396 406
397static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd) 407static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
398{ 408{
409 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
410
399 dd->flags &= ~AES_FLAGS_DMA; 411 dd->flags &= ~AES_FLAGS_DMA;
400 412
401 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in, 413 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in,
@@ -404,11 +416,11 @@ static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
404 dd->dma_size, DMA_FROM_DEVICE); 416 dd->dma_size, DMA_FROM_DEVICE);
405 417
406 /* use cache buffers */ 418 /* use cache buffers */
407 dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg); 419 dd->nb_in_sg = atmel_aes_sg_length(req, dd->in_sg);
408 if (!dd->nb_in_sg) 420 if (!dd->nb_in_sg)
409 return -EINVAL; 421 return -EINVAL;
410 422
411 dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg); 423 dd->nb_out_sg = atmel_aes_sg_length(req, dd->out_sg);
412 if (!dd->nb_out_sg) 424 if (!dd->nb_out_sg)
413 return -EINVAL; 425 return -EINVAL;
414 426
@@ -556,38 +568,49 @@ static void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
556} 568}
557 569
558static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, 570static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
559 struct ablkcipher_request *req) 571 struct crypto_async_request *new_areq)
560{ 572{
561 struct crypto_async_request *async_req, *backlog; 573 struct crypto_async_request *areq, *backlog;
562 struct atmel_aes_ctx *ctx; 574 struct atmel_aes_base_ctx *ctx;
563 struct atmel_aes_reqctx *rctx;
564 unsigned long flags; 575 unsigned long flags;
565 int err, ret = 0; 576 int err, ret = 0;
566 bool use_dma;
567 577
568 spin_lock_irqsave(&dd->lock, flags); 578 spin_lock_irqsave(&dd->lock, flags);
569 if (req) 579 if (new_areq)
570 ret = ablkcipher_enqueue_request(&dd->queue, req); 580 ret = crypto_enqueue_request(&dd->queue, new_areq);
571 if (dd->flags & AES_FLAGS_BUSY) { 581 if (dd->flags & AES_FLAGS_BUSY) {
572 spin_unlock_irqrestore(&dd->lock, flags); 582 spin_unlock_irqrestore(&dd->lock, flags);
573 return ret; 583 return ret;
574 } 584 }
575 backlog = crypto_get_backlog(&dd->queue); 585 backlog = crypto_get_backlog(&dd->queue);
576 async_req = crypto_dequeue_request(&dd->queue); 586 areq = crypto_dequeue_request(&dd->queue);
577 if (async_req) 587 if (areq)
578 dd->flags |= AES_FLAGS_BUSY; 588 dd->flags |= AES_FLAGS_BUSY;
579 spin_unlock_irqrestore(&dd->lock, flags); 589 spin_unlock_irqrestore(&dd->lock, flags);
580 590
581 if (!async_req) 591 if (!areq)
582 return ret; 592 return ret;
583 593
584 if (backlog) 594 if (backlog)
585 backlog->complete(backlog, -EINPROGRESS); 595 backlog->complete(backlog, -EINPROGRESS);
586 596
587 req = ablkcipher_request_cast(async_req); 597 ctx = crypto_tfm_ctx(areq->tfm);
598
599 dd->areq = areq;
600 dd->ctx = ctx;
601
602 err = ctx->start(dd);
603 return (areq != new_areq) ? ret : err;
604}
605
606static int atmel_aes_start(struct atmel_aes_dev *dd)
607{
608 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
609 struct atmel_aes_reqctx *rctx;
610 bool use_dma;
611 int err;
588 612
589 /* assign new request to device */ 613 /* assign new request to device */
590 dd->req = req;
591 dd->total = req->nbytes; 614 dd->total = req->nbytes;
592 dd->in_offset = 0; 615 dd->in_offset = 0;
593 dd->in_sg = req->src; 616 dd->in_sg = req->src;
@@ -595,11 +618,8 @@ static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
595 dd->out_sg = req->dst; 618 dd->out_sg = req->dst;
596 619
597 rctx = ablkcipher_request_ctx(req); 620 rctx = ablkcipher_request_ctx(req);
598 ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
599 rctx->mode &= AES_FLAGS_MODE_MASK; 621 rctx->mode &= AES_FLAGS_MODE_MASK;
600 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode; 622 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode;
601 dd->ctx = ctx;
602 ctx->dd = dd;
603 623
604 err = atmel_aes_hw_init(dd); 624 err = atmel_aes_hw_init(dd);
605 if (!err) { 625 if (!err) {
@@ -616,7 +636,7 @@ static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
616 tasklet_schedule(&dd->queue_task); 636 tasklet_schedule(&dd->queue_task);
617 } 637 }
618 638
619 return ret; 639 return -EINPROGRESS;
620} 640}
621 641
622static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd) 642static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd)
@@ -704,7 +724,7 @@ static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
704 724
705static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode) 725static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
706{ 726{
707 struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx( 727 struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(
708 crypto_ablkcipher_reqtfm(req)); 728 crypto_ablkcipher_reqtfm(req));
709 struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req); 729 struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
710 struct atmel_aes_dev *dd; 730 struct atmel_aes_dev *dd;
@@ -747,7 +767,7 @@ static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
747 767
748 rctx->mode = mode; 768 rctx->mode = mode;
749 769
750 return atmel_aes_handle_queue(dd, req); 770 return atmel_aes_handle_queue(dd, &req->base);
751} 771}
752 772
753static bool atmel_aes_filter(struct dma_chan *chan, void *slave) 773static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
@@ -822,7 +842,7 @@ static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
822static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key, 842static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
823 unsigned int keylen) 843 unsigned int keylen)
824{ 844{
825 struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm); 845 struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);
826 846
827 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 847 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
828 keylen != AES_KEYSIZE_256) { 848 keylen != AES_KEYSIZE_256) {
@@ -946,7 +966,10 @@ static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
946 966
947static int atmel_aes_cra_init(struct crypto_tfm *tfm) 967static int atmel_aes_cra_init(struct crypto_tfm *tfm)
948{ 968{
969 struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
970
949 tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx); 971 tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
972 ctx->base.start = atmel_aes_start;
950 973
951 return 0; 974 return 0;
952} 975}