author     Dmitry Kasatkin <dmitry.kasatkin@nokia.com>  2010-11-30 03:13:31 -0500
committer  Herbert Xu <herbert@gondor.apana.org.au>     2010-12-02 03:37:07 -0500
commit     83ea7e0fe1471508ab8e8d7b317e743fe7a05a5f (patch)
tree       1358c0b0f636f882a98f90e4e596f1dc51a1e7f0 /drivers/crypto
parent     67a730ce449561f6df838f0b38a2b72cbf4e3c4c (diff)
crypto: omap-aes - initialize aes module once per request
The AES module was initialized for every DMA transaction, which is redundant. It is now initialized once per request.

Signed-off-by: Dmitry Kasatkin <dmitry.kasatkin@nokia.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--  drivers/crypto/omap-aes.c  54
1 files changed, 28 insertions, 26 deletions
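Before reading the diff, here is a minimal, compilable C sketch of the lifecycle the patch moves to: hardware init and control setup happen once per request, and only the DMA programming is repeated per transfer. This is not driver code; every identifier in it (handle_request, hw_init, write_ctrl, dma_transfer, finish_req, nr_dma_transfers) is a hypothetical stand-in for the pattern, not the driver's API.

/*
 * Illustrative sketch only -- not from the driver. All identifiers are
 * hypothetical stand-ins for the once-per-request pattern the patch adopts.
 */
#include <stdio.h>

struct request { int nr_dma_transfers; };

static void hw_init(void)       { printf("clk_enable + one-time module init\n"); }
static void write_ctrl(void)    { printf("program key/IV and DMA port params\n"); }
static void dma_transfer(int i) { printf("DMA transfer %d\n", i); }
static void finish_req(void)    { printf("clk_disable, complete request\n"); }

/*
 * After the patch: init and control setup run once per request; the
 * per-transfer path only programs and starts the DMA channels.
 */
static void handle_request(struct request *req)
{
        hw_init();
        write_ctrl();

        for (int i = 0; i < req->nr_dma_transfers; i++)
                dma_transfer(i);

        finish_req();
}

int main(void)
{
        struct request req = { .nr_dma_transfers = 3 };
        handle_request(&req);
        return 0;
}

Before the patch, the equivalents of hw_init() and write_ctrl() ran inside the per-transfer DMA path (omap_aes_crypt_dma); the hunks below remove them from there and hoist them into the per-request path.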
diff --git a/drivers/crypto/omap-aes.c b/drivers/crypto/omap-aes.c
index 0b21dcef0289..b69da4f07c89 100644
--- a/drivers/crypto/omap-aes.c
+++ b/drivers/crypto/omap-aes.c
@@ -176,6 +176,11 @@ static int omap_aes_wait(struct omap_aes_dev *dd, u32 offset, u32 bit)
 
 static int omap_aes_hw_init(struct omap_aes_dev *dd)
 {
+        /*
+         * clocks are enabled when request starts and disabled when finished.
+         * It may be long delays between requests.
+         * Device might go to off mode to save power.
+         */
         clk_enable(dd->iclk);
 
         if (!(dd->flags & FLAGS_INIT)) {
@@ -190,10 +195,9 @@ static int omap_aes_hw_init(struct omap_aes_dev *dd)
                         __asm__ __volatile__("nop");
 
                 if (omap_aes_wait(dd, AES_REG_SYSSTATUS,
-                                AES_REG_SYSSTATUS_RESETDONE)) {
-                        clk_disable(dd->iclk);
+                                AES_REG_SYSSTATUS_RESETDONE))
                         return -ETIMEDOUT;
-                }
+
                 dd->flags |= FLAGS_INIT;
                 dd->err = 0;
         }
@@ -243,9 +247,19 @@ static int omap_aes_write_ctrl(struct omap_aes_dev *dd)
 
         omap_aes_write_mask(dd, AES_REG_CTRL, val, mask);
 
-        /* start DMA or disable idle mode */
-        omap_aes_write_mask(dd, AES_REG_MASK, AES_REG_MASK_START,
-                            AES_REG_MASK_START);
+        /* IN */
+        omap_set_dma_dest_params(dd->dma_lch_in, 0, OMAP_DMA_AMODE_CONSTANT,
+                                 dd->phys_base + AES_REG_DATA, 0, 4);
+
+        omap_set_dma_dest_burst_mode(dd->dma_lch_in, OMAP_DMA_DATA_BURST_4);
+        omap_set_dma_src_burst_mode(dd->dma_lch_in, OMAP_DMA_DATA_BURST_4);
+
+        /* OUT */
+        omap_set_dma_src_params(dd->dma_lch_out, 0, OMAP_DMA_AMODE_CONSTANT,
+                                dd->phys_base + AES_REG_DATA, 0, 4);
+
+        omap_set_dma_src_burst_mode(dd->dma_lch_out, OMAP_DMA_DATA_BURST_4);
+        omap_set_dma_dest_burst_mode(dd->dma_lch_out, OMAP_DMA_DATA_BURST_4);
 
         return 0;
 }
@@ -419,7 +433,6 @@ static int omap_aes_crypt_dma(struct crypto_tfm *tfm, dma_addr_t dma_addr_in,
         struct omap_aes_ctx *ctx = crypto_tfm_ctx(tfm);
         struct omap_aes_dev *dd = ctx->dd;
         int len32;
-        int err;
 
         pr_debug("len: %d\n", length);
 
@@ -432,12 +445,6 @@ static int omap_aes_crypt_dma(struct crypto_tfm *tfm, dma_addr_t dma_addr_in,
         len32 = DIV_ROUND_UP(length, sizeof(u32));
 
         /* IN */
-        omap_set_dma_dest_params(dd->dma_lch_in, 0, OMAP_DMA_AMODE_CONSTANT,
-                dd->phys_base + AES_REG_DATA, 0, 4);
-
-        omap_set_dma_dest_burst_mode(dd->dma_lch_in, OMAP_DMA_DATA_BURST_4);
-        omap_set_dma_src_burst_mode(dd->dma_lch_in, OMAP_DMA_DATA_BURST_4);
-
         omap_set_dma_transfer_params(dd->dma_lch_in, OMAP_DMA_DATA_TYPE_S32,
                         len32, 1, OMAP_DMA_SYNC_PACKET, dd->dma_in,
                         OMAP_DMA_DST_SYNC);
@@ -446,12 +453,6 @@ static int omap_aes_crypt_dma(struct crypto_tfm *tfm, dma_addr_t dma_addr_in,
                         dma_addr_in, 0, 0);
 
         /* OUT */
-        omap_set_dma_src_params(dd->dma_lch_out, 0, OMAP_DMA_AMODE_CONSTANT,
-                dd->phys_base + AES_REG_DATA, 0, 4);
-
-        omap_set_dma_src_burst_mode(dd->dma_lch_out, OMAP_DMA_DATA_BURST_4);
-        omap_set_dma_dest_burst_mode(dd->dma_lch_out, OMAP_DMA_DATA_BURST_4);
-
         omap_set_dma_transfer_params(dd->dma_lch_out, OMAP_DMA_DATA_TYPE_S32,
                         len32, 1, OMAP_DMA_SYNC_PACKET,
                         dd->dma_out, OMAP_DMA_SRC_SYNC);
@@ -459,13 +460,13 @@ static int omap_aes_crypt_dma(struct crypto_tfm *tfm, dma_addr_t dma_addr_in,
         omap_set_dma_dest_params(dd->dma_lch_out, 0, OMAP_DMA_AMODE_POST_INC,
                         dma_addr_out, 0, 0);
 
-        err = omap_aes_write_ctrl(dd);
-        if (err)
-                return err;
-
         omap_start_dma(dd->dma_lch_in);
         omap_start_dma(dd->dma_lch_out);
 
+        /* start DMA or disable idle mode */
+        omap_aes_write_mask(dd, AES_REG_MASK, AES_REG_MASK_START,
+                            AES_REG_MASK_START);
+
         return 0;
 }
 
@@ -545,6 +546,7 @@ static void omap_aes_finish_req(struct omap_aes_dev *dd, int err)
 
         pr_debug("err: %d\n", err);
 
+        clk_disable(dd->iclk);
         dd->flags &= ~FLAGS_BUSY;
 
         req->base.complete(&req->base, err);
@@ -562,8 +564,6 @@ static int omap_aes_crypt_dma_stop(struct omap_aes_dev *dd)
         omap_stop_dma(dd->dma_lch_in);
         omap_stop_dma(dd->dma_lch_out);
 
-        clk_disable(dd->iclk);
-
         if (dd->flags & FLAGS_FAST) {
                 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);
                 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
@@ -629,7 +629,9 @@ static int omap_aes_handle_queue(struct omap_aes_dev *dd,
         dd->ctx = ctx;
         ctx->dd = dd;
 
-        err = omap_aes_crypt_dma_start(dd);
+        err = omap_aes_write_ctrl(dd);
+        if (!err)
+                err = omap_aes_crypt_dma_start(dd);
         if (err) {
                 /* aes_task will not finish it, so do it here */
                 omap_aes_finish_req(dd, err);
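The final hunk also reorders the queue handler so that the control setup runs before the DMA start and shares its error path. Below is a hedged, self-contained C sketch of that chaining; the *_stub functions are invented placeholders, and only the call order and the "if (!err)" chaining mirror the diff above.

/*
 * Sketch of the error chaining introduced in omap_aes_handle_queue().
 * Stub functions only -- not the driver's code.
 */
#include <stdio.h>

static int write_ctrl_stub(void)      { return 0; }  /* 0 on success */
static int crypt_dma_start_stub(void) { return 0; }
static void finish_req_stub(int err)  { printf("finish_req, err=%d\n", err); }

static void queue_tail(void)
{
        int err;

        err = write_ctrl_stub();                  /* one-time control/DMA-port setup */
        if (!err)
                err = crypt_dma_start_stub();     /* start DMA only if setup succeeded */

        if (err)
                /* the tasklet will not finish it, so do it here */
                finish_req_stub(err);
}

int main(void)
{
        queue_tail();
        return 0;
}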