author		Sonic Zhang <sonic.zhang@analog.com>	2014-02-25 21:39:16 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2014-03-10 08:13:33 -0400
commit		4ea5d9998a9de1c85167582d3fd2760cacf40f7d (patch)
tree		e81d0ba5240188c1eddb4cc03f5ee686a7f800bf /drivers/crypto
parent		3d6f1d12f5ed7603caeeb1870174882256bd0889 (diff)
crypto: bfin_crc - Remove useless SSYNC instruction and cache flush to DMA coherent memory
1) The SSYNC instruction is Blackfin specific and has no effect in this driver.
2) The DMA descriptor and SG middle buffer are in DMA coherent memory. No need to flush.
3) Turn kzalloc, ioremap and request_irq into the corresponding managed device APIs.

Signed-off-by: Sonic Zhang <sonic.zhang@analog.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
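For context on point 2, a minimal sketch (not part of this patch; the helper name is hypothetical) of why the flush_dcache_range() call was redundant: the descriptor ring in this driver comes from dma_alloc_coherent(), which returns memory that stays coherent between the CPU and the device, so CPU writes to the descriptors are visible to the DMA engine without manual cache maintenance.

#include <linux/dma-mapping.h>
#include <linux/gfp.h>

/* Hypothetical helper, for illustration only. */
static void *alloc_desc_ring(struct device *dev, dma_addr_t *dma_handle)
{
	/*
	 * Coherent allocation: CPU stores to this ring are seen by the
	 * device without flush_dcache_range().  Only buffers mapped with
	 * the streaming API (dma_map_single() and friends) need the
	 * dma_sync_*() calls around device access.
	 */
	return dma_alloc_coherent(dev, PAGE_SIZE, dma_handle, GFP_KERNEL);
}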
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/bfin_crc.c	45
1 file changed, 11 insertions(+), 34 deletions(-)
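The bulk of the diff below is the switch to managed (devm_*) resource APIs. As a rough sketch under assumed names (a fictional "foo" driver, not the bfin_crc code), the pattern looks like this: resources obtained through devm_* calls are released automatically when probe fails or the device is unbound, which is what lets the patch drop the goto-based unwind labels and the matching free_irq()/iounmap()/kfree() calls in remove().

#include <linux/device.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

struct foo_priv {			/* hypothetical private data */
	void __iomem *regs;
};

static irqreturn_t foo_irq_handler(int irq, void *dev_id)
{
	return IRQ_HANDLED;		/* placeholder handler */
}

static int foo_probe(struct platform_device *pdev)
{
	struct foo_priv *priv;
	struct resource *res;
	int irq, ret;

	/* freed automatically on probe failure or driver unbind */
	priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	priv->regs = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(priv->regs))
		return PTR_ERR(priv->regs);	/* nothing to unwind by hand */

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(&pdev->dev, irq, foo_irq_handler, 0,
			       dev_name(&pdev->dev), priv);
	if (ret)
		return ret;

	platform_set_drvdata(pdev, priv);
	return 0;
}

Because the memory, MMIO mapping and IRQ are device-managed in this sketch, only resources without devm_ wrappers (here, the DMA channel and the coherent descriptor memory) still need explicit teardown, which matches what remains in the error path and in remove() after this patch.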
diff --git a/drivers/crypto/bfin_crc.c b/drivers/crypto/bfin_crc.c
index d797f31f5d85..c9ff298e6d26 100644
--- a/drivers/crypto/bfin_crc.c
+++ b/drivers/crypto/bfin_crc.c
@@ -139,7 +139,6 @@ static int bfin_crypto_crc_init_hw(struct bfin_crypto_crc *crc, u32 key)
 	/* setup CRC interrupts */
 	crc->regs->status = CMPERRI | DCNTEXPI;
 	crc->regs->intrenset = CMPERRI | DCNTEXPI;
-	SSYNC();
 
 	return 0;
 }
@@ -285,17 +284,12 @@ static void bfin_crypto_crc_config_dma(struct bfin_crypto_crc *crc)
 	if (i == 0)
 		return;
 
-	flush_dcache_range((unsigned int)crc->sg_cpu,
-			(unsigned int)crc->sg_cpu +
-			i * sizeof(struct dma_desc_array));
-
 	/* Set the last descriptor to stop mode */
 	crc->sg_cpu[i - 1].cfg &= ~(DMAFLOW | NDSIZE);
 	crc->sg_cpu[i - 1].cfg |= DI_EN;
 	set_dma_curr_desc_addr(crc->dma_ch, (unsigned long *)crc->sg_dma);
 	set_dma_x_count(crc->dma_ch, 0);
 	set_dma_x_modify(crc->dma_ch, 0);
-	SSYNC();
 	set_dma_config(crc->dma_ch, dma_config);
 }
 
@@ -415,7 +409,6 @@ finish_update:
415 409
416 /* finally kick off CRC operation */ 410 /* finally kick off CRC operation */
417 crc->regs->control |= BLKEN; 411 crc->regs->control |= BLKEN;
418 SSYNC();
419 412
420 return -EINPROGRESS; 413 return -EINPROGRESS;
421} 414}
@@ -539,7 +532,6 @@ static irqreturn_t bfin_crypto_crc_handler(int irq, void *dev_id)
 
 	if (crc->regs->status & DCNTEXP) {
 		crc->regs->status = DCNTEXP;
-		SSYNC();
 
 		/* prepare results */
 		put_unaligned_le32(crc->regs->result, crc->req->result);
@@ -594,7 +586,7 @@ static int bfin_crypto_crc_probe(struct platform_device *pdev)
 	unsigned int timeout = 100000;
 	int ret;
 
-	crc = kzalloc(sizeof(*crc), GFP_KERNEL);
+	crc = devm_kzalloc(dev, sizeof(*crc), GFP_KERNEL);
 	if (!crc) {
 		dev_err(&pdev->dev, "fail to malloc bfin_crypto_crc\n");
 		return -ENOMEM;
@@ -610,42 +602,39 @@ static int bfin_crypto_crc_probe(struct platform_device *pdev)
 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
 	if (res == NULL) {
 		dev_err(&pdev->dev, "Cannot get IORESOURCE_MEM\n");
-		ret = -ENOENT;
-		goto out_error_free_mem;
+		return -ENOENT;
 	}
 
-	crc->regs = ioremap(res->start, resource_size(res));
-	if (!crc->regs) {
+	crc->regs = devm_ioremap_resource(dev, res);
+	if (IS_ERR((void *)crc->regs)) {
 		dev_err(&pdev->dev, "Cannot map CRC IO\n");
-		ret = -ENXIO;
-		goto out_error_free_mem;
+		return PTR_ERR((void *)crc->regs);
 	}
 
 	crc->irq = platform_get_irq(pdev, 0);
 	if (crc->irq < 0) {
 		dev_err(&pdev->dev, "No CRC DCNTEXP IRQ specified\n");
-		ret = -ENOENT;
-		goto out_error_unmap;
+		return -ENOENT;
 	}
 
-	ret = request_irq(crc->irq, bfin_crypto_crc_handler, IRQF_SHARED, dev_name(dev), crc);
+	ret = devm_request_irq(dev, crc->irq, bfin_crypto_crc_handler,
+			IRQF_SHARED, dev_name(dev), crc);
 	if (ret) {
 		dev_err(&pdev->dev, "Unable to request blackfin crc irq\n");
-		goto out_error_unmap;
+		return ret;
 	}
 
 	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
 	if (res == NULL) {
 		dev_err(&pdev->dev, "No CRC DMA channel specified\n");
-		ret = -ENOENT;
-		goto out_error_irq;
+		return -ENOENT;
 	}
 	crc->dma_ch = res->start;
 
 	ret = request_dma(crc->dma_ch, dev_name(dev));
 	if (ret) {
 		dev_err(&pdev->dev, "Unable to attach Blackfin CRC DMA channel\n");
-		goto out_error_irq;
+		return ret;
 	}
 
 	crc->sg_cpu = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &crc->sg_dma, GFP_KERNEL);
@@ -660,9 +649,7 @@ static int bfin_crypto_crc_probe(struct platform_device *pdev)
 	crc->sg_mid_buf = (u8 *)(crc->sg_cpu + ((CRC_MAX_DMA_DESC + 1) << 1));
 
 	crc->regs->control = 0;
-	SSYNC();
 	crc->regs->poly = crc->poly = (u32)pdev->dev.platform_data;
-	SSYNC();
 
 	while (!(crc->regs->status & LUTDONE) && (--timeout) > 0)
 		cpu_relax();
@@ -693,12 +680,6 @@ out_error_dma:
 	if (crc->sg_cpu)
 		dma_free_coherent(&pdev->dev, PAGE_SIZE, crc->sg_cpu, crc->sg_dma);
 	free_dma(crc->dma_ch);
-out_error_irq:
-	free_irq(crc->irq, crc);
-out_error_unmap:
-	iounmap((void *)crc->regs);
-out_error_free_mem:
-	kfree(crc);
 
 	return ret;
 }
@@ -721,10 +702,6 @@ static int bfin_crypto_crc_remove(struct platform_device *pdev)
 	crypto_unregister_ahash(&algs);
 	tasklet_kill(&crc->done_task);
 	free_dma(crc->dma_ch);
-	if (crc->irq > 0)
-		free_irq(crc->irq, crc);
-	iounmap((void *)crc->regs);
-	kfree(crc);
 
 	return 0;
 }