Diffstat (limited to 'drivers/spi/spi-sprd.c')
-rw-r--r--	drivers/spi/spi-sprd.c | 293
1 file changed, 290 insertions(+), 3 deletions(-)
diff --git a/drivers/spi/spi-sprd.c b/drivers/spi/spi-sprd.c
index 06578412b04d..a4398e737650 100644
--- a/drivers/spi/spi-sprd.c
+++ b/drivers/spi/spi-sprd.c
@@ -2,6 +2,9 @@
 // Copyright (C) 2018 Spreadtrum Communications Inc.
 
 #include <linux/clk.h>
+#include <linux/dmaengine.h>
+#include <linux/dma-mapping.h>
+#include <linux/dma/sprd-dma.h>
 #include <linux/interrupt.h>
 #include <linux/io.h>
 #include <linux/iopoll.h>
@@ -9,6 +12,7 @@
 #include <linux/module.h>
 #include <linux/of.h>
 #include <linux/of_device.h>
+#include <linux/of_dma.h>
 #include <linux/platform_device.h>
 #include <linux/pm_runtime.h>
 #include <linux/spi/spi.h>
@@ -128,9 +132,25 @@
 #define SPRD_SPI_DEFAULT_SOURCE		26000000
 #define SPRD_SPI_MAX_SPEED_HZ		48000000
 #define SPRD_SPI_AUTOSUSPEND_DELAY	100
+#define SPRD_SPI_DMA_STEP		8
+
+enum sprd_spi_dma_channel {
+	SPI_RX,
+	SPI_TX,
+	SPI_MAX,
+};
+
+struct sprd_spi_dma {
+	bool enable;
+	struct dma_chan *dma_chan[SPI_MAX];
+	enum dma_slave_buswidth width;
+	u32 fragmens_len;
+	u32 rx_len;
+};
 
 struct sprd_spi {
 	void __iomem *base;
+	phys_addr_t phy_base;
 	struct device *dev;
 	struct clk *clk;
 	int irq;
@@ -142,6 +162,7 @@ struct sprd_spi {
 	u32 hw_speed_hz;
 	u32 len;
 	int status;
+	struct sprd_spi_dma dma;
 	struct completion xfer_completion;
 	const void *tx_buf;
 	void *rx_buf;
@@ -431,6 +452,208 @@ complete:
 	return ret;
 }
 
+static void sprd_spi_irq_enable(struct sprd_spi *ss)
+{
+	u32 val;
+
+	/* Clear interrupt status before enabling interrupt. */
+	writel_relaxed(SPRD_SPI_TX_END_CLR | SPRD_SPI_RX_END_CLR,
+		       ss->base + SPRD_SPI_INT_CLR);
+	/* Enable SPI interrupt only in DMA mode. */
+	val = readl_relaxed(ss->base + SPRD_SPI_INT_EN);
+	writel_relaxed(val | SPRD_SPI_TX_END_INT_EN |
+		       SPRD_SPI_RX_END_INT_EN,
+		       ss->base + SPRD_SPI_INT_EN);
+}
+
+static void sprd_spi_irq_disable(struct sprd_spi *ss)
+{
+	writel_relaxed(0, ss->base + SPRD_SPI_INT_EN);
+}
+
+static void sprd_spi_dma_enable(struct sprd_spi *ss, bool enable)
+{
+	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL2);
+
+	if (enable)
+		val |= SPRD_SPI_DMA_EN;
+	else
+		val &= ~SPRD_SPI_DMA_EN;
+
+	writel_relaxed(val, ss->base + SPRD_SPI_CTL2);
+}
+
+static int sprd_spi_dma_submit(struct dma_chan *dma_chan,
+			       struct dma_slave_config *c,
+			       struct sg_table *sg,
+			       enum dma_transfer_direction dir)
+{
+	struct dma_async_tx_descriptor *desc;
+	dma_cookie_t cookie;
+	unsigned long flags;
+	int ret;
+
+	ret = dmaengine_slave_config(dma_chan, c);
+	if (ret < 0)
+		return ret;
+
+	flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
+			       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
+	desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
+	if (!desc)
+		return -ENODEV;
+
+	cookie = dmaengine_submit(desc);
+	if (dma_submit_error(cookie))
+		return dma_submit_error(cookie);
+
+	dma_async_issue_pending(dma_chan);
+
+	return 0;
+}
+
+static int sprd_spi_dma_rx_config(struct sprd_spi *ss, struct spi_transfer *t)
+{
+	struct dma_chan *dma_chan = ss->dma.dma_chan[SPI_RX];
+	struct dma_slave_config config = {
+		.src_addr = ss->phy_base,
+		.src_addr_width = ss->dma.width,
+		.dst_addr_width = ss->dma.width,
+		.dst_maxburst = ss->dma.fragmens_len,
+	};
+	int ret;
+
+	ret = sprd_spi_dma_submit(dma_chan, &config, &t->rx_sg, DMA_DEV_TO_MEM);
+	if (ret)
+		return ret;
+
+	return ss->dma.rx_len;
+}
+
+static int sprd_spi_dma_tx_config(struct sprd_spi *ss, struct spi_transfer *t)
+{
+	struct dma_chan *dma_chan = ss->dma.dma_chan[SPI_TX];
+	struct dma_slave_config config = {
+		.dst_addr = ss->phy_base,
+		.src_addr_width = ss->dma.width,
+		.dst_addr_width = ss->dma.width,
+		.src_maxburst = ss->dma.fragmens_len,
+	};
+	int ret;
+
+	ret = sprd_spi_dma_submit(dma_chan, &config, &t->tx_sg, DMA_MEM_TO_DEV);
+	if (ret)
+		return ret;
+
+	return t->len;
+}
+
+static int sprd_spi_dma_request(struct sprd_spi *ss)
+{
+	ss->dma.dma_chan[SPI_RX] = dma_request_chan(ss->dev, "rx_chn");
+	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPI_RX])) {
+		if (PTR_ERR(ss->dma.dma_chan[SPI_RX]) == -EPROBE_DEFER)
+			return PTR_ERR(ss->dma.dma_chan[SPI_RX]);
+
+		dev_err(ss->dev, "request RX DMA channel failed!\n");
+		return PTR_ERR(ss->dma.dma_chan[SPI_RX]);
+	}
+
+	ss->dma.dma_chan[SPI_TX] = dma_request_chan(ss->dev, "tx_chn");
+	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPI_TX])) {
+		if (PTR_ERR(ss->dma.dma_chan[SPI_TX]) == -EPROBE_DEFER)
+			return PTR_ERR(ss->dma.dma_chan[SPI_TX]);
+
+		dev_err(ss->dev, "request TX DMA channel failed!\n");
+		dma_release_channel(ss->dma.dma_chan[SPI_RX]);
+		return PTR_ERR(ss->dma.dma_chan[SPI_TX]);
+	}
+
+	return 0;
+}
+
+static void sprd_spi_dma_release(struct sprd_spi *ss)
+{
+	if (ss->dma.dma_chan[SPI_RX])
+		dma_release_channel(ss->dma.dma_chan[SPI_RX]);
+
+	if (ss->dma.dma_chan[SPI_TX])
+		dma_release_channel(ss->dma.dma_chan[SPI_TX]);
+}
+
+static int sprd_spi_dma_txrx_bufs(struct spi_device *sdev,
+				  struct spi_transfer *t)
+{
+	struct sprd_spi *ss = spi_master_get_devdata(sdev->master);
+	u32 trans_len = ss->trans_len;
+	int ret, write_size = 0;
+
+	reinit_completion(&ss->xfer_completion);
+	sprd_spi_irq_enable(ss);
+	if (ss->trans_mode & SPRD_SPI_TX_MODE) {
+		write_size = sprd_spi_dma_tx_config(ss, t);
+		sprd_spi_set_tx_length(ss, trans_len);
+
+		/*
+		 * For our 3 wires mode or dual TX line mode, we need
+		 * to request the controller to transfer.
+		 */
+		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
+			sprd_spi_tx_req(ss);
+	} else {
+		sprd_spi_set_rx_length(ss, trans_len);
+
+		/*
+		 * For our 3 wires mode or dual TX line mode, we need
+		 * to request the controller to read.
+		 */
+		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
+			sprd_spi_rx_req(ss);
+		else
+			write_size = ss->write_bufs(ss, trans_len);
+	}
+
+	if (write_size < 0) {
+		ret = write_size;
+		dev_err(ss->dev, "failed to write, ret = %d\n", ret);
+		goto trans_complete;
+	}
+
+	if (ss->trans_mode & SPRD_SPI_RX_MODE) {
+		/*
+		 * Set up the DMA receive data length, which must be an
+		 * integral multiple of fragment length. But when the length
+		 * of received data is less than fragment length, DMA can be
+		 * configured to receive data according to the actual length
+		 * of received data.
+		 */
+		ss->dma.rx_len = t->len > ss->dma.fragmens_len ?
+			(t->len - t->len % ss->dma.fragmens_len) :
+			 t->len;
+		ret = sprd_spi_dma_rx_config(ss, t);
+		if (ret < 0) {
+			dev_err(&sdev->dev,
+				"failed to configure rx DMA, ret = %d\n", ret);
+			goto trans_complete;
+		}
+	}
+
+	sprd_spi_dma_enable(ss, true);
+	wait_for_completion(&(ss->xfer_completion));
+
+	if (ss->trans_mode & SPRD_SPI_TX_MODE)
+		ret = write_size;
+	else
+		ret = ss->dma.rx_len;
+
+trans_complete:
+	sprd_spi_dma_enable(ss, false);
+	sprd_spi_enter_idle(ss);
+	sprd_spi_irq_disable(ss);
+
+	return ret;
+}
+
 static void sprd_spi_set_speed(struct sprd_spi *ss, u32 speed_hz)
 {
 	/*
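
The rounding rule spelled out in the comment inside sprd_spi_dma_txrx_bufs() above can be shown as a small stand-alone sketch. The helper name and the sample numbers below are made up for illustration; they only restate the arithmetic, not the driver's actual call path.

	/*
	 * Hypothetical illustration of the rx_len rounding used above: DMA is
	 * programmed for a whole number of fragments, and any remainder shorter
	 * than one fragment is left for the PIO path in the RX-end interrupt.
	 */
	static u32 sprd_spi_dma_rx_round(u32 len, u32 fragmens_len)
	{
		if (len > fragmens_len)
			return len - len % fragmens_len;	/* whole fragments only */

		return len;	/* shorter than one fragment: take it as-is */
	}

For example, with a 100-byte transfer and an 8-byte fragment length, DMA would move 96 bytes and the remaining 4 would be drained by the CPU once the RX-end interrupt fires.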
@@ -516,16 +739,22 @@ static int sprd_spi_setup_transfer(struct spi_device *sdev,
 		ss->trans_len = t->len;
 		ss->read_bufs = sprd_spi_read_bufs_u8;
 		ss->write_bufs = sprd_spi_write_bufs_u8;
+		ss->dma.width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP;
 		break;
 	case 16:
 		ss->trans_len = t->len >> 1;
 		ss->read_bufs = sprd_spi_read_bufs_u16;
 		ss->write_bufs = sprd_spi_write_bufs_u16;
+		ss->dma.width = DMA_SLAVE_BUSWIDTH_2_BYTES;
+		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 1;
 		break;
 	case 32:
 		ss->trans_len = t->len >> 2;
 		ss->read_bufs = sprd_spi_read_bufs_u32;
 		ss->write_bufs = sprd_spi_write_bufs_u32;
+		ss->dma.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
+		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 2;
 		break;
 	default:
 		return -EINVAL;
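
A hedged summary of what the switch above configures for DMA: the slave bus width follows the word size, and the fragment length is SPRD_SPI_DMA_STEP scaled by the same factor. The helper below is hypothetical and only restates those assignments.

	/* Hypothetical helper mirroring the per-word-size DMA settings above. */
	static u32 sprd_spi_dma_frag_len(u8 bits_per_word)
	{
		switch (bits_per_word) {
		case 8:
			return SPRD_SPI_DMA_STEP;	/* 1-byte bus width, fragment 8 */
		case 16:
			return SPRD_SPI_DMA_STEP << 1;	/* 2-byte bus width, fragment 16 */
		case 32:
			return SPRD_SPI_DMA_STEP << 2;	/* 4-byte bus width, fragment 32 */
		default:
			return 0;	/* unsupported word size; setup returns -EINVAL */
		}
	}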
@@ -563,7 +792,11 @@ static int sprd_spi_transfer_one(struct spi_controller *sctlr,
 	if (ret)
 		goto setup_err;
 
-	ret = sprd_spi_txrx_bufs(sdev, t);
+	if (sctlr->can_dma(sctlr, sdev, t))
+		ret = sprd_spi_dma_txrx_bufs(sdev, t);
+	else
+		ret = sprd_spi_txrx_bufs(sdev, t);
+
 	if (ret == t->len)
 		ret = 0;
 	else if (ret >= 0)
@@ -590,6 +823,11 @@ static irqreturn_t sprd_spi_handle_irq(int irq, void *data)
 
 	if (val & SPRD_SPI_MASK_RX_END) {
 		writel_relaxed(SPRD_SPI_RX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
+		if (ss->dma.rx_len < ss->len) {
+			ss->rx_buf += ss->dma.rx_len;
+			ss->dma.rx_len +=
+				ss->read_bufs(ss, ss->len - ss->dma.rx_len);
+		}
 		complete(&ss->xfer_completion);
 
 		return IRQ_HANDLED;
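
The branch added to the RX-end interrupt above drains whatever the DMA transfer left behind. A hypothetical walk-through with made-up numbers, assuming the rounding sketched earlier:

	/*
	 * Hypothetical example only: with ss->len = 100 and ss->dma.rx_len = 96,
	 * the handler advances rx_buf past the region filled by DMA and reads
	 * the remaining 4 units through ss->read_bufs() before completing the
	 * transfer.
	 */
	static u32 sprd_spi_pio_tail(u32 len, u32 rx_len)
	{
		return (rx_len < len) ? len - rx_len : 0;	/* units left for PIO */
	}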
@@ -647,6 +885,35 @@ static int sprd_spi_clk_init(struct platform_device *pdev, struct sprd_spi *ss)
 	return 0;
 }
 
+static bool sprd_spi_can_dma(struct spi_controller *sctlr,
+			     struct spi_device *spi, struct spi_transfer *t)
+{
+	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
+
+	return ss->dma.enable && (t->len > SPRD_SPI_FIFO_SIZE);
+}
+
+static int sprd_spi_dma_init(struct platform_device *pdev, struct sprd_spi *ss)
+{
+	int ret;
+
+	ret = sprd_spi_dma_request(ss);
+	if (ret) {
+		if (ret == -EPROBE_DEFER)
+			return ret;
+
+		dev_warn(&pdev->dev,
+			 "failed to request dma, enter no dma mode, ret = %d\n",
+			 ret);
+
+		return 0;
+	}
+
+	ss->dma.enable = true;
+
+	return 0;
+}
+
 static int sprd_spi_probe(struct platform_device *pdev)
 {
 	struct spi_controller *sctlr;
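
The fallback policy above reads as: use DMA only when both channels were obtained at probe time and the transfer would overflow the FIFO; otherwise stay on PIO. The sketch below is hypothetical and merely restates that predicate (SPRD_SPI_FIFO_SIZE is the driver's existing FIFO depth constant).

	/* Hypothetical restatement of the sprd_spi_can_dma() policy above. */
	static bool sprd_spi_should_use_dma(bool dma_ready, u32 xfer_len, u32 fifo_size)
	{
		return dma_ready && xfer_len > fifo_size;	/* DMA only past the FIFO depth */
	}

Note that sprd_spi_dma_init() deliberately returns 0 when the channel request fails for any reason other than -EPROBE_DEFER, so a missing DMA controller degrades the driver to PIO instead of failing probe.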
@@ -667,12 +934,14 @@ static int sprd_spi_probe(struct platform_device *pdev)
 		goto free_controller;
 	}
 
+	ss->phy_base = res->start;
 	ss->dev = &pdev->dev;
 	sctlr->dev.of_node = pdev->dev.of_node;
 	sctlr->mode_bits = SPI_CPOL | SPI_CPHA | SPI_3WIRE | SPI_TX_DUAL;
 	sctlr->bus_num = pdev->id;
 	sctlr->set_cs = sprd_spi_chipselect;
 	sctlr->transfer_one = sprd_spi_transfer_one;
+	sctlr->can_dma = sprd_spi_can_dma;
 	sctlr->auto_runtime_pm = true;
 	sctlr->max_speed_hz = min_t(u32, ss->src_clk >> 1,
 				    SPRD_SPI_MAX_SPEED_HZ);
@@ -687,10 +956,14 @@ static int sprd_spi_probe(struct platform_device *pdev)
 	if (ret)
 		goto free_controller;
 
-	ret = clk_prepare_enable(ss->clk);
+	ret = sprd_spi_dma_init(pdev, ss);
 	if (ret)
 		goto free_controller;
 
+	ret = clk_prepare_enable(ss->clk);
+	if (ret)
+		goto release_dma;
+
 	ret = pm_runtime_set_active(&pdev->dev);
 	if (ret < 0)
 		goto disable_clk;
@@ -719,6 +992,8 @@ err_rpm_put:
 	pm_runtime_disable(&pdev->dev);
 disable_clk:
 	clk_disable_unprepare(ss->clk);
+release_dma:
+	sprd_spi_dma_release(ss);
 free_controller:
 	spi_controller_put(sctlr);
 
@@ -739,6 +1014,8 @@ static int sprd_spi_remove(struct platform_device *pdev)
 
 	spi_controller_suspend(sctlr);
 
+	if (ss->dma.enable)
+		sprd_spi_dma_release(ss);
 	clk_disable_unprepare(ss->clk);
 	pm_runtime_put_noidle(&pdev->dev);
 	pm_runtime_disable(&pdev->dev);
@@ -751,6 +1028,9 @@ static int __maybe_unused sprd_spi_runtime_suspend(struct device *dev)
 	struct spi_controller *sctlr = dev_get_drvdata(dev);
 	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
 
+	if (ss->dma.enable)
+		sprd_spi_dma_release(ss);
+
 	clk_disable_unprepare(ss->clk);
 
 	return 0;
@@ -766,7 +1046,14 @@ static int __maybe_unused sprd_spi_runtime_resume(struct device *dev)
 	if (ret)
 		return ret;
 
-	return 0;
+	if (!ss->dma.enable)
+		return 0;
+
+	ret = sprd_spi_dma_request(ss);
+	if (ret)
+		clk_disable_unprepare(ss->clk);
+
+	return ret;
 }
 
 static const struct dev_pm_ops sprd_spi_pm_ops = {