aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorCyrille Pitchen <cyrille.pitchen@atmel.com>2016-11-24 06:24:59 -0500
committerMark Brown <broonie@kernel.org>2016-11-25 08:07:25 -0500
commit04242ca4e8917999ac2bbc3d2b10409661f60272 (patch)
treeb86546a6ac2cd0e44530fc5e5ff0218c66bd887d
parent7910d9af000acc155745e44be55a5d0dc9e26ce7 (diff)
spi: atmel: Use SPI core DMA mapping framework
Use the SPI core DMA mapping framework instead of our own in case of DMA support. PDC support is not converted to this framework. The driver is now able to transfer a complete sg list through DMA. This eventually fixes an issue with vmalloc'ed DMA memory that is provided for example by UBI/UBIFS layers. Signed-off-by: Cyrille Pitchen <cyrille.pitchen@atmel.com> [nicolas.ferre@atmel.com: restrict the use to non-PDC DMA] Signed-off-by: Nicolas Ferre <nicolas.ferre@atmel.com> Signed-off-by: Mark Brown <broonie@kernel.org>
-rw-r--r--drivers/spi/spi-atmel.c57
1 files changed, 25 insertions, 32 deletions
diff --git a/drivers/spi/spi-atmel.c b/drivers/spi/spi-atmel.c
index 8f20d4f75e4a..7e03e221d307 100644
--- a/drivers/spi/spi-atmel.c
+++ b/drivers/spi/spi-atmel.c
@@ -268,8 +268,6 @@
268struct atmel_spi_dma { 268struct atmel_spi_dma {
269 struct dma_chan *chan_rx; 269 struct dma_chan *chan_rx;
270 struct dma_chan *chan_tx; 270 struct dma_chan *chan_tx;
271 struct scatterlist sgrx;
272 struct scatterlist sgtx;
273 struct dma_async_tx_descriptor *data_desc_rx; 271 struct dma_async_tx_descriptor *data_desc_rx;
274 struct dma_async_tx_descriptor *data_desc_tx; 272 struct dma_async_tx_descriptor *data_desc_tx;
275 273
@@ -453,6 +451,15 @@ static inline bool atmel_spi_use_dma(struct atmel_spi *as,
453 return as->use_dma && xfer->len >= DMA_MIN_BYTES; 451 return as->use_dma && xfer->len >= DMA_MIN_BYTES;
454} 452}
455 453
454static bool atmel_spi_can_dma(struct spi_master *master,
455 struct spi_device *spi,
456 struct spi_transfer *xfer)
457{
458 struct atmel_spi *as = spi_master_get_devdata(master);
459
460 return atmel_spi_use_dma(as, xfer);
461}
462
456static int atmel_spi_dma_slave_config(struct atmel_spi *as, 463static int atmel_spi_dma_slave_config(struct atmel_spi *as,
457 struct dma_slave_config *slave_config, 464 struct dma_slave_config *slave_config,
458 u8 bits_per_word) 465 u8 bits_per_word)
@@ -720,7 +727,6 @@ static int atmel_spi_next_xfer_dma_submit(struct spi_master *master,
720 struct dma_async_tx_descriptor *txdesc; 727 struct dma_async_tx_descriptor *txdesc;
721 struct dma_slave_config slave_config; 728 struct dma_slave_config slave_config;
722 dma_cookie_t cookie; 729 dma_cookie_t cookie;
723 u32 len = *plen;
724 730
725 dev_vdbg(master->dev.parent, "atmel_spi_next_xfer_dma_submit\n"); 731 dev_vdbg(master->dev.parent, "atmel_spi_next_xfer_dma_submit\n");
726 732
@@ -731,34 +737,22 @@ static int atmel_spi_next_xfer_dma_submit(struct spi_master *master,
731 /* release lock for DMA operations */ 737 /* release lock for DMA operations */
732 atmel_spi_unlock(as); 738 atmel_spi_unlock(as);
733 739
734 /* prepare the RX dma transfer */ 740 *plen = xfer->len;
735 sg_init_table(&as->dma.sgrx, 1);
736 as->dma.sgrx.dma_address = xfer->rx_dma + xfer->len - *plen;
737
738 /* prepare the TX dma transfer */
739 sg_init_table(&as->dma.sgtx, 1);
740 as->dma.sgtx.dma_address = xfer->tx_dma + xfer->len - *plen;
741
742 if (len > master->max_dma_len)
743 len = master->max_dma_len;
744
745 sg_dma_len(&as->dma.sgtx) = len;
746 sg_dma_len(&as->dma.sgrx) = len;
747
748 *plen = len;
749 741
750 if (atmel_spi_dma_slave_config(as, &slave_config, 742 if (atmel_spi_dma_slave_config(as, &slave_config,
751 xfer->bits_per_word)) 743 xfer->bits_per_word))
752 goto err_exit; 744 goto err_exit;
753 745
754 /* Send both scatterlists */ 746 /* Send both scatterlists */
755 rxdesc = dmaengine_prep_slave_sg(rxchan, &as->dma.sgrx, 1, 747 rxdesc = dmaengine_prep_slave_sg(rxchan,
748 xfer->rx_sg.sgl, xfer->rx_sg.nents,
756 DMA_FROM_DEVICE, 749 DMA_FROM_DEVICE,
757 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 750 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
758 if (!rxdesc) 751 if (!rxdesc)
759 goto err_dma; 752 goto err_dma;
760 753
761 txdesc = dmaengine_prep_slave_sg(txchan, &as->dma.sgtx, 1, 754 txdesc = dmaengine_prep_slave_sg(txchan,
755 xfer->tx_sg.sgl, xfer->tx_sg.nents,
762 DMA_TO_DEVICE, 756 DMA_TO_DEVICE,
763 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 757 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
764 if (!txdesc) 758 if (!txdesc)
@@ -804,15 +798,10 @@ static void atmel_spi_next_xfer_data(struct spi_master *master,
804 dma_addr_t *rx_dma, 798 dma_addr_t *rx_dma,
805 u32 *plen) 799 u32 *plen)
806{ 800{
807 struct atmel_spi *as = spi_master_get_devdata(master);
808 u32 len = *plen;
809
810 *rx_dma = xfer->rx_dma + xfer->len - *plen; 801 *rx_dma = xfer->rx_dma + xfer->len - *plen;
811 *tx_dma = xfer->tx_dma + xfer->len - *plen; 802 *tx_dma = xfer->tx_dma + xfer->len - *plen;
812 if (len > master->max_dma_len) 803 if (*plen > master->max_dma_len)
813 len = master->max_dma_len; 804 *plen = master->max_dma_len;
814
815 *plen = len;
816} 805}
817 806
818static int atmel_spi_set_xfer_speed(struct atmel_spi *as, 807static int atmel_spi_set_xfer_speed(struct atmel_spi *as,
@@ -1252,7 +1241,7 @@ static int atmel_spi_one_transfer(struct spi_master *master,
1252 * better fault reporting. 1241 * better fault reporting.
1253 */ 1242 */
1254 if ((!msg->is_dma_mapped) 1243 if ((!msg->is_dma_mapped)
1255 && (atmel_spi_use_dma(as, xfer) || as->use_pdc)) { 1244 && as->use_pdc) {
1256 if (atmel_spi_dma_map_xfer(as, xfer) < 0) 1245 if (atmel_spi_dma_map_xfer(as, xfer) < 0)
1257 return -ENOMEM; 1246 return -ENOMEM;
1258 } 1247 }
@@ -1329,7 +1318,7 @@ static int atmel_spi_one_transfer(struct spi_master *master,
1329 } 1318 }
1330 1319
1331 if (!msg->is_dma_mapped 1320 if (!msg->is_dma_mapped
1332 && (atmel_spi_use_dma(as, xfer) || as->use_pdc)) 1321 && as->use_pdc)
1333 atmel_spi_dma_unmap_xfer(master, xfer); 1322 atmel_spi_dma_unmap_xfer(master, xfer);
1334 1323
1335 return 0; 1324 return 0;
@@ -1340,7 +1329,7 @@ static int atmel_spi_one_transfer(struct spi_master *master,
1340 } 1329 }
1341 1330
1342 if (!msg->is_dma_mapped 1331 if (!msg->is_dma_mapped
1343 && (atmel_spi_use_dma(as, xfer) || as->use_pdc)) 1332 && as->use_pdc)
1344 atmel_spi_dma_unmap_xfer(master, xfer); 1333 atmel_spi_dma_unmap_xfer(master, xfer);
1345 1334
1346 if (xfer->delay_usecs) 1335 if (xfer->delay_usecs)
@@ -1518,6 +1507,7 @@ static int atmel_spi_probe(struct platform_device *pdev)
1518 master->cleanup = atmel_spi_cleanup; 1507 master->cleanup = atmel_spi_cleanup;
1519 master->auto_runtime_pm = true; 1508 master->auto_runtime_pm = true;
1520 master->max_dma_len = SPI_MAX_DMA_XFER; 1509 master->max_dma_len = SPI_MAX_DMA_XFER;
1510 master->can_dma = atmel_spi_can_dma;
1521 platform_set_drvdata(pdev, master); 1511 platform_set_drvdata(pdev, master);
1522 1512
1523 as = spi_master_get_devdata(master); 1513 as = spi_master_get_devdata(master);
@@ -1554,10 +1544,13 @@ static int atmel_spi_probe(struct platform_device *pdev)
1554 as->use_pdc = false; 1544 as->use_pdc = false;
1555 if (as->caps.has_dma_support) { 1545 if (as->caps.has_dma_support) {
1556 ret = atmel_spi_configure_dma(as); 1546 ret = atmel_spi_configure_dma(as);
1557 if (ret == 0) 1547 if (ret == 0) {
1548 master->dma_tx = as->dma.chan_tx;
1549 master->dma_rx = as->dma.chan_rx;
1558 as->use_dma = true; 1550 as->use_dma = true;
1559 else if (ret == -EPROBE_DEFER) 1551 } else if (ret == -EPROBE_DEFER) {
1560 return ret; 1552 return ret;
1553 }
1561 } else { 1554 } else {
1562 as->use_pdc = true; 1555 as->use_pdc = true;
1563 } 1556 }