aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMarek Szyprowski <m.szyprowski@samsung.com>2017-01-09 05:36:10 -0500
committerMark Brown <broonie@kernel.org>2017-01-09 07:21:04 -0500
commit3d63a47a380a873408dad10ca62bd8299b2208f1 (patch)
tree14ad2c65ae0f53b4dfb674b6b643e09eeed47a61
parent5c47e3cfd4f9d12e9f594d99118c1381c163ff98 (diff)
spi: s3c64xx: Don't request/release DMA channels for each SPI transfer
Requesting a DMA channel might be a time consuming operation, so there is no need to acquire and release DMA channel for each SPI transfer. DMA channels can be requested during driver probe and kept all the time, also because there are no shared nor dynamically allocated channels on Samsung S3C/S5P/Exynos platforms. While moving dma_request_slave_channel calls, let's switch to dma_request_slave_channel_reason(), which returns error codes on failure, which can be properly propagated to the caller (this for example defers SPI probe when DMA controller is not yet available). Signed-off-by: Marek Szyprowski <m.szyprowski@samsung.com> Reviewed-by: Andi Shyti <andi.shyti@samsung.com> Tested-by: Andi Shyti <andi.shyti@samsung.com> Reviewed-by: Krzysztof Kozlowski <krzk@kernel.org> Signed-off-by: Mark Brown <broonie@kernel.org>
-rw-r--r--drivers/spi/spi-s3c64xx.c57
1 file changed, 29 insertions, 28 deletions
diff --git a/drivers/spi/spi-s3c64xx.c b/drivers/spi/spi-s3c64xx.c
index 28dfdce4beae..849ee82483e4 100644
--- a/drivers/spi/spi-s3c64xx.c
+++ b/drivers/spi/spi-s3c64xx.c
@@ -341,43 +341,16 @@ static void s3c64xx_spi_set_cs(struct spi_device *spi, bool enable)
341static int s3c64xx_spi_prepare_transfer(struct spi_master *spi) 341static int s3c64xx_spi_prepare_transfer(struct spi_master *spi)
342{ 342{
343 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); 343 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi);
344 struct device *dev = &sdd->pdev->dev;
345 344
346 if (is_polling(sdd)) 345 if (is_polling(sdd))
347 return 0; 346 return 0;
348 347
349 /* Acquire DMA channels */
350 sdd->rx_dma.ch = dma_request_slave_channel(dev, "rx");
351 if (!sdd->rx_dma.ch) {
352 dev_err(dev, "Failed to get RX DMA channel\n");
353 return -EBUSY;
354 }
355 spi->dma_rx = sdd->rx_dma.ch; 348 spi->dma_rx = sdd->rx_dma.ch;
356
357 sdd->tx_dma.ch = dma_request_slave_channel(dev, "tx");
358 if (!sdd->tx_dma.ch) {
359 dev_err(dev, "Failed to get TX DMA channel\n");
360 dma_release_channel(sdd->rx_dma.ch);
361 return -EBUSY;
362 }
363 spi->dma_tx = sdd->tx_dma.ch; 349 spi->dma_tx = sdd->tx_dma.ch;
364 350
365 return 0; 351 return 0;
366} 352}
367 353
368static int s3c64xx_spi_unprepare_transfer(struct spi_master *spi)
369{
370 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi);
371
372 /* Free DMA channels */
373 if (!is_polling(sdd)) {
374 dma_release_channel(sdd->rx_dma.ch);
375 dma_release_channel(sdd->tx_dma.ch);
376 }
377
378 return 0;
379}
380
381static bool s3c64xx_spi_can_dma(struct spi_master *master, 354static bool s3c64xx_spi_can_dma(struct spi_master *master,
382 struct spi_device *spi, 355 struct spi_device *spi,
383 struct spi_transfer *xfer) 356 struct spi_transfer *xfer)
@@ -1094,7 +1067,6 @@ static int s3c64xx_spi_probe(struct platform_device *pdev)
1094 master->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer; 1067 master->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer;
1095 master->prepare_message = s3c64xx_spi_prepare_message; 1068 master->prepare_message = s3c64xx_spi_prepare_message;
1096 master->transfer_one = s3c64xx_spi_transfer_one; 1069 master->transfer_one = s3c64xx_spi_transfer_one;
1097 master->unprepare_transfer_hardware = s3c64xx_spi_unprepare_transfer;
1098 master->num_chipselect = sci->num_cs; 1070 master->num_chipselect = sci->num_cs;
1099 master->dma_alignment = 8; 1071 master->dma_alignment = 8;
1100 master->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | 1072 master->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) |
@@ -1161,6 +1133,24 @@ static int s3c64xx_spi_probe(struct platform_device *pdev)
1161 } 1133 }
1162 } 1134 }
1163 1135
1136 if (!is_polling(sdd)) {
1137 /* Acquire DMA channels */
1138 sdd->rx_dma.ch = dma_request_slave_channel_reason(&pdev->dev,
1139 "rx");
1140 if (IS_ERR(sdd->rx_dma.ch)) {
1141 dev_err(&pdev->dev, "Failed to get RX DMA channel\n");
1142 ret = PTR_ERR(sdd->rx_dma.ch);
1143 goto err_disable_io_clk;
1144 }
1145 sdd->tx_dma.ch = dma_request_slave_channel_reason(&pdev->dev,
1146 "tx");
1147 if (IS_ERR(sdd->tx_dma.ch)) {
1148 dev_err(&pdev->dev, "Failed to get TX DMA channel\n");
1149 ret = PTR_ERR(sdd->tx_dma.ch);
1150 goto err_release_tx_dma;
1151 }
1152 }
1153
1164 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT); 1154 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT);
1165 pm_runtime_use_autosuspend(&pdev->dev); 1155 pm_runtime_use_autosuspend(&pdev->dev);
1166 pm_runtime_set_active(&pdev->dev); 1156 pm_runtime_set_active(&pdev->dev);
@@ -1206,6 +1196,12 @@ err_pm_put:
1206 pm_runtime_disable(&pdev->dev); 1196 pm_runtime_disable(&pdev->dev);
1207 pm_runtime_set_suspended(&pdev->dev); 1197 pm_runtime_set_suspended(&pdev->dev);
1208 1198
1199 if (!is_polling(sdd))
1200 dma_release_channel(sdd->rx_dma.ch);
1201err_release_tx_dma:
1202 if (!is_polling(sdd))
1203 dma_release_channel(sdd->tx_dma.ch);
1204err_disable_io_clk:
1209 clk_disable_unprepare(sdd->ioclk); 1205 clk_disable_unprepare(sdd->ioclk);
1210err_disable_src_clk: 1206err_disable_src_clk:
1211 clk_disable_unprepare(sdd->src_clk); 1207 clk_disable_unprepare(sdd->src_clk);
@@ -1226,6 +1222,11 @@ static int s3c64xx_spi_remove(struct platform_device *pdev)
1226 1222
1227 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); 1223 writel(0, sdd->regs + S3C64XX_SPI_INT_EN);
1228 1224
1225 if (!is_polling(sdd)) {
1226 dma_release_channel(sdd->rx_dma.ch);
1227 dma_release_channel(sdd->tx_dma.ch);
1228 }
1229
1229 clk_disable_unprepare(sdd->ioclk); 1230 clk_disable_unprepare(sdd->ioclk);
1230 1231
1231 clk_disable_unprepare(sdd->src_clk); 1232 clk_disable_unprepare(sdd->src_clk);