author		Sanchayan Maity <maitysanchayan@gmail.com>	2016-11-10 07:19:15 -0500
committer	Mark Brown <broonie@kernel.org>		2016-11-11 07:21:00 -0500
commit		90ba37033cb94207e97c4ced9be575770438213b (patch)
tree		10b04c4ec8910891a98095cf7ac8e25067270e51
parent		1001354ca34179f3db924eb66672442a173147dc (diff)

spi: spi-fsl-dspi: Add DMA support for Vybrid

Add DMA support for Vybrid.

Signed-off-by: Sanchayan Maity <maitysanchayan@gmail.com>
Signed-off-by: Mark Brown <broonie@kernel.org>
 drivers/spi/spi-fsl-dspi.c | 301 +-
 1 file changed, 300 insertions(+), 1 deletion(-)
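The DMA path added below stages each transfer through a bounce buffer of DSPI_DMA_BUFSIZE bytes and splits longer transfers into several DMA rounds. The following is a minimal standalone sketch (not part of the commit) of that chunking arithmetic, using only the constants visible in this patch; the 5000-byte transfer length is a made-up example.

/* Standalone sketch: mirrors the per-round length calculation in
 * dspi_dma_xfer() from the patch below.
 */
#include <stdio.h>

#define DSPI_FIFO_SIZE          4
#define DSPI_DMA_BUFSIZE        (DSPI_FIFO_SIZE * 1024) /* 4096 bytes */

int main(void)
{
        /* Hypothetical 5000-byte transfer in 16-bit frame mode (tx_word = 1) */
        int remaining = 5000, tx_word = 1, round = 0;

        while (remaining) {
                /* Same arithmetic as the patch: 2048 bytes per round for
                 * 16-bit frames, 1024 bytes per round for 8-bit frames. */
                int bytes_per_buffer = DSPI_DMA_BUFSIZE /
                        (DSPI_FIFO_SIZE / (tx_word ? 2 : 1));
                int len = remaining > bytes_per_buffer ? bytes_per_buffer
                                                       : remaining;

                printf("round %d: %d bytes\n", ++round, len);
                remaining -= len;
        }
        return 0;
}

For the 5000-byte example this prints three rounds of 2048, 2048 and 904 bytes.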
diff --git a/drivers/spi/spi-fsl-dspi.c b/drivers/spi/spi-fsl-dspi.c
index 35c0dd945668..bc64700b514d 100644
--- a/drivers/spi/spi-fsl-dspi.c
+++ b/drivers/spi/spi-fsl-dspi.c
@@ -15,6 +15,8 @@
 
 #include <linux/clk.h>
 #include <linux/delay.h>
+#include <linux/dmaengine.h>
+#include <linux/dma-mapping.h>
 #include <linux/err.h>
 #include <linux/errno.h>
 #include <linux/interrupt.h>
@@ -40,6 +42,7 @@
 #define TRAN_STATE_WORD_ODD_NUM	0x04
 
 #define DSPI_FIFO_SIZE			4
+#define DSPI_DMA_BUFSIZE		(DSPI_FIFO_SIZE * 1024)
 
 #define SPI_MCR				0x00
 #define SPI_MCR_MASTER			(1 << 31)
@@ -71,6 +74,11 @@
 #define SPI_SR_EOQF		0x10000000
 #define SPI_SR_TCFQF		0x80000000
 
+#define SPI_RSER_TFFFE		BIT(25)
+#define SPI_RSER_TFFFD		BIT(24)
+#define SPI_RSER_RFDFE		BIT(17)
+#define SPI_RSER_RFDFD		BIT(16)
+
 #define SPI_RSER		0x30
 #define SPI_RSER_EOQFE		0x10000000
 #define SPI_RSER_TCFQE		0x80000000
@@ -108,6 +116,8 @@
 
 #define SPI_TCR_TCNT_MAX	0x10000
 
+#define DMA_COMPLETION_TIMEOUT	msecs_to_jiffies(3000)
+
 struct chip_data {
 	u32 mcr_val;
 	u32 ctar_val;
@@ -117,6 +127,7 @@ struct chip_data {
 enum dspi_trans_mode {
 	DSPI_EOQ_MODE = 0,
 	DSPI_TCFQ_MODE,
+	DSPI_DMA_MODE,
 };
 
 struct fsl_dspi_devtype_data {
@@ -125,7 +136,7 @@ struct fsl_dspi_devtype_data {
 };
 
 static const struct fsl_dspi_devtype_data vf610_data = {
-	.trans_mode = DSPI_EOQ_MODE,
+	.trans_mode = DSPI_DMA_MODE,
 	.max_clock_factor = 2,
 };
 
@@ -139,6 +150,22 @@ static const struct fsl_dspi_devtype_data ls2085a_data = {
 	.max_clock_factor = 8,
 };
 
+struct fsl_dspi_dma {
+	u32 curr_xfer_len;
+
+	u32 *tx_dma_buf;
+	struct dma_chan *chan_tx;
+	dma_addr_t tx_dma_phys;
+	struct completion cmd_tx_complete;
+	struct dma_async_tx_descriptor *tx_desc;
+
+	u32 *rx_dma_buf;
+	struct dma_chan *chan_rx;
+	dma_addr_t rx_dma_phys;
+	struct completion cmd_rx_complete;
+	struct dma_async_tx_descriptor *rx_desc;
+};
+
 struct fsl_dspi {
 	struct spi_master	*master;
 	struct platform_device	*pdev;
@@ -165,6 +192,7 @@ struct fsl_dspi {
 	u32			waitflags;
 
 	u32			spi_tcnt;
+	struct fsl_dspi_dma	*dma;
 };
 
 static inline int is_double_byte_mode(struct fsl_dspi *dspi)
@@ -176,6 +204,263 @@ static inline int is_double_byte_mode(struct fsl_dspi *dspi)
 	return ((val & SPI_FRAME_BITS_MASK) == SPI_FRAME_BITS(8)) ? 0 : 1;
 }
 
+static void dspi_tx_dma_callback(void *arg)
+{
+	struct fsl_dspi *dspi = arg;
+	struct fsl_dspi_dma *dma = dspi->dma;
+
+	complete(&dma->cmd_tx_complete);
+}
+
+static void dspi_rx_dma_callback(void *arg)
+{
+	struct fsl_dspi *dspi = arg;
+	struct fsl_dspi_dma *dma = dspi->dma;
+	int rx_word;
+	int i, len;
+	u16 d;
+
+	rx_word = is_double_byte_mode(dspi);
+
+	len = rx_word ? (dma->curr_xfer_len / 2) : dma->curr_xfer_len;
+
+	if (!(dspi->dataflags & TRAN_STATE_RX_VOID)) {
+		for (i = 0; i < len; i++) {
+			d = dspi->dma->rx_dma_buf[i];
+			rx_word ? (*(u16 *)dspi->rx = d) :
+				  (*(u8 *)dspi->rx = d);
+			dspi->rx += rx_word + 1;
+		}
+	}
+
+	complete(&dma->cmd_rx_complete);
+}
+
+static int dspi_next_xfer_dma_submit(struct fsl_dspi *dspi)
+{
+	struct fsl_dspi_dma *dma = dspi->dma;
+	struct device *dev = &dspi->pdev->dev;
+	int time_left;
+	int tx_word;
+	int i, len;
+	u16 val;
+
+	tx_word = is_double_byte_mode(dspi);
+
+	len = tx_word ? (dma->curr_xfer_len / 2) : dma->curr_xfer_len;
+
+	for (i = 0; i < len - 1; i++) {
+		val = tx_word ? *(u16 *) dspi->tx : *(u8 *) dspi->tx;
+		dspi->dma->tx_dma_buf[i] =
+			SPI_PUSHR_TXDATA(val) | SPI_PUSHR_PCS(dspi->cs) |
+			SPI_PUSHR_CTAS(0) | SPI_PUSHR_CONT;
+		dspi->tx += tx_word + 1;
+	}
+
+	val = tx_word ? *(u16 *) dspi->tx : *(u8 *) dspi->tx;
+	dspi->dma->tx_dma_buf[i] = SPI_PUSHR_TXDATA(val) |
+				   SPI_PUSHR_PCS(dspi->cs) |
+				   SPI_PUSHR_CTAS(0);
+	dspi->tx += tx_word + 1;
+
+	dma->tx_desc = dmaengine_prep_slave_single(dma->chan_tx,
+					dma->tx_dma_phys,
+					DSPI_DMA_BUFSIZE, DMA_MEM_TO_DEV,
+					DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!dma->tx_desc) {
+		dev_err(dev, "Not able to get desc for DMA xfer\n");
+		return -EIO;
+	}
+
+	dma->tx_desc->callback = dspi_tx_dma_callback;
+	dma->tx_desc->callback_param = dspi;
+	if (dma_submit_error(dmaengine_submit(dma->tx_desc))) {
+		dev_err(dev, "DMA submit failed\n");
+		return -EINVAL;
+	}
+
+	dma->rx_desc = dmaengine_prep_slave_single(dma->chan_rx,
+					dma->rx_dma_phys,
+					DSPI_DMA_BUFSIZE, DMA_DEV_TO_MEM,
+					DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!dma->rx_desc) {
+		dev_err(dev, "Not able to get desc for DMA xfer\n");
+		return -EIO;
+	}
+
+	dma->rx_desc->callback = dspi_rx_dma_callback;
+	dma->rx_desc->callback_param = dspi;
+	if (dma_submit_error(dmaengine_submit(dma->rx_desc))) {
+		dev_err(dev, "DMA submit failed\n");
+		return -EINVAL;
+	}
+
+	reinit_completion(&dspi->dma->cmd_rx_complete);
+	reinit_completion(&dspi->dma->cmd_tx_complete);
+
+	dma_async_issue_pending(dma->chan_rx);
+	dma_async_issue_pending(dma->chan_tx);
+
+	time_left = wait_for_completion_timeout(&dspi->dma->cmd_tx_complete,
+					DMA_COMPLETION_TIMEOUT);
+	if (time_left == 0) {
+		dev_err(dev, "DMA tx timeout\n");
+		dmaengine_terminate_all(dma->chan_tx);
+		dmaengine_terminate_all(dma->chan_rx);
+		return -ETIMEDOUT;
+	}
+
+	time_left = wait_for_completion_timeout(&dspi->dma->cmd_rx_complete,
+					DMA_COMPLETION_TIMEOUT);
+	if (time_left == 0) {
+		dev_err(dev, "DMA rx timeout\n");
+		dmaengine_terminate_all(dma->chan_tx);
+		dmaengine_terminate_all(dma->chan_rx);
+		return -ETIMEDOUT;
+	}
+
+	return 0;
+}
+
+static int dspi_dma_xfer(struct fsl_dspi *dspi)
+{
+	struct fsl_dspi_dma *dma = dspi->dma;
+	struct device *dev = &dspi->pdev->dev;
+	int curr_remaining_bytes;
+	int bytes_per_buffer;
+	int tx_word;
+	int ret = 0;
+
+	tx_word = is_double_byte_mode(dspi);
+	curr_remaining_bytes = dspi->len;
+	while (curr_remaining_bytes) {
+		/* Check if current transfer fits the DMA buffer */
+		dma->curr_xfer_len = curr_remaining_bytes;
+		bytes_per_buffer = DSPI_DMA_BUFSIZE /
+			(DSPI_FIFO_SIZE / (tx_word ? 2 : 1));
+		if (curr_remaining_bytes > bytes_per_buffer)
+			dma->curr_xfer_len = bytes_per_buffer;
+
+		ret = dspi_next_xfer_dma_submit(dspi);
+		if (ret) {
+			dev_err(dev, "DMA transfer failed\n");
+			goto exit;
+
+		} else {
+			curr_remaining_bytes -= dma->curr_xfer_len;
+			if (curr_remaining_bytes < 0)
+				curr_remaining_bytes = 0;
+			dspi->len = curr_remaining_bytes;
+		}
+	}
+
+exit:
+	return ret;
+}
+
+static int dspi_request_dma(struct fsl_dspi *dspi, phys_addr_t phy_addr)
+{
+	struct fsl_dspi_dma *dma;
+	struct dma_slave_config cfg;
+	struct device *dev = &dspi->pdev->dev;
+	int ret;
+
+	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
+	if (!dma)
+		return -ENOMEM;
+
+	dma->chan_rx = dma_request_slave_channel(dev, "rx");
+	if (!dma->chan_rx) {
+		dev_err(dev, "rx dma channel not available\n");
+		ret = -ENODEV;
+		return ret;
+	}
+
+	dma->chan_tx = dma_request_slave_channel(dev, "tx");
+	if (!dma->chan_tx) {
+		dev_err(dev, "tx dma channel not available\n");
+		ret = -ENODEV;
+		goto err_tx_channel;
+	}
+
+	dma->tx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
+					&dma->tx_dma_phys, GFP_KERNEL);
+	if (!dma->tx_dma_buf) {
+		ret = -ENOMEM;
+		goto err_tx_dma_buf;
+	}
+
+	dma->rx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
+					&dma->rx_dma_phys, GFP_KERNEL);
+	if (!dma->rx_dma_buf) {
+		ret = -ENOMEM;
+		goto err_rx_dma_buf;
+	}
+
+	cfg.src_addr = phy_addr + SPI_POPR;
+	cfg.dst_addr = phy_addr + SPI_PUSHR;
+	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
+	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
+	cfg.src_maxburst = 1;
+	cfg.dst_maxburst = 1;
+
+	cfg.direction = DMA_DEV_TO_MEM;
+	ret = dmaengine_slave_config(dma->chan_rx, &cfg);
+	if (ret) {
+		dev_err(dev, "can't configure rx dma channel\n");
+		ret = -EINVAL;
+		goto err_slave_config;
+	}
+
+	cfg.direction = DMA_MEM_TO_DEV;
+	ret = dmaengine_slave_config(dma->chan_tx, &cfg);
+	if (ret) {
+		dev_err(dev, "can't configure tx dma channel\n");
+		ret = -EINVAL;
+		goto err_slave_config;
+	}
+
+	dspi->dma = dma;
+	init_completion(&dma->cmd_tx_complete);
+	init_completion(&dma->cmd_rx_complete);
+
+	return 0;
+
+err_slave_config:
+	devm_kfree(dev, dma->rx_dma_buf);
+err_rx_dma_buf:
+	devm_kfree(dev, dma->tx_dma_buf);
+err_tx_dma_buf:
+	dma_release_channel(dma->chan_tx);
+err_tx_channel:
+	dma_release_channel(dma->chan_rx);
+
+	devm_kfree(dev, dma);
+	dspi->dma = NULL;
+
+	return ret;
+}
+
+static void dspi_release_dma(struct fsl_dspi *dspi)
+{
+	struct fsl_dspi_dma *dma = dspi->dma;
+	struct device *dev = &dspi->pdev->dev;
+
+	if (dma) {
+		if (dma->chan_tx) {
+			dma_unmap_single(dev, dma->tx_dma_phys,
+					DSPI_DMA_BUFSIZE, DMA_TO_DEVICE);
+			dma_release_channel(dma->chan_tx);
+		}
+
+		if (dma->chan_rx) {
+			dma_unmap_single(dev, dma->rx_dma_phys,
+					DSPI_DMA_BUFSIZE, DMA_FROM_DEVICE);
+			dma_release_channel(dma->chan_rx);
+		}
+	}
+}
+
 static void hz_to_spi_baud(char *pbr, char *br, int speed_hz,
 		unsigned long clkrate)
 {
@@ -424,6 +709,12 @@ static int dspi_transfer_one_message(struct spi_master *master,
 		regmap_write(dspi->regmap, SPI_RSER, SPI_RSER_TCFQE);
 		dspi_tcfq_write(dspi);
 		break;
+	case DSPI_DMA_MODE:
+		regmap_write(dspi->regmap, SPI_RSER,
+				SPI_RSER_TFFFE | SPI_RSER_TFFFD |
+				SPI_RSER_RFDFE | SPI_RSER_RFDFD);
+		status = dspi_dma_xfer(dspi);
+		goto out;
 	default:
 		dev_err(&dspi->pdev->dev, "unsupported trans_mode %u\n",
 			trans_mode);
@@ -733,6 +1024,13 @@ static int dspi_probe(struct platform_device *pdev)
 	if (ret)
 		goto out_master_put;
 
+	if (dspi->devtype_data->trans_mode == DSPI_DMA_MODE) {
+		if (dspi_request_dma(dspi, res->start)) {
+			dev_err(&pdev->dev, "can't get dma channels\n");
+			goto out_clk_put;
+		}
+	}
+
 	master->max_speed_hz =
 		clk_get_rate(dspi->clk) / dspi->devtype_data->max_clock_factor;
 
@@ -761,6 +1059,7 @@ static int dspi_remove(struct platform_device *pdev)
 	struct fsl_dspi *dspi = spi_master_get_devdata(master);
 
 	/* Disconnect from the SPI framework */
+	dspi_release_dma(dspi);
 	clk_disable_unprepare(dspi->clk);
 	spi_unregister_master(dspi->master);
 
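For reference, each u32 slot in the TX bounce buffer holds a complete PUSHR command word rather than raw frame data, which is why the buffer is indexed in 32-bit entries even for 8-bit frames. Below is a standalone sketch (not part of the commit) of how one such word is assembled; the SPI_PUSHR_* macro layouts are assumptions modelled on the usual Vybrid DSPI PUSHR register, and the authoritative definitions live elsewhere in drivers/spi/spi-fsl-dspi.c.

/* Standalone sketch: assembling one TX bounce-buffer entry.
 * Macro layouts below are assumed, not copied from the driver.
 */
#include <stdint.h>
#include <stdio.h>

#define SPI_PUSHR_CONT          (1u << 31)                    /* keep PCS asserted */
#define SPI_PUSHR_CTAS(x)       (((x) & 0x3u) << 28)          /* CTAR select */
#define SPI_PUSHR_PCS(x)        (((1u << (x)) & 0x3fu) << 16) /* one-hot chip select */
#define SPI_PUSHR_TXDATA(x)     ((x) & 0xffffu)               /* frame data */

int main(void)
{
        uint16_t frame = 0xA5A5;        /* hypothetical 16-bit frame */
        uint8_t cs = 0;                 /* hypothetical chip select */

        /* Non-final frames keep CONT set so PCS stays asserted between words;
         * the last frame of a round drops CONT, as in dspi_next_xfer_dma_submit(). */
        uint32_t pushr = SPI_PUSHR_TXDATA(frame) | SPI_PUSHR_PCS(cs) |
                         SPI_PUSHR_CTAS(0) | SPI_PUSHR_CONT;

        printf("PUSHR word: 0x%08x\n", (unsigned)pushr);        /* 0x8001a5a5 */
        return 0;
}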