path: root/drivers/spi/spi-sirf.c
author		Qipan Li <Qipan.Li@csr.com>	2014-04-15 03:24:59 -0400
committer	Mark Brown <broonie@linaro.org>	2014-04-15 07:40:15 -0400
commit		c908ef345d2c314ec25cfac113a8f9bb2b6b3a25 (patch)
tree		18b314cb307107bb8578e6085ca92e7b079936d7 /drivers/spi/spi-sirf.c
parent		bf83fd6402a856eeb9a22c364c50ccf9bbdf9b17 (diff)
spi: sirf: refactor spi transfer functions
split sirfsoc_spi_transfer function into 3 sub-functions:
spi_sirfsoc_cmd_transfer, spi_sirfsoc_pio_transfer and
spi_sirfsoc_dma_transfer.

Signed-off-by: Qipan Li <Qipan.Li@csr.com>
Signed-off-by: Barry Song <Baohua.Song@csr.com>
Signed-off-by: Mark Brown <broonie@linaro.org>
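The net effect of the split is that the transfer entry point becomes a thin dispatcher over the three new helpers. As a minimal, self-contained C sketch of that dispatch shape only (the struct, field and helper names below are illustrative stand-ins, not the driver's real API):

	#include <stdbool.h>
	#include <stdio.h>

	/* Stand-in for the driver's per-transfer state; fields are illustrative. */
	struct xfer {
		int len;	/* transfer length in bytes */
		bool tx_by_cmd;	/* short transfer that fits the command register */
		bool dma_ok;	/* buffers/length suitable for DMA */
	};

	static int cmd_transfer(struct xfer *t) { printf("cmd path: %d bytes\n", t->len); return t->len; }
	static int dma_transfer(struct xfer *t) { printf("dma path: %d bytes\n", t->len); return t->len; }
	static int pio_transfer(struct xfer *t) { printf("pio path: %d bytes\n", t->len); return t->len; }

	/* Mirrors the shape of the refactored entry point: pick exactly one path. */
	static int do_transfer(struct xfer *t)
	{
		if (t->tx_by_cmd)
			return cmd_transfer(t);
		else if (t->dma_ok)
			return dma_transfer(t);
		return pio_transfer(t);
	}

	int main(void)
	{
		struct xfer t = { .len = 4, .tx_by_cmd = false, .dma_ok = true };
		return do_transfer(&t) == t.len ? 0 : 1;
	}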
Diffstat (limited to 'drivers/spi/spi-sirf.c')
-rw-r--r--	drivers/spi/spi-sirf.c	232
1 file changed, 123 insertions, 109 deletions
diff --git a/drivers/spi/spi-sirf.c b/drivers/spi/spi-sirf.c
index 0c039d4542a5..2d238990115c 100644
--- a/drivers/spi/spi-sirf.c
+++ b/drivers/spi/spi-sirf.c
@@ -10,6 +10,7 @@
 #include <linux/kernel.h>
 #include <linux/slab.h>
 #include <linux/clk.h>
+#include <linux/completion.h>
 #include <linux/interrupt.h>
 #include <linux/io.h>
 #include <linux/of.h>
@@ -309,59 +310,51 @@ static void spi_sirfsoc_dma_fini_callback(void *data)
 	complete(dma_complete);
 }
 
-static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
+static int spi_sirfsoc_cmd_transfer(struct spi_device *spi,
+	struct spi_transfer *t)
 {
 	struct sirfsoc_spi *sspi;
 	int timeout = t->len * 10;
-	sspi = spi_master_get_devdata(spi->master);
+	u32 cmd;
 
-	sspi->tx = t->tx_buf ? t->tx_buf : sspi->dummypage;
-	sspi->rx = t->rx_buf ? t->rx_buf : sspi->dummypage;
-	sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
-	reinit_completion(&sspi->rx_done);
-	reinit_completion(&sspi->tx_done);
-
-	writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);
-
-	/*
-	 * fill tx_buf into command register and wait for its completion
-	 */
-	if (sspi->tx_by_cmd) {
-		u32 cmd;
-		memcpy(&cmd, sspi->tx, t->len);
-
-		if (sspi->word_width == 1 && !(spi->mode & SPI_LSB_FIRST))
-			cmd = cpu_to_be32(cmd) >>
-				((SIRFSOC_MAX_CMD_BYTES - t->len) * 8);
-		if (sspi->word_width == 2 && t->len == 4 &&
-				(!(spi->mode & SPI_LSB_FIRST)))
-			cmd = ((cmd & 0xffff) << 16) | (cmd >> 16);
-
-		writel(cmd, sspi->base + SIRFSOC_SPI_CMD);
-		writel(SIRFSOC_SPI_FRM_END_INT_EN,
-			sspi->base + SIRFSOC_SPI_INT_EN);
-		writel(SIRFSOC_SPI_CMD_TX_EN,
-			sspi->base + SIRFSOC_SPI_TX_RX_EN);
+	sspi = spi_master_get_devdata(spi->master);
+	memcpy(&cmd, sspi->tx, t->len);
+	if (sspi->word_width == 1 && !(spi->mode & SPI_LSB_FIRST))
+		cmd = cpu_to_be32(cmd) >>
+			((SIRFSOC_MAX_CMD_BYTES - t->len) * 8);
+	if (sspi->word_width == 2 && t->len == 4 &&
+		(!(spi->mode & SPI_LSB_FIRST)))
+		cmd = ((cmd & 0xffff) << 16) | (cmd >> 16);
+	writel(cmd, sspi->base + SIRFSOC_SPI_CMD);
+	writel(SIRFSOC_SPI_FRM_END_INT_EN,
+		sspi->base + SIRFSOC_SPI_INT_EN);
+	writel(SIRFSOC_SPI_CMD_TX_EN,
+		sspi->base + SIRFSOC_SPI_TX_RX_EN);
+	if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
+		dev_err(&spi->dev, "cmd transfer timeout\n");
+		return 0;
+	}
 
-		if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
-			dev_err(&spi->dev, "transfer timeout\n");
-			return 0;
-		}
+	return t->len;
+}
 
-		return t->len;
-	}
+static void spi_sirfsoc_dma_transfer(struct spi_device *spi,
+	struct spi_transfer *t)
+{
+	struct sirfsoc_spi *sspi;
+	struct dma_async_tx_descriptor *rx_desc, *tx_desc;
+	int timeout = t->len * 10;
 
-	if (sspi->left_tx_word == 1) {
-		writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
-			SIRFSOC_SPI_ENA_AUTO_CLR,
-			sspi->base + SIRFSOC_SPI_CTRL);
-		writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
-		writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
-	} else if ((sspi->left_tx_word > 1) && (sspi->left_tx_word <
-			SIRFSOC_SPI_DAT_FRM_LEN_MAX)) {
+	sspi = spi_master_get_devdata(spi->master);
+	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
+	writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
+	writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);
+	if (sspi->left_tx_word < SIRFSOC_SPI_DAT_FRM_LEN_MAX) {
 		writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
-			SIRFSOC_SPI_MUL_DAT_MODE |
-			SIRFSOC_SPI_ENA_AUTO_CLR,
+			SIRFSOC_SPI_ENA_AUTO_CLR | SIRFSOC_SPI_MUL_DAT_MODE,
 			sspi->base + SIRFSOC_SPI_CTRL);
 		writel(sspi->left_tx_word - 1,
 			sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
@@ -373,88 +366,109 @@ static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
 		writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
 		writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
 	}
-
-	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
-	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
-	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
-	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
-
-	if (IS_DMA_VALID(t)) {
-		struct dma_async_tx_descriptor *rx_desc, *tx_desc;
+	sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len,
+		(t->tx_buf != t->rx_buf) ?
+		DMA_FROM_DEVICE : DMA_BIDIRECTIONAL);
+	rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
+		sspi->dst_start, t->len, DMA_DEV_TO_MEM,
+		DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	rx_desc->callback = spi_sirfsoc_dma_fini_callback;
+	rx_desc->callback_param = &sspi->rx_done;
 
-		sspi->dst_start = dma_map_single(&spi->dev,
-			sspi->rx, t->len, (t->tx_buf != t->rx_buf) ?
-			DMA_FROM_DEVICE : DMA_BIDIRECTIONAL);
-		rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
-			sspi->dst_start, t->len, DMA_DEV_TO_MEM,
-			DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-		rx_desc->callback = spi_sirfsoc_dma_fini_callback;
-		rx_desc->callback_param = &sspi->rx_done;
+	sspi->src_start = dma_map_single(&spi->dev, (void *)sspi->tx, t->len,
+		(t->tx_buf != t->rx_buf) ?
+		DMA_TO_DEVICE : DMA_BIDIRECTIONAL);
+	tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
+		sspi->src_start, t->len, DMA_MEM_TO_DEV,
+		DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	tx_desc->callback = spi_sirfsoc_dma_fini_callback;
+	tx_desc->callback_param = &sspi->tx_done;
 
-		sspi->src_start = dma_map_single(&spi->dev,
-			(void *)sspi->tx, t->len,
-			(t->tx_buf != t->rx_buf) ?
-			DMA_TO_DEVICE : DMA_BIDIRECTIONAL);
-		tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
-			sspi->src_start, t->len, DMA_MEM_TO_DEV,
-			DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-		tx_desc->callback = spi_sirfsoc_dma_fini_callback;
-		tx_desc->callback_param = &sspi->tx_done;
-
-		dmaengine_submit(tx_desc);
-		dmaengine_submit(rx_desc);
-		dma_async_issue_pending(sspi->tx_chan);
-		dma_async_issue_pending(sspi->rx_chan);
-	} else {
-		/* Send the first word to trigger the whole tx/rx process */
-		sspi->tx_word(sspi);
-
-		writel(SIRFSOC_SPI_RX_OFLOW_INT_EN |
-			SIRFSOC_SPI_TX_UFLOW_INT_EN |
-			SIRFSOC_SPI_RXFIFO_THD_INT_EN |
-			SIRFSOC_SPI_TXFIFO_THD_INT_EN |
-			SIRFSOC_SPI_FRM_END_INT_EN |
-			SIRFSOC_SPI_RXFIFO_FULL_INT_EN |
-			SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN,
-			sspi->base + SIRFSOC_SPI_INT_EN);
-	}
-
+	dmaengine_submit(tx_desc);
+	dmaengine_submit(rx_desc);
+	dma_async_issue_pending(sspi->tx_chan);
+	dma_async_issue_pending(sspi->rx_chan);
 	writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
 		sspi->base + SIRFSOC_SPI_TX_RX_EN);
-
-	if (!IS_DMA_VALID(t)) { /* for PIO */
-		if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0)
-			dev_err(&spi->dev, "transfer timeout\n");
-	} else if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
+	if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
 		dev_err(&spi->dev, "transfer timeout\n");
 		dmaengine_terminate_all(sspi->rx_chan);
 	} else
 		sspi->left_rx_word = 0;
-
 	/*
 	 * we only wait tx-done event if transferring by DMA. for PIO,
 	 * we get rx data by writing tx data, so if rx is done, tx has
 	 * done earlier
 	 */
-	if (IS_DMA_VALID(t)) {
-		if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
-			dev_err(&spi->dev, "transfer timeout\n");
-			dmaengine_terminate_all(sspi->tx_chan);
-		}
+	if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
+		dev_err(&spi->dev, "transfer timeout\n");
+		dmaengine_terminate_all(sspi->tx_chan);
 	}
+	dma_unmap_single(&spi->dev, sspi->src_start, t->len, DMA_TO_DEVICE);
+	dma_unmap_single(&spi->dev, sspi->dst_start, t->len, DMA_FROM_DEVICE);
+	/* TX, RX FIFO stop */
+	writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
+	writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
+	if (sspi->left_tx_word >= SIRFSOC_SPI_DAT_FRM_LEN_MAX)
+		writel(0, sspi->base + SIRFSOC_SPI_TX_RX_EN);
+}
 
-	if (IS_DMA_VALID(t)) {
-		dma_unmap_single(&spi->dev,
-			sspi->src_start, t->len, DMA_TO_DEVICE);
-		dma_unmap_single(&spi->dev,
-			sspi->dst_start, t->len, DMA_FROM_DEVICE);
-	}
+static void spi_sirfsoc_pio_transfer(struct spi_device *spi,
+	struct spi_transfer *t)
+{
+	struct sirfsoc_spi *sspi;
+	int timeout = t->len * 10;
 
-	/* TX, RX FIFO stop */
+	sspi = spi_master_get_devdata(spi->master);
+	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
+	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
+	writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
+	writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);
+	writel(readl(sspi->base + SIRFSOC_SPI_CTRL) | SIRFSOC_SPI_MUL_DAT_MODE |
+		SIRFSOC_SPI_ENA_AUTO_CLR, sspi->base + SIRFSOC_SPI_CTRL);
+	writel(sspi->left_tx_word - 1,
+		sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
+	writel(sspi->left_rx_word - 1,
+		sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
+	sspi->tx_word(sspi);
+	writel(SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN | SIRFSOC_SPI_TX_UFLOW_INT_EN |
+		SIRFSOC_SPI_RX_OFLOW_INT_EN | SIRFSOC_SPI_RXFIFO_THD_INT_EN |
+		SIRFSOC_SPI_TXFIFO_THD_INT_EN | SIRFSOC_SPI_FRM_END_INT_EN|
+		SIRFSOC_SPI_RXFIFO_FULL_INT_EN,
+		sspi->base + SIRFSOC_SPI_INT_EN);
+	writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
+		sspi->base + SIRFSOC_SPI_TX_RX_EN);
+	if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0)
+		dev_err(&spi->dev, "transfer timeout\n");
 	writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
 	writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
 	writel(0, sspi->base + SIRFSOC_SPI_TX_RX_EN);
 	writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
+}
+
+static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
+{
+	struct sirfsoc_spi *sspi;
+	sspi = spi_master_get_devdata(spi->master);
+
+	sspi->tx = t->tx_buf ? t->tx_buf : sspi->dummypage;
+	sspi->rx = t->rx_buf ? t->rx_buf : sspi->dummypage;
+	sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
+	reinit_completion(&sspi->rx_done);
+	reinit_completion(&sspi->tx_done);
+	/*
+	 * in the transfer, if transfer data using command register with rx_buf
+	 * null, just fill command data into command register and wait for its
+	 * completion.
+	 */
+	if (sspi->tx_by_cmd)
+		spi_sirfsoc_cmd_transfer(spi, t);
+	else if (IS_DMA_VALID(t))
+		spi_sirfsoc_dma_transfer(spi, t);
+	else
+		spi_sirfsoc_pio_transfer(spi, t);
 
 	return t->len - sspi->left_rx_word * sspi->word_width;
 }