 drivers/spi/spi-rspi.c | 140
 1 file changed, 50 insertions(+), 90 deletions(-)
diff --git a/drivers/spi/spi-rspi.c b/drivers/spi/spi-rspi.c
index 7b993f75a3cf..753ac7bdfd55 100644
--- a/drivers/spi/spi-rspi.c
+++ b/drivers/spi/spi-rspi.c
@@ -195,10 +195,6 @@ struct rspi_data {
 	int rx_irq, tx_irq;
 	const struct spi_ops *ops;
 
-	/* for dmaengine */
-	struct dma_chan *chan_tx;
-	struct dma_chan *chan_rx;
-
 	unsigned dma_callbacked:1;
 	unsigned byte_access:1;
 };
@@ -251,6 +247,7 @@ struct spi_ops {
 			    struct spi_transfer *xfer);
 	u16 mode_bits;
 	u16 flags;
+	u16 fifo_size;
 };
 
 /*
@@ -466,39 +463,16 @@ static void rspi_dma_complete(void *arg)
 	wake_up_interruptible(&rspi->wait);
 }
 
-static int rspi_dma_map_sg(struct scatterlist *sg, const void *buf,
-			   unsigned len, struct dma_chan *chan,
-			   enum dma_transfer_direction dir)
-{
-	sg_init_table(sg, 1);
-	sg_set_buf(sg, buf, len);
-	sg_dma_len(sg) = len;
-	return dma_map_sg(chan->device->dev, sg, 1, dir);
-}
-
-static void rspi_dma_unmap_sg(struct scatterlist *sg, struct dma_chan *chan,
-			      enum dma_transfer_direction dir)
-{
-	dma_unmap_sg(chan->device->dev, sg, 1, dir);
-}
-
 static int rspi_send_dma(struct rspi_data *rspi, struct spi_transfer *t)
 {
-	struct scatterlist sg;
-	const void *buf = t->tx_buf;
 	struct dma_async_tx_descriptor *desc;
-	unsigned int len = t->len;
-	int ret = 0;
-
-	if (!rspi_dma_map_sg(&sg, buf, len, rspi->chan_tx, DMA_TO_DEVICE))
-		return -EFAULT;
+	int ret;
 
-	desc = dmaengine_prep_slave_sg(rspi->chan_tx, &sg, 1, DMA_TO_DEVICE,
+	desc = dmaengine_prep_slave_sg(rspi->master->dma_tx, t->tx_sg.sgl,
+				       t->tx_sg.nents, DMA_TO_DEVICE,
 				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	if (!desc) {
-		ret = -EIO;
-		goto end;
-	}
+	if (!desc)
+		return -EIO;
 
 	/*
 	 * DMAC needs SPTIE, but if SPTIE is set, this IRQ routine will be
@@ -513,7 +487,7 @@ static int rspi_send_dma(struct rspi_data *rspi, struct spi_transfer *t)
 	desc->callback = rspi_dma_complete;
 	desc->callback_param = rspi;
 	dmaengine_submit(desc);
-	dma_async_issue_pending(rspi->chan_tx);
+	dma_async_issue_pending(rspi->master->dma_tx);
 
 	ret = wait_event_interruptible_timeout(rspi->wait,
 					       rspi->dma_callbacked, HZ);
@@ -524,9 +498,6 @@ static int rspi_send_dma(struct rspi_data *rspi, struct spi_transfer *t)
 	rspi_disable_irq(rspi, SPCR_SPTIE);
 
 	enable_irq(rspi->tx_irq);
-
-end:
-	rspi_dma_unmap_sg(&sg, rspi->chan_tx, DMA_TO_DEVICE);
 	return ret;
 }
 
@@ -562,39 +533,22 @@ static void qspi_receive_init(const struct rspi_data *rspi)
 
 static int rspi_send_receive_dma(struct rspi_data *rspi, struct spi_transfer *t)
 {
-	struct scatterlist sg_rx, sg_tx;
-	const void *tx_buf = t->tx_buf;
-	void *rx_buf = t->rx_buf;
 	struct dma_async_tx_descriptor *desc_tx, *desc_rx;
-	unsigned int len = t->len;
-	int ret = 0;
+	int ret;
 
 	/* prepare transmit transfer */
-	if (!rspi_dma_map_sg(&sg_tx, tx_buf, len, rspi->chan_tx,
-			     DMA_TO_DEVICE))
-		return -EFAULT;
-
-	desc_tx = dmaengine_prep_slave_sg(rspi->chan_tx, &sg_tx, 1,
-			DMA_TO_DEVICE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	if (!desc_tx) {
-		ret = -EIO;
-		goto end_tx_mapped;
-	}
+	desc_tx = dmaengine_prep_slave_sg(rspi->master->dma_tx, t->tx_sg.sgl,
+					  t->tx_sg.nents, DMA_TO_DEVICE,
+					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!desc_tx)
+		return -EIO;
 
 	/* prepare receive transfer */
-	if (!rspi_dma_map_sg(&sg_rx, rx_buf, len, rspi->chan_rx,
-			     DMA_FROM_DEVICE)) {
-		ret = -EFAULT;
-		goto end_tx_mapped;
-
-	}
-	desc_rx = dmaengine_prep_slave_sg(rspi->chan_rx, &sg_rx, 1,
-					  DMA_FROM_DEVICE,
+	desc_rx = dmaengine_prep_slave_sg(rspi->master->dma_rx, t->rx_sg.sgl,
+					  t->rx_sg.nents, DMA_FROM_DEVICE,
 					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	if (!desc_rx) {
-		ret = -EIO;
-		goto end;
-	}
+	if (!desc_rx)
+		return -EIO;
 
 	rspi_receive_init(rspi);
 
@@ -613,11 +567,11 @@ static int rspi_send_receive_dma(struct rspi_data *rspi, struct spi_transfer *t)
 	desc_rx->callback = rspi_dma_complete;
 	desc_rx->callback_param = rspi;
 	dmaengine_submit(desc_rx);
-	dma_async_issue_pending(rspi->chan_rx);
+	dma_async_issue_pending(rspi->master->dma_rx);
 
 	desc_tx->callback = NULL; /* No callback */
 	dmaengine_submit(desc_tx);
-	dma_async_issue_pending(rspi->chan_tx);
+	dma_async_issue_pending(rspi->master->dma_tx);
 
 	ret = wait_event_interruptible_timeout(rspi->wait,
 					       rspi->dma_callbacked, HZ);
@@ -631,19 +585,21 @@ static int rspi_send_receive_dma(struct rspi_data *rspi, struct spi_transfer *t)
 	if (rspi->rx_irq != rspi->tx_irq)
 		enable_irq(rspi->rx_irq);
 
-end:
-	rspi_dma_unmap_sg(&sg_rx, rspi->chan_rx, DMA_FROM_DEVICE);
-end_tx_mapped:
-	rspi_dma_unmap_sg(&sg_tx, rspi->chan_tx, DMA_TO_DEVICE);
 	return ret;
 }
 
-static int rspi_is_dma(const struct rspi_data *rspi, struct spi_transfer *t)
+static bool __rspi_can_dma(const struct rspi_data *rspi,
+			   const struct spi_transfer *xfer)
 {
-	if (rspi->chan_tx)
-		return 1;
+	return xfer->len > rspi->ops->fifo_size;
+}
 
-	return 0;
+static bool rspi_can_dma(struct spi_master *master, struct spi_device *spi,
+			 struct spi_transfer *xfer)
+{
+	struct rspi_data *rspi = spi_master_get_devdata(master);
+
+	return __rspi_can_dma(rspi, xfer);
 }
 
 static int rspi_transfer_out_in(struct rspi_data *rspi,
@@ -676,7 +632,7 @@ static int rspi_transfer_one(struct spi_master *master, struct spi_device *spi,
 {
 	struct rspi_data *rspi = spi_master_get_devdata(master);
 
-	if (!rspi_is_dma(rspi, xfer))
+	if (!master->can_dma || !__rspi_can_dma(rspi, xfer))
 		return rspi_transfer_out_in(rspi, xfer);
 
 	if (xfer->rx_buf)
@@ -976,7 +932,7 @@ static struct dma_chan *rspi_request_dma_chan(struct device *dev,
 	return chan;
 }
 
-static int rspi_request_dma(struct device *dev, struct rspi_data *rspi,
+static int rspi_request_dma(struct device *dev, struct spi_master *master,
 			    const struct resource *res)
 {
 	const struct rspi_plat_data *rspi_pd = dev_get_platdata(dev);
@@ -984,31 +940,32 @@ static int rspi_request_dma(struct device *dev, struct rspi_data *rspi,
 	if (!rspi_pd || !rspi_pd->dma_rx_id || !rspi_pd->dma_tx_id)
 		return 0; /* The driver assumes no error. */
 
-	rspi->chan_rx = rspi_request_dma_chan(dev, DMA_DEV_TO_MEM,
-					      rspi_pd->dma_rx_id,
-					      res->start + RSPI_SPDR);
-	if (!rspi->chan_rx)
+	master->dma_rx = rspi_request_dma_chan(dev, DMA_DEV_TO_MEM,
+					       rspi_pd->dma_rx_id,
+					       res->start + RSPI_SPDR);
+	if (!master->dma_rx)
 		return -ENODEV;
 
-	rspi->chan_tx = rspi_request_dma_chan(dev, DMA_MEM_TO_DEV,
-					      rspi_pd->dma_tx_id,
-					      res->start + RSPI_SPDR);
-	if (!rspi->chan_tx) {
-		dma_release_channel(rspi->chan_rx);
-		rspi->chan_rx = NULL;
+	master->dma_tx = rspi_request_dma_chan(dev, DMA_MEM_TO_DEV,
+					       rspi_pd->dma_tx_id,
+					       res->start + RSPI_SPDR);
+	if (!master->dma_tx) {
+		dma_release_channel(master->dma_rx);
+		master->dma_rx = NULL;
 		return -ENODEV;
 	}
 
+	master->can_dma = rspi_can_dma;
 	dev_info(dev, "DMA available");
 	return 0;
 }
 
 static void rspi_release_dma(struct rspi_data *rspi)
 {
-	if (rspi->chan_tx)
-		dma_release_channel(rspi->chan_tx);
-	if (rspi->chan_rx)
-		dma_release_channel(rspi->chan_rx);
+	if (rspi->master->dma_tx)
+		dma_release_channel(rspi->master->dma_tx);
+	if (rspi->master->dma_rx)
+		dma_release_channel(rspi->master->dma_rx);
 }
 
 static int rspi_remove(struct platform_device *pdev)
@@ -1026,6 +983,7 @@ static const struct spi_ops rspi_ops = {
 	.transfer_one = rspi_transfer_one,
 	.mode_bits = SPI_CPHA | SPI_CPOL | SPI_LOOP,
 	.flags = SPI_MASTER_MUST_TX,
+	.fifo_size = 8,
 };
 
 static const struct spi_ops rspi_rz_ops = {
@@ -1033,6 +991,7 @@ static const struct spi_ops rspi_rz_ops = {
 	.transfer_one = rspi_rz_transfer_one,
 	.mode_bits = SPI_CPHA | SPI_CPOL | SPI_LOOP,
 	.flags = SPI_MASTER_MUST_RX | SPI_MASTER_MUST_TX,
+	.fifo_size = 8, /* 8 for TX, 32 for RX */
 };
 
 static const struct spi_ops qspi_ops = {
@@ -1042,6 +1001,7 @@ static const struct spi_ops qspi_ops = {
 			  SPI_TX_DUAL | SPI_TX_QUAD |
 			  SPI_RX_DUAL | SPI_RX_QUAD,
 	.flags = SPI_MASTER_MUST_RX | SPI_MASTER_MUST_TX,
+	.fifo_size = 32,
 };
 
 #ifdef CONFIG_OF
@@ -1199,7 +1159,7 @@ static int rspi_probe(struct platform_device *pdev)
 		goto error2;
 	}
 
-	ret = rspi_request_dma(&pdev->dev, rspi, res);
+	ret = rspi_request_dma(&pdev->dev, master, res);
 	if (ret < 0)
 		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
 
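For reference, a minimal sketch (not part of the patch) of how a controller driver plugs into the SPI core DMA mapping framework that this change switches to: when master->can_dma() returns true for a transfer, the core DMA-maps the buffers and fills in xfer->tx_sg/rx_sg before calling ->transfer_one(), so the driver only builds dmaengine descriptors from the ready-made scatterlists. The demo_* names and the FIFO threshold below are illustrative assumptions, not code from spi-rspi.c; completion handling is omitted.

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

/* Only use DMA for transfers that do not fit the (assumed) FIFO. */
static bool demo_can_dma(struct spi_master *master, struct spi_device *spi,
			 struct spi_transfer *xfer)
{
	return xfer->len > 32;
}

/* Called by the SPI core; xfer->tx_sg has already been DMA-mapped. */
static int demo_transfer_one(struct spi_master *master, struct spi_device *spi,
			     struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_sg(master->dma_tx, xfer->tx_sg.sgl,
				       xfer->tx_sg.nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EIO;

	dmaengine_submit(desc);
	dma_async_issue_pending(master->dma_tx);
	return 0;	/* waiting for DMA completion omitted for brevity */
}

The driver wires this up by assigning the requested dmaengine channels to master->dma_tx/dma_rx and setting master->can_dma before registering the master, which is exactly what rspi_request_dma() does in the patch above.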