aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorVinod Koul <vinod.koul@intel.com>2017-05-04 06:38:21 -0400
committerVinod Koul <vinod.koul@intel.com>2017-05-04 06:38:21 -0400
commitb7b0201a6743c4d9b634da68ad8bb94db28b8e69 (patch)
treef3188634f590f8fb821aa904be69dec3c7b3045b
parent3378e7a49fda9537d48953cd41cf7836c472ba2f (diff)
parentdb6a3d03e22f3daf09d8d1532bd1d3747062a561 (diff)
Merge branch 'topic/xilinx' into for-linus
-rw-r--r-- drivers/dma/xilinx/xilinx_dma.c | 63
1 file changed, 35 insertions(+), 28 deletions(-)
diff --git a/drivers/dma/xilinx/xilinx_dma.c b/drivers/dma/xilinx/xilinx_dma.c
index 8288fe4d17c3..8cf87b1a284b 100644
--- a/drivers/dma/xilinx/xilinx_dma.c
+++ b/drivers/dma/xilinx/xilinx_dma.c
@@ -331,6 +331,7 @@ struct xilinx_dma_tx_descriptor {
331 * @seg_v: Statically allocated segments base 331 * @seg_v: Statically allocated segments base
332 * @cyclic_seg_v: Statically allocated segment base for cyclic transfers 332 * @cyclic_seg_v: Statically allocated segment base for cyclic transfers
333 * @start_transfer: Differentiate b/w DMA IP's transfer 333 * @start_transfer: Differentiate b/w DMA IP's transfer
334 * @stop_transfer: Differentiate b/w DMA IP's quiesce
334 */ 335 */
335struct xilinx_dma_chan { 336struct xilinx_dma_chan {
336 struct xilinx_dma_device *xdev; 337 struct xilinx_dma_device *xdev;
@@ -361,6 +362,7 @@ struct xilinx_dma_chan {
361 struct xilinx_axidma_tx_segment *seg_v; 362 struct xilinx_axidma_tx_segment *seg_v;
362 struct xilinx_axidma_tx_segment *cyclic_seg_v; 363 struct xilinx_axidma_tx_segment *cyclic_seg_v;
363 void (*start_transfer)(struct xilinx_dma_chan *chan); 364 void (*start_transfer)(struct xilinx_dma_chan *chan);
365 int (*stop_transfer)(struct xilinx_dma_chan *chan);
364 u16 tdest; 366 u16 tdest;
365}; 367};
366 368
@@ -946,26 +948,32 @@ static bool xilinx_dma_is_idle(struct xilinx_dma_chan *chan)
946} 948}
947 949
948/** 950/**
949 * xilinx_dma_halt - Halt DMA channel 951 * xilinx_dma_stop_transfer - Halt DMA channel
950 * @chan: Driver specific DMA channel 952 * @chan: Driver specific DMA channel
951 */ 953 */
952static void xilinx_dma_halt(struct xilinx_dma_chan *chan) 954static int xilinx_dma_stop_transfer(struct xilinx_dma_chan *chan)
953{ 955{
954 int err;
955 u32 val; 956 u32 val;
956 957
957 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP); 958 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);
958 959
959 /* Wait for the hardware to halt */ 960 /* Wait for the hardware to halt */
960 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val, 961 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
961 (val & XILINX_DMA_DMASR_HALTED), 0, 962 val & XILINX_DMA_DMASR_HALTED, 0,
962 XILINX_DMA_LOOP_COUNT); 963 XILINX_DMA_LOOP_COUNT);
964}
963 965
964 if (err) { 966/**
965 dev_err(chan->dev, "Cannot stop channel %p: %x\n", 967 * xilinx_cdma_stop_transfer - Wait for the current transfer to complete
966 chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR)); 968 * @chan: Driver specific DMA channel
967 chan->err = true; 969 */
968 } 970static int xilinx_cdma_stop_transfer(struct xilinx_dma_chan *chan)
971{
972 u32 val;
973
974 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
975 val & XILINX_DMA_DMASR_IDLE, 0,
976 XILINX_DMA_LOOP_COUNT);
969} 977}
970 978
971/** 979/**
@@ -1653,7 +1661,7 @@ xilinx_cdma_prep_memcpy(struct dma_chan *dchan, dma_addr_t dma_dst,
1653{ 1661{
1654 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); 1662 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1655 struct xilinx_dma_tx_descriptor *desc; 1663 struct xilinx_dma_tx_descriptor *desc;
1656 struct xilinx_cdma_tx_segment *segment, *prev; 1664 struct xilinx_cdma_tx_segment *segment;
1657 struct xilinx_cdma_desc_hw *hw; 1665 struct xilinx_cdma_desc_hw *hw;
1658 1666
1659 if (!len || len > XILINX_DMA_MAX_TRANS_LEN) 1667 if (!len || len > XILINX_DMA_MAX_TRANS_LEN)
@@ -1680,21 +1688,11 @@ xilinx_cdma_prep_memcpy(struct dma_chan *dchan, dma_addr_t dma_dst,
1680 hw->dest_addr_msb = upper_32_bits(dma_dst); 1688 hw->dest_addr_msb = upper_32_bits(dma_dst);
1681 } 1689 }
1682 1690
1683 /* Fill the previous next descriptor with current */
1684 prev = list_last_entry(&desc->segments,
1685 struct xilinx_cdma_tx_segment, node);
1686 prev->hw.next_desc = segment->phys;
1687
1688 /* Insert the segment into the descriptor segments list. */ 1691 /* Insert the segment into the descriptor segments list. */
1689 list_add_tail(&segment->node, &desc->segments); 1692 list_add_tail(&segment->node, &desc->segments);
1690 1693
1691 prev = segment;
1692
1693 /* Link the last hardware descriptor with the first. */
1694 segment = list_first_entry(&desc->segments,
1695 struct xilinx_cdma_tx_segment, node);
1696 desc->async_tx.phys = segment->phys; 1694 desc->async_tx.phys = segment->phys;
1697 prev->hw.next_desc = segment->phys; 1695 hw->next_desc = segment->phys;
1698 1696
1699 return &desc->async_tx; 1697 return &desc->async_tx;
1700 1698
@@ -2003,12 +2001,17 @@ static int xilinx_dma_terminate_all(struct dma_chan *dchan)
2003{ 2001{
2004 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); 2002 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2005 u32 reg; 2003 u32 reg;
2004 int err;
2006 2005
2007 if (chan->cyclic) 2006 if (chan->cyclic)
2008 xilinx_dma_chan_reset(chan); 2007 xilinx_dma_chan_reset(chan);
2009 2008
2010 /* Halt the DMA engine */ 2009 err = chan->stop_transfer(chan);
2011 xilinx_dma_halt(chan); 2010 if (err) {
2011 dev_err(chan->dev, "Cannot stop channel %p: %x\n",
2012 chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
2013 chan->err = true;
2014 }
2012 2015
2013 /* Remove and free all of the descriptors in the lists */ 2016 /* Remove and free all of the descriptors in the lists */
2014 xilinx_dma_free_descriptors(chan); 2017 xilinx_dma_free_descriptors(chan);
@@ -2397,12 +2400,16 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
2397 return err; 2400 return err;
2398 } 2401 }
2399 2402
2400 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) 2403 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
2401 chan->start_transfer = xilinx_dma_start_transfer; 2404 chan->start_transfer = xilinx_dma_start_transfer;
2402 else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) 2405 chan->stop_transfer = xilinx_dma_stop_transfer;
2406 } else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
2403 chan->start_transfer = xilinx_cdma_start_transfer; 2407 chan->start_transfer = xilinx_cdma_start_transfer;
2404 else 2408 chan->stop_transfer = xilinx_cdma_stop_transfer;
2409 } else {
2405 chan->start_transfer = xilinx_vdma_start_transfer; 2410 chan->start_transfer = xilinx_vdma_start_transfer;
2411 chan->stop_transfer = xilinx_dma_stop_transfer;
2412 }
2406 2413
2407 /* Initialize the tasklet */ 2414 /* Initialize the tasklet */
2408 tasklet_init(&chan->tasklet, xilinx_dma_do_tasklet, 2415 tasklet_init(&chan->tasklet, xilinx_dma_do_tasklet,