path: root/drivers/dma/pch_dma.c
author	Vinod Koul <vinod.koul@linux.intel.com>	2011-10-13 13:04:23 -0400
committer	Vinod Koul <vinod.koul@linux.intel.com>	2011-10-27 11:23:43 -0400
commit	db8196df4bb6f117caa163aa73b0f16fd62290bd (patch)
tree	b86531031482037d9b31ad57479f2f7091020957 /drivers/dma/pch_dma.c
parent	49920bc66984a512f4bcc7735a61642cd0e4d6f2 (diff)
dmaengine: move drivers to dma_transfer_direction
Fix up usage of DMA direction by introducing dma_transfer_direction;
this patch moves drivers/dma/* to use the new enum.

Cc: Jassi Brar <jaswinder.singh@linaro.org>
Cc: Russell King <rmk+kernel@arm.linux.org.uk>
Cc: Viresh Kumar <viresh.kumar@st.com>
Cc: Linus Walleij <linus.walleij@linaro.org>
Cc: Nicolas Ferre <nicolas.ferre@atmel.com>
Cc: Mika Westerberg <mika.westerberg@iki.fi>
Cc: H Hartley Sweeten <hartleys@visionengravers.com>
Cc: Li Yang <leoli@freescale.com>
Cc: Zhang Wei <zw@zh-kernel.org>
Cc: Sascha Hauer <s.hauer@pengutronix.de>
Cc: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Cc: Shawn Guo <shawn.guo@freescale.com>
Cc: Yong Wang <yong.y.wang@intel.com>
Cc: Tomoya MORINAGA <tomoya-linux@dsn.lapis-semi.com>
Cc: Boojin Kim <boojin.kim@samsung.com>
Cc: Barry Song <Baohua.Song@csr.com>
Acked-by: Mika Westerberg <mika.westerberg@iki.fi>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Viresh Kumar <viresh.kumar@st.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Vinod Koul <vinod.koul@linux.intel.com>
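For context, here is a minimal sketch of the two enums this patch converts between. This is a reconstruction from the kernel headers of that era (dma_data_direction belongs to the DMA-mapping API; dma_transfer_direction was added to include/linux/dmaengine.h by the parent commit 49920bc6), not text quoted from this page:

	enum dma_data_direction {	/* DMA-mapping API: cache/ownership semantics */
		DMA_BIDIRECTIONAL = 0,
		DMA_TO_DEVICE = 1,	/* memory -> device */
		DMA_FROM_DEVICE = 2,	/* device -> memory */
		DMA_NONE = 3,
	};

	enum dma_transfer_direction {	/* dmaengine: endpoint topology of a transfer */
		DMA_MEM_TO_MEM,
		DMA_MEM_TO_DEV,		/* replaces DMA_TO_DEVICE in slave code */
		DMA_DEV_TO_MEM,		/* replaces DMA_FROM_DEVICE in slave code */
		DMA_DEV_TO_DEV,
		DMA_TRANS_NONE,
	};

The split matters because dma_data_direction describes mapping and cache-ownership semantics, while slave DMA code needs to state which endpoint is memory and which is a device; the conversions in the diff below are the mechanical DMA_TO_DEVICE -> DMA_MEM_TO_DEV and DMA_FROM_DEVICE -> DMA_DEV_TO_MEM renames.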
Diffstat (limited to 'drivers/dma/pch_dma.c')
 drivers/dma/pch_dma.c | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/drivers/dma/pch_dma.c b/drivers/dma/pch_dma.c
index a6d0e3dbed07..9944e8295498 100644
--- a/drivers/dma/pch_dma.c
+++ b/drivers/dma/pch_dma.c
@@ -99,7 +99,7 @@ struct pch_dma_desc {
 struct pch_dma_chan {
 	struct dma_chan		chan;
 	void __iomem *membase;
-	enum dma_data_direction	dir;
+	enum dma_transfer_direction	dir;
 	struct tasklet_struct	tasklet;
 	unsigned long		err_status;
 
@@ -224,7 +224,7 @@ static void pdc_set_dir(struct dma_chan *chan)
 		mask_ctl = DMA_MASK_CTL0_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 				       (DMA_CTL0_BITS_PER_CH * chan->chan_id));
 		val &= mask_mode;
-		if (pd_chan->dir == DMA_TO_DEVICE)
+		if (pd_chan->dir == DMA_MEM_TO_DEV)
 			val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
 				       DMA_CTL0_DIR_SHIFT_BITS);
 		else
@@ -242,7 +242,7 @@ static void pdc_set_dir(struct dma_chan *chan)
 		mask_ctl = DMA_MASK_CTL2_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 						 (DMA_CTL0_BITS_PER_CH * ch));
 		val &= mask_mode;
-		if (pd_chan->dir == DMA_TO_DEVICE)
+		if (pd_chan->dir == DMA_MEM_TO_DEV)
 			val |= 0x1 << (DMA_CTL0_BITS_PER_CH * ch +
 				       DMA_CTL0_DIR_SHIFT_BITS);
 		else
@@ -607,7 +607,7 @@ static void pd_issue_pending(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan,
 			struct scatterlist *sgl, unsigned int sg_len,
-			enum dma_data_direction direction, unsigned long flags)
+			enum dma_transfer_direction direction, unsigned long flags)
 {
 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
 	struct pch_dma_slave *pd_slave = chan->private;
@@ -623,9 +623,9 @@ static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan,
 		return NULL;
 	}
 
-	if (direction == DMA_FROM_DEVICE)
+	if (direction == DMA_DEV_TO_MEM)
 		reg = pd_slave->rx_reg;
-	else if (direction == DMA_TO_DEVICE)
+	else if (direction == DMA_MEM_TO_DEV)
 		reg = pd_slave->tx_reg;
 	else
 		return NULL;
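To see the converted signature in use, here is a hedged client-side sketch of submitting a memory-to-device slave transfer against the post-patch API. Everything prefixed my_ is hypothetical, and clients of this 2011-era API invoked the device_prep_slave_sg hook directly (the dmaengine_prep_slave_sg() wrapper came later):

	#include <linux/dmaengine.h>

	/* Hypothetical client: queue a memory -> peripheral transfer on a
	 * slave channel obtained elsewhere (e.g. via dma_request_channel()). */
	static int my_submit_tx(struct dma_chan *chan,
				struct scatterlist *sgl, unsigned int sg_len)
	{
		struct dma_async_tx_descriptor *desc;

		/* After this patch, slave directions use dma_transfer_direction:
		 * DMA_MEM_TO_DEV here is what used to be spelled DMA_TO_DEVICE. */
		desc = chan->device->device_prep_slave_sg(chan, sgl, sg_len,
							  DMA_MEM_TO_DEV,
							  DMA_PREP_INTERRUPT);
		if (!desc)
			return -ENOMEM;	/* pd_prep_slave_sg() returns NULL on a bad direction */

		dmaengine_submit(desc);		/* hand the descriptor to the engine */
		dma_async_issue_pending(chan);	/* kick the channel */
		return 0;
	}

Note the defensive else-return-NULL in the final hunk above: with the new enum, a DMA_MEM_TO_MEM or DMA_DEV_TO_DEV request simply fails preparation, since this controller's slave channels only move data between memory and the peripheral's rx_reg/tx_reg.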