aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/dma/ste_dma40.c
diff options
context:
space:
mode:
authorVinod Koul <vinod.koul@linux.intel.com>2011-10-13 13:04:23 -0400
committerVinod Koul <vinod.koul@linux.intel.com>2011-10-27 11:23:43 -0400
commitdb8196df4bb6f117caa163aa73b0f16fd62290bd (patch)
treeb86531031482037d9b31ad57479f2f7091020957 /drivers/dma/ste_dma40.c
parent49920bc66984a512f4bcc7735a61642cd0e4d6f2 (diff)
dmaengine: move drivers to dma_transfer_direction
fixup usage of dma direction by introducing dma_transfer_direction; this patch moves drivers/dma/* to use the new enum Cc: Jassi Brar <jaswinder.singh@linaro.org> Cc: Russell King <rmk+kernel@arm.linux.org.uk> Cc: Viresh Kumar <viresh.kumar@st.com> Cc: Linus Walleij <linus.walleij@linaro.org> Cc: Nicolas Ferre <nicolas.ferre@atmel.com> Cc: Mika Westerberg <mika.westerberg@iki.fi> Cc: H Hartley Sweeten <hartleys@visionengravers.com> Cc: Li Yang <leoli@freescale.com> Cc: Zhang Wei <zw@zh-kernel.org> Cc: Sascha Hauer <s.hauer@pengutronix.de> Cc: Guennadi Liakhovetski <g.liakhovetski@gmx.de> Cc: Shawn Guo <shawn.guo@freescale.com> Cc: Yong Wang <yong.y.wang@intel.com> Cc: Tomoya MORINAGA <tomoya-linux@dsn.lapis-semi.com> Cc: Boojin Kim <boojin.kim@samsung.com> Cc: Barry Song <Baohua.Song@csr.com> Acked-by: Mika Westerberg <mika.westerberg@iki.fi> Acked-by: Linus Walleij <linus.walleij@linaro.org> Acked-by: Viresh Kumar <viresh.kumar@st.com> Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com> Signed-off-by: Vinod Koul <vinod.koul@linux.intel.com>
Diffstat (limited to 'drivers/dma/ste_dma40.c')
-rw-r--r--drivers/dma/ste_dma40.c26
1 file changed, 13 insertions, 13 deletions
diff --git a/drivers/dma/ste_dma40.c b/drivers/dma/ste_dma40.c
index 467e4dcb20a0..0c6cbacb8321 100644
--- a/drivers/dma/ste_dma40.c
+++ b/drivers/dma/ste_dma40.c
@@ -216,7 +216,7 @@ struct d40_chan {
216 struct d40_log_lli_full *lcpa; 216 struct d40_log_lli_full *lcpa;
217 /* Runtime reconfiguration */ 217 /* Runtime reconfiguration */
218 dma_addr_t runtime_addr; 218 dma_addr_t runtime_addr;
219 enum dma_data_direction runtime_direction; 219 enum dma_transfer_direction runtime_direction;
220}; 220};
221 221
222/** 222/**
@@ -1854,7 +1854,7 @@ err:
1854} 1854}
1855 1855
1856static dma_addr_t 1856static dma_addr_t
1857d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction) 1857d40_get_dev_addr(struct d40_chan *chan, enum dma_transfer_direction direction)
1858{ 1858{
1859 struct stedma40_platform_data *plat = chan->base->plat_data; 1859 struct stedma40_platform_data *plat = chan->base->plat_data;
1860 struct stedma40_chan_cfg *cfg = &chan->dma_cfg; 1860 struct stedma40_chan_cfg *cfg = &chan->dma_cfg;
@@ -1863,9 +1863,9 @@ d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction)
1863 if (chan->runtime_addr) 1863 if (chan->runtime_addr)
1864 return chan->runtime_addr; 1864 return chan->runtime_addr;
1865 1865
1866 if (direction == DMA_FROM_DEVICE) 1866 if (direction == DMA_DEV_TO_MEM)
1867 addr = plat->dev_rx[cfg->src_dev_type]; 1867 addr = plat->dev_rx[cfg->src_dev_type];
1868 else if (direction == DMA_TO_DEVICE) 1868 else if (direction == DMA_MEM_TO_DEV)
1869 addr = plat->dev_tx[cfg->dst_dev_type]; 1869 addr = plat->dev_tx[cfg->dst_dev_type];
1870 1870
1871 return addr; 1871 return addr;
@@ -1874,7 +1874,7 @@ d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction)
1874static struct dma_async_tx_descriptor * 1874static struct dma_async_tx_descriptor *
1875d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src, 1875d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,
1876 struct scatterlist *sg_dst, unsigned int sg_len, 1876 struct scatterlist *sg_dst, unsigned int sg_len,
1877 enum dma_data_direction direction, unsigned long dma_flags) 1877 enum dma_transfer_direction direction, unsigned long dma_flags)
1878{ 1878{
1879 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan); 1879 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan);
1880 dma_addr_t src_dev_addr = 0; 1880 dma_addr_t src_dev_addr = 0;
@@ -1901,9 +1901,9 @@ d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,
1901 if (direction != DMA_NONE) { 1901 if (direction != DMA_NONE) {
1902 dma_addr_t dev_addr = d40_get_dev_addr(chan, direction); 1902 dma_addr_t dev_addr = d40_get_dev_addr(chan, direction);
1903 1903
1904 if (direction == DMA_FROM_DEVICE) 1904 if (direction == DMA_DEV_TO_MEM)
1905 src_dev_addr = dev_addr; 1905 src_dev_addr = dev_addr;
1906 else if (direction == DMA_TO_DEVICE) 1906 else if (direction == DMA_MEM_TO_DEV)
1907 dst_dev_addr = dev_addr; 1907 dst_dev_addr = dev_addr;
1908 } 1908 }
1909 1909
@@ -2107,10 +2107,10 @@ d40_prep_memcpy_sg(struct dma_chan *chan,
2107static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan, 2107static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
2108 struct scatterlist *sgl, 2108 struct scatterlist *sgl,
2109 unsigned int sg_len, 2109 unsigned int sg_len,
2110 enum dma_data_direction direction, 2110 enum dma_transfer_direction direction,
2111 unsigned long dma_flags) 2111 unsigned long dma_flags)
2112{ 2112{
2113 if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) 2113 if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV)
2114 return NULL; 2114 return NULL;
2115 2115
2116 return d40_prep_sg(chan, sgl, sgl, sg_len, direction, dma_flags); 2116 return d40_prep_sg(chan, sgl, sgl, sg_len, direction, dma_flags);
@@ -2119,7 +2119,7 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
2119static struct dma_async_tx_descriptor * 2119static struct dma_async_tx_descriptor *
2120dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr, 2120dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
2121 size_t buf_len, size_t period_len, 2121 size_t buf_len, size_t period_len,
2122 enum dma_data_direction direction) 2122 enum dma_transfer_direction direction)
2123{ 2123{
2124 unsigned int periods = buf_len / period_len; 2124 unsigned int periods = buf_len / period_len;
2125 struct dma_async_tx_descriptor *txd; 2125 struct dma_async_tx_descriptor *txd;
@@ -2268,7 +2268,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2268 dst_addr_width = config->dst_addr_width; 2268 dst_addr_width = config->dst_addr_width;
2269 dst_maxburst = config->dst_maxburst; 2269 dst_maxburst = config->dst_maxburst;
2270 2270
2271 if (config->direction == DMA_FROM_DEVICE) { 2271 if (config->direction == DMA_DEV_TO_MEM) {
2272 dma_addr_t dev_addr_rx = 2272 dma_addr_t dev_addr_rx =
2273 d40c->base->plat_data->dev_rx[cfg->src_dev_type]; 2273 d40c->base->plat_data->dev_rx[cfg->src_dev_type];
2274 2274
@@ -2291,7 +2291,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2291 if (dst_maxburst == 0) 2291 if (dst_maxburst == 0)
2292 dst_maxburst = src_maxburst; 2292 dst_maxburst = src_maxburst;
2293 2293
2294 } else if (config->direction == DMA_TO_DEVICE) { 2294 } else if (config->direction == DMA_MEM_TO_DEV) {
2295 dma_addr_t dev_addr_tx = 2295 dma_addr_t dev_addr_tx =
2296 d40c->base->plat_data->dev_tx[cfg->dst_dev_type]; 2296 d40c->base->plat_data->dev_tx[cfg->dst_dev_type];
2297 2297
@@ -2356,7 +2356,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2356 "configured channel %s for %s, data width %d/%d, " 2356 "configured channel %s for %s, data width %d/%d, "
2357 "maxburst %d/%d elements, LE, no flow control\n", 2357 "maxburst %d/%d elements, LE, no flow control\n",
2358 dma_chan_name(chan), 2358 dma_chan_name(chan),
2359 (config->direction == DMA_FROM_DEVICE) ? "RX" : "TX", 2359 (config->direction == DMA_DEV_TO_MEM) ? "RX" : "TX",
2360 src_addr_width, dst_addr_width, 2360 src_addr_width, dst_addr_width,
2361 src_maxburst, dst_maxburst); 2361 src_maxburst, dst_maxburst);
2362 2362