-rw-r--r--  arch/arm/mach-ep93xx/include/mach/dma.h | 6
-rw-r--r--  arch/arm/plat-nomadik/include/plat/ste_dma40.h | 4
-rw-r--r--  arch/arm/plat-samsung/dma-ops.c | 4
-rw-r--r--  arch/arm/plat-samsung/include/plat/dma-ops.h | 4
-rw-r--r--  drivers/dma/amba-pl08x.c | 24
-rw-r--r--  drivers/dma/at_hdmac.c | 22
-rw-r--r--  drivers/dma/coh901318.c | 12
-rw-r--r--  drivers/dma/coh901318_lli.c | 23
-rw-r--r--  drivers/dma/coh901318_lli.h | 4
-rw-r--r--  drivers/dma/dw_dmac.c | 14
-rw-r--r--  drivers/dma/ep93xx_dma.c | 22
-rw-r--r--  drivers/dma/fsldma.c | 4
-rw-r--r--  drivers/dma/imx-dma.c | 10
-rw-r--r--  drivers/dma/imx-sdma.c | 10
-rw-r--r--  drivers/dma/intel_mid_dma.c | 14
-rw-r--r--  drivers/dma/intel_mid_dma_regs.h | 2
-rw-r--r--  drivers/dma/ipu/ipu_idmac.c | 4
-rw-r--r--  drivers/dma/mxs-dma.c | 8
-rw-r--r--  drivers/dma/pch_dma.c | 12
-rw-r--r--  drivers/dma/pl330.c | 18
-rw-r--r--  drivers/dma/shdma.c | 25
-rw-r--r--  drivers/dma/ste_dma40.c | 26
-rw-r--r--  drivers/dma/timb_dma.c | 18
-rw-r--r--  drivers/dma/txx9dmac.c | 12
-rw-r--r--  drivers/media/video/mx3_camera.c | 2
-rw-r--r--  drivers/media/video/timblogiw.c | 2
-rw-r--r--  drivers/misc/carma/carma-fpga-program.c | 2
-rw-r--r--  drivers/mmc/host/atmel-mci.c | 10
-rw-r--r--  drivers/mmc/host/mmci.c | 11
-rw-r--r--  drivers/mmc/host/mxcmmc.c | 10
-rw-r--r--  drivers/mmc/host/mxs-mmc.c | 7
-rw-r--r--  drivers/mmc/host/sh_mmcif.c | 4
-rw-r--r--  drivers/mmc/host/tmio_mmc_dma.c | 4
-rw-r--r--  drivers/net/ethernet/micrel/ks8842.c | 4
-rw-r--r--  drivers/spi/spi-dw-mid.c | 8
-rw-r--r--  drivers/spi/spi-ep93xx.c | 9
-rw-r--r--  drivers/spi/spi-pl022.c | 8
-rw-r--r--  drivers/spi/spi-topcliff-pch.c | 4
-rw-r--r--  drivers/tty/serial/amba-pl011.c | 8
-rw-r--r--  drivers/tty/serial/pch_uart.c | 4
-rw-r--r--  drivers/tty/serial/sh-sci.c | 4
-rw-r--r--  drivers/usb/musb/ux500_dma.c | 4
-rw-r--r--  drivers/usb/renesas_usbhs/fifo.c | 4
-rw-r--r--  drivers/video/mx3fb.c | 4
-rw-r--r--  include/linux/amba/pl08x.h | 4
-rw-r--r--  include/linux/dmaengine.h | 22
-rw-r--r--  include/linux/dw_dmac.h | 2
-rw-r--r--  include/linux/sh_dma.h | 2
-rw-r--r--  sound/atmel/abdac.c | 2
-rw-r--r--  sound/atmel/ac97c.c | 10
-rw-r--r--  sound/soc/ep93xx/ep93xx-pcm.c | 4
-rw-r--r--  sound/soc/imx/imx-pcm-dma-mx2.c | 6
-rw-r--r--  sound/soc/samsung/dma.c | 4
-rw-r--r--  sound/soc/sh/siu_pcm.c | 4
-rw-r--r--  sound/soc/txx9/txx9aclc.c | 2
55 files changed, 253 insertions, 224 deletions
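
The change common to every file below is a rename of the dmaengine slave/cyclic direction type: enum dma_data_direction (the DMA-mapping type) becomes enum dma_transfer_direction, with DMA_TO_DEVICE replaced by DMA_MEM_TO_DEV, DMA_FROM_DEVICE by DMA_DEV_TO_MEM, and DMA_BIDIRECTIONAL (for memcpy) by DMA_MEM_TO_MEM. As a minimal sketch of what this means for a slave driver's setup code -- the driver, function and FIFO names here are invented for illustration and are not part of this patch:

	#include <linux/dmaengine.h>

	/* Hypothetical TX configuration in a slave driver, after this rename. */
	static int my_drv_setup_tx(struct dma_chan *chan, dma_addr_t fifo_addr)
	{
		struct dma_slave_config cfg = {
			/* was: .direction = DMA_TO_DEVICE (enum dma_data_direction) */
			.direction	= DMA_MEM_TO_DEV, /* enum dma_transfer_direction */
			.dst_addr	= fifo_addr,
			.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
			.dst_maxburst	= 4,
		};

		return dmaengine_slave_config(chan, &cfg);
	}
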
diff --git a/arch/arm/mach-ep93xx/include/mach/dma.h b/arch/arm/mach-ep93xx/include/mach/dma.h
index 46d4d876e6fb..e82c642fa53c 100644
--- a/arch/arm/mach-ep93xx/include/mach/dma.h
+++ b/arch/arm/mach-ep93xx/include/mach/dma.h
@@ -37,7 +37,7 @@
  */
 struct ep93xx_dma_data {
 	int port;
-	enum dma_data_direction direction;
+	enum dma_transfer_direction direction;
 	const char *name;
 };
 
@@ -80,14 +80,14 @@ static inline bool ep93xx_dma_chan_is_m2p(struct dma_chan *chan)
  * channel supports given DMA direction. Only M2P channels have such
  * limitation, for M2M channels the direction is configurable.
  */
-static inline enum dma_data_direction
+static inline enum dma_transfer_direction
 ep93xx_dma_chan_direction(struct dma_chan *chan)
 {
 	if (!ep93xx_dma_chan_is_m2p(chan))
 		return DMA_NONE;
 
 	/* even channels are for TX, odd for RX */
-	return (chan->chan_id % 2 == 0) ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
+	return (chan->chan_id % 2 == 0) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
 }
 
 #endif /* __ASM_ARCH_DMA_H */
diff --git a/arch/arm/plat-nomadik/include/plat/ste_dma40.h b/arch/arm/plat-nomadik/include/plat/ste_dma40.h
index 685c78716d95..38b041a40db4 100644
--- a/arch/arm/plat-nomadik/include/plat/ste_dma40.h
+++ b/arch/arm/plat-nomadik/include/plat/ste_dma40.h
@@ -187,7 +187,7 @@ static inline struct
 dma_async_tx_descriptor *stedma40_slave_mem(struct dma_chan *chan,
 					    dma_addr_t addr,
 					    unsigned int size,
-					    enum dma_data_direction direction,
+					    enum dma_transfer_direction direction,
 					    unsigned long flags)
 {
 	struct scatterlist sg;
@@ -209,7 +209,7 @@ static inline struct
 dma_async_tx_descriptor *stedma40_slave_mem(struct dma_chan *chan,
 					    dma_addr_t addr,
 					    unsigned int size,
-					    enum dma_data_direction direction,
+					    enum dma_transfer_direction direction,
 					    unsigned long flags)
 {
 	return NULL;
diff --git a/arch/arm/plat-samsung/dma-ops.c b/arch/arm/plat-samsung/dma-ops.c
index 93a994a5dd8f..1baa8ce82432 100644
--- a/arch/arm/plat-samsung/dma-ops.c
+++ b/arch/arm/plat-samsung/dma-ops.c
@@ -36,14 +36,14 @@ static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
 
 	chan = dma_request_channel(mask, pl330_filter, (void *)dma_ch);
 
-	if (info->direction == DMA_FROM_DEVICE) {
+	if (info->direction == DMA_DEV_TO_MEM) {
 		memset(&slave_config, 0, sizeof(struct dma_slave_config));
 		slave_config.direction = info->direction;
 		slave_config.src_addr = info->fifo;
 		slave_config.src_addr_width = info->width;
 		slave_config.src_maxburst = 1;
 		dmaengine_slave_config(chan, &slave_config);
-	} else if (info->direction == DMA_TO_DEVICE) {
+	} else if (info->direction == DMA_MEM_TO_DEV) {
 		memset(&slave_config, 0, sizeof(struct dma_slave_config));
 		slave_config.direction = info->direction;
 		slave_config.dst_addr = info->fifo;
diff --git a/arch/arm/plat-samsung/include/plat/dma-ops.h b/arch/arm/plat-samsung/include/plat/dma-ops.h
index 4c1a363526cf..12561152fb97 100644
--- a/arch/arm/plat-samsung/include/plat/dma-ops.h
+++ b/arch/arm/plat-samsung/include/plat/dma-ops.h
@@ -17,7 +17,7 @@
 
 struct samsung_dma_prep_info {
 	enum dma_transaction_type cap;
-	enum dma_data_direction direction;
+	enum dma_transfer_direction direction;
 	dma_addr_t buf;
 	unsigned long period;
 	unsigned long len;
@@ -27,7 +27,7 @@ struct samsung_dma_prep_info {
 
 struct samsung_dma_info {
 	enum dma_transaction_type cap;
-	enum dma_data_direction direction;
+	enum dma_transfer_direction direction;
 	enum dma_slave_buswidth width;
 	dma_addr_t fifo;
 	struct s3c2410_dma_client *client;
diff --git a/drivers/dma/amba-pl08x.c b/drivers/dma/amba-pl08x.c
index b7cbd1ab1db1..41c62fd0680d 100644
--- a/drivers/dma/amba-pl08x.c
+++ b/drivers/dma/amba-pl08x.c
@@ -882,9 +882,9 @@ static int prep_phy_channel(struct pl08x_dma_chan *plchan,
 		ch->signal = ret;
 
 		/* Assign the flow control signal to this channel */
-		if (txd->direction == DMA_TO_DEVICE)
+		if (txd->direction == DMA_MEM_TO_DEV)
 			txd->ccfg |= ch->signal << PL080_CONFIG_DST_SEL_SHIFT;
-		else if (txd->direction == DMA_FROM_DEVICE)
+		else if (txd->direction == DMA_DEV_TO_MEM)
 			txd->ccfg |= ch->signal << PL080_CONFIG_SRC_SEL_SHIFT;
 	}
 
@@ -1102,10 +1102,10 @@ static int dma_set_runtime_config(struct dma_chan *chan,
 
 	/* Transfer direction */
 	plchan->runtime_direction = config->direction;
-	if (config->direction == DMA_TO_DEVICE) {
+	if (config->direction == DMA_MEM_TO_DEV) {
 		addr_width = config->dst_addr_width;
 		maxburst = config->dst_maxburst;
-	} else if (config->direction == DMA_FROM_DEVICE) {
+	} else if (config->direction == DMA_DEV_TO_MEM) {
 		addr_width = config->src_addr_width;
 		maxburst = config->src_maxburst;
 	} else {
@@ -1136,7 +1136,7 @@ static int dma_set_runtime_config(struct dma_chan *chan,
 	cctl |= burst << PL080_CONTROL_SB_SIZE_SHIFT;
 	cctl |= burst << PL080_CONTROL_DB_SIZE_SHIFT;
 
-	if (plchan->runtime_direction == DMA_FROM_DEVICE) {
+	if (plchan->runtime_direction == DMA_DEV_TO_MEM) {
 		plchan->src_addr = config->src_addr;
 		plchan->src_cctl = pl08x_cctl(cctl) | PL080_CONTROL_DST_INCR |
 			pl08x_select_bus(plchan->cd->periph_buses,
@@ -1152,7 +1152,7 @@ static int dma_set_runtime_config(struct dma_chan *chan,
 		"configured channel %s (%s) for %s, data width %d, "
 		"maxburst %d words, LE, CCTL=0x%08x\n",
 		dma_chan_name(chan), plchan->name,
-		(config->direction == DMA_FROM_DEVICE) ? "RX" : "TX",
+		(config->direction == DMA_DEV_TO_MEM) ? "RX" : "TX",
 		addr_width,
 		maxburst,
 		cctl);
@@ -1322,7 +1322,7 @@ static struct dma_async_tx_descriptor *pl08x_prep_dma_memcpy(
 
 static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
@@ -1354,10 +1354,10 @@ static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
 	 */
 	txd->direction = direction;
 
-	if (direction == DMA_TO_DEVICE) {
+	if (direction == DMA_MEM_TO_DEV) {
 		txd->cctl = plchan->dst_cctl;
 		slave_addr = plchan->dst_addr;
-	} else if (direction == DMA_FROM_DEVICE) {
+	} else if (direction == DMA_DEV_TO_MEM) {
 		txd->cctl = plchan->src_cctl;
 		slave_addr = plchan->src_addr;
 	} else {
@@ -1368,10 +1368,10 @@ static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
 	}
 
 	if (plchan->cd->device_fc)
-		tmp = (direction == DMA_TO_DEVICE) ? PL080_FLOW_MEM2PER_PER :
+		tmp = (direction == DMA_MEM_TO_DEV) ? PL080_FLOW_MEM2PER_PER :
 			PL080_FLOW_PER2MEM_PER;
 	else
-		tmp = (direction == DMA_TO_DEVICE) ? PL080_FLOW_MEM2PER :
+		tmp = (direction == DMA_MEM_TO_DEV) ? PL080_FLOW_MEM2PER :
 			PL080_FLOW_PER2MEM;
 
 	txd->ccfg |= tmp << PL080_CONFIG_FLOW_CONTROL_SHIFT;
@@ -1387,7 +1387,7 @@ static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
 		list_add_tail(&dsg->node, &txd->dsg_list);
 
 		dsg->len = sg_dma_len(sg);
-		if (direction == DMA_TO_DEVICE) {
+		if (direction == DMA_MEM_TO_DEV) {
 			dsg->src_addr = sg_phys(sg);
 			dsg->dst_addr = slave_addr;
 		} else {
diff --git a/drivers/dma/at_hdmac.c b/drivers/dma/at_hdmac.c
index f3cb4a009e7d..3b0c28ff4ad6 100644
--- a/drivers/dma/at_hdmac.c
+++ b/drivers/dma/at_hdmac.c
@@ -662,7 +662,7 @@ err_desc_get:
  */
 static struct dma_async_tx_descriptor *
 atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct at_dma_chan *atchan = to_at_dma_chan(chan);
@@ -680,7 +680,7 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 
 	dev_vdbg(chan2dev(chan), "prep_slave_sg (%d): %s f0x%lx\n",
 			sg_len,
-			direction == DMA_TO_DEVICE ? "TO DEVICE" : "FROM DEVICE",
+			direction == DMA_MEM_TO_DEV ? "TO DEVICE" : "FROM DEVICE",
 			flags);
 
 	if (unlikely(!atslave || !sg_len)) {
@@ -694,7 +694,7 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 	ctrlb = ATC_IEN;
 
 	switch (direction) {
-	case DMA_TO_DEVICE:
+	case DMA_MEM_TO_DEV:
 		ctrla |= ATC_DST_WIDTH(reg_width);
 		ctrlb |= ATC_DST_ADDR_MODE_FIXED
 			| ATC_SRC_ADDR_MODE_INCR
@@ -727,7 +727,7 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 			total_len += len;
 		}
 		break;
-	case DMA_FROM_DEVICE:
+	case DMA_DEV_TO_MEM:
 		ctrla |= ATC_SRC_WIDTH(reg_width);
 		ctrlb |= ATC_DST_ADDR_MODE_INCR
 			| ATC_SRC_ADDR_MODE_FIXED
@@ -789,7 +789,7 @@ err_desc_get:
  */
 static int
 atc_dma_cyclic_check_values(unsigned int reg_width, dma_addr_t buf_addr,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	if (period_len > (ATC_BTSIZE_MAX << reg_width))
 		goto err_out;
@@ -797,7 +797,7 @@ atc_dma_cyclic_check_values(unsigned int reg_width, dma_addr_t buf_addr,
 		goto err_out;
 	if (unlikely(buf_addr & ((1 << reg_width) - 1)))
 		goto err_out;
-	if (unlikely(!(direction & (DMA_TO_DEVICE | DMA_FROM_DEVICE))))
+	if (unlikely(!(direction & (DMA_DEV_TO_MEM | DMA_MEM_TO_DEV))))
 		goto err_out;
 
 	return 0;
@@ -812,7 +812,7 @@ err_out:
 static int
 atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
 		unsigned int period_index, dma_addr_t buf_addr,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	u32 ctrla;
 	unsigned int reg_width = atslave->reg_width;
@@ -824,7 +824,7 @@ atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
 		| period_len >> reg_width;
 
 	switch (direction) {
-	case DMA_TO_DEVICE:
+	case DMA_MEM_TO_DEV:
 		desc->lli.saddr = buf_addr + (period_len * period_index);
 		desc->lli.daddr = atslave->tx_reg;
 		desc->lli.ctrla = ctrla;
@@ -835,7 +835,7 @@ atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
 			| ATC_DIF(AT_DMA_PER_IF);
 		break;
 
-	case DMA_FROM_DEVICE:
+	case DMA_DEV_TO_MEM:
 		desc->lli.saddr = atslave->rx_reg;
 		desc->lli.daddr = buf_addr + (period_len * period_index);
 		desc->lli.ctrla = ctrla;
@@ -863,7 +863,7 @@ atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
  */
 static struct dma_async_tx_descriptor *
 atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct at_dma_chan *atchan = to_at_dma_chan(chan);
 	struct at_dma_slave *atslave = chan->private;
@@ -874,7 +874,7 @@ atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
 	unsigned int i;
 
 	dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@0x%08x - %d (%d/%d)\n",
-			direction == DMA_TO_DEVICE ? "TO DEVICE" : "FROM DEVICE",
+			direction == DMA_MEM_TO_DEV ? "TO DEVICE" : "FROM DEVICE",
 			buf_addr,
 			periods, buf_len, period_len);
 
diff --git a/drivers/dma/coh901318.c b/drivers/dma/coh901318.c
index 4234f416ef11..d65a718c0f9b 100644
--- a/drivers/dma/coh901318.c
+++ b/drivers/dma/coh901318.c
@@ -39,7 +39,7 @@ struct coh901318_desc {
 	struct scatterlist *sg;
 	unsigned int sg_len;
 	struct coh901318_lli *lli;
-	enum dma_data_direction dir;
+	enum dma_transfer_direction dir;
 	unsigned long flags;
 	u32 head_config;
 	u32 head_ctrl;
@@ -1034,7 +1034,7 @@ coh901318_prep_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 
 static struct dma_async_tx_descriptor *
 coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
-			unsigned int sg_len, enum dma_data_direction direction,
+			unsigned int sg_len, enum dma_transfer_direction direction,
 			unsigned long flags)
 {
 	struct coh901318_chan *cohc = to_coh901318_chan(chan);
@@ -1077,7 +1077,7 @@ coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 	ctrl_last |= cohc->runtime_ctrl;
 	ctrl |= cohc->runtime_ctrl;
 
-	if (direction == DMA_TO_DEVICE) {
+	if (direction == DMA_MEM_TO_DEV) {
 		u32 tx_flags = COH901318_CX_CTRL_PRDD_SOURCE |
 			COH901318_CX_CTRL_SRC_ADDR_INC_ENABLE;
 
@@ -1085,7 +1085,7 @@ coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		ctrl_chained |= tx_flags;
 		ctrl_last |= tx_flags;
 		ctrl |= tx_flags;
-	} else if (direction == DMA_FROM_DEVICE) {
+	} else if (direction == DMA_DEV_TO_MEM) {
 		u32 rx_flags = COH901318_CX_CTRL_PRDD_DEST |
 			COH901318_CX_CTRL_DST_ADDR_INC_ENABLE;
 
@@ -1274,11 +1274,11 @@ static void coh901318_dma_set_runtimeconfig(struct dma_chan *chan,
 	int i = 0;
 
 	/* We only support mem to per or per to mem transfers */
-	if (config->direction == DMA_FROM_DEVICE) {
+	if (config->direction == DMA_DEV_TO_MEM) {
 		addr = config->src_addr;
 		addr_width = config->src_addr_width;
 		maxburst = config->src_maxburst;
-	} else if (config->direction == DMA_TO_DEVICE) {
+	} else if (config->direction == DMA_MEM_TO_DEV) {
 		addr = config->dst_addr;
 		addr_width = config->dst_addr_width;
 		maxburst = config->dst_maxburst;
diff --git a/drivers/dma/coh901318_lli.c b/drivers/dma/coh901318_lli.c
index 9f7e0e6a7eea..6c0e2d4c6682 100644
--- a/drivers/dma/coh901318_lli.c
+++ b/drivers/dma/coh901318_lli.c
@@ -7,11 +7,10 @@
  * Author: Per Friden <per.friden@stericsson.com>
  */
 
-#include <linux/dma-mapping.h>
 #include <linux/spinlock.h>
-#include <linux/dmapool.h>
 #include <linux/memory.h>
 #include <linux/gfp.h>
+#include <linux/dmapool.h>
 #include <mach/coh901318.h>
 
 #include "coh901318_lli.h"
@@ -177,18 +176,18 @@ coh901318_lli_fill_single(struct coh901318_pool *pool,
 			  struct coh901318_lli *lli,
 			  dma_addr_t buf, unsigned int size,
 			  dma_addr_t dev_addr, u32 ctrl_chained, u32 ctrl_eom,
-			  enum dma_data_direction dir)
+			  enum dma_transfer_direction dir)
 {
 	int s = size;
 	dma_addr_t src;
 	dma_addr_t dst;
 
 
-	if (dir == DMA_TO_DEVICE) {
+	if (dir == DMA_MEM_TO_DEV) {
 		src = buf;
 		dst = dev_addr;
 
-	} else if (dir == DMA_FROM_DEVICE) {
+	} else if (dir == DMA_DEV_TO_MEM) {
 
 		src = dev_addr;
 		dst = buf;
@@ -215,9 +214,9 @@ coh901318_lli_fill_single(struct coh901318_pool *pool,
 
 		lli = coh901318_lli_next(lli);
 
-		if (dir == DMA_TO_DEVICE)
+		if (dir == DMA_MEM_TO_DEV)
 			src += block_size;
-		else if (dir == DMA_FROM_DEVICE)
+		else if (dir == DMA_DEV_TO_MEM)
 			dst += block_size;
 	}
 
@@ -234,7 +233,7 @@ coh901318_lli_fill_sg(struct coh901318_pool *pool,
 		      struct scatterlist *sgl, unsigned int nents,
 		      dma_addr_t dev_addr, u32 ctrl_chained, u32 ctrl,
 		      u32 ctrl_last,
-		      enum dma_data_direction dir, u32 ctrl_irq_mask)
+		      enum dma_transfer_direction dir, u32 ctrl_irq_mask)
 {
 	int i;
 	struct scatterlist *sg;
@@ -249,9 +248,9 @@ coh901318_lli_fill_sg(struct coh901318_pool *pool,
 
 	spin_lock(&pool->lock);
 
-	if (dir == DMA_TO_DEVICE)
+	if (dir == DMA_MEM_TO_DEV)
 		dst = dev_addr;
-	else if (dir == DMA_FROM_DEVICE)
+	else if (dir == DMA_DEV_TO_MEM)
 		src = dev_addr;
 	else
 		goto err;
@@ -269,7 +268,7 @@ coh901318_lli_fill_sg(struct coh901318_pool *pool,
 			ctrl_sg = ctrl ? ctrl : ctrl_last;
 
 
-		if (dir == DMA_TO_DEVICE)
+		if (dir == DMA_MEM_TO_DEV)
 			/* increment source address */
 			src = sg_phys(sg);
 		else
@@ -293,7 +292,7 @@ coh901318_lli_fill_sg(struct coh901318_pool *pool,
 		lli->src_addr = src;
 		lli->dst_addr = dst;
 
-		if (dir == DMA_FROM_DEVICE)
+		if (dir == DMA_DEV_TO_MEM)
 			dst += elem_size;
 		else
 			src += elem_size;
diff --git a/drivers/dma/coh901318_lli.h b/drivers/dma/coh901318_lli.h
index 7a5c80990e9e..abff3714fdda 100644
--- a/drivers/dma/coh901318_lli.h
+++ b/drivers/dma/coh901318_lli.h
@@ -97,7 +97,7 @@ coh901318_lli_fill_single(struct coh901318_pool *pool,
 			  struct coh901318_lli *lli,
 			  dma_addr_t buf, unsigned int size,
 			  dma_addr_t dev_addr, u32 ctrl_chained, u32 ctrl_last,
-			  enum dma_data_direction dir);
+			  enum dma_transfer_direction dir);
 
 /**
  * coh901318_lli_fill_single() - Prepares the lli:s for dma scatter list transfer
@@ -119,6 +119,6 @@ coh901318_lli_fill_sg(struct coh901318_pool *pool,
 		      struct scatterlist *sg, unsigned int nents,
 		      dma_addr_t dev_addr, u32 ctrl_chained,
 		      u32 ctrl, u32 ctrl_last,
-		      enum dma_data_direction dir, u32 ctrl_irq_mask);
+		      enum dma_transfer_direction dir, u32 ctrl_irq_mask);
 
 #endif /* COH901318_LLI_H */
diff --git a/drivers/dma/dw_dmac.c b/drivers/dma/dw_dmac.c
index 9bfd6d360718..decca1c3c83d 100644
--- a/drivers/dma/dw_dmac.c
+++ b/drivers/dma/dw_dmac.c
@@ -696,7 +696,7 @@ err_desc_get:
 
 static struct dma_async_tx_descriptor *
 dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
@@ -720,7 +720,7 @@ dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 	prev = first = NULL;
 
 	switch (direction) {
-	case DMA_TO_DEVICE:
+	case DMA_MEM_TO_DEV:
 		ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 				| DWC_CTLL_DST_WIDTH(reg_width)
 				| DWC_CTLL_DST_FIX
@@ -777,7 +777,7 @@ slave_sg_todev_fill_desc:
 				goto slave_sg_todev_fill_desc;
 		}
 		break;
-	case DMA_FROM_DEVICE:
+	case DMA_DEV_TO_MEM:
 		ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 				| DWC_CTLL_SRC_WIDTH(reg_width)
 				| DWC_CTLL_DST_INC
@@ -1165,7 +1165,7 @@ EXPORT_SYMBOL(dw_dma_cyclic_stop);
  */
 struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
 		dma_addr_t buf_addr, size_t buf_len, size_t period_len,
-		enum dma_data_direction direction)
+		enum dma_transfer_direction direction)
 {
 	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
 	struct dw_cyclic_desc *cdesc;
@@ -1206,7 +1206,7 @@ struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
 		goto out_err;
 	if (unlikely(buf_addr & ((1 << reg_width) - 1)))
 		goto out_err;
-	if (unlikely(!(direction & (DMA_TO_DEVICE | DMA_FROM_DEVICE))))
+	if (unlikely(!(direction & (DMA_MEM_TO_DEV | DMA_DEV_TO_MEM))))
 		goto out_err;
 
 	retval = ERR_PTR(-ENOMEM);
@@ -1228,7 +1228,7 @@ struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
 			goto out_err_desc_get;
 
 		switch (direction) {
-		case DMA_TO_DEVICE:
+		case DMA_MEM_TO_DEV:
 			desc->lli.dar = dws->tx_reg;
 			desc->lli.sar = buf_addr + (period_len * i);
 			desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan->private)
@@ -1239,7 +1239,7 @@ struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
 					| DWC_CTLL_FC(dws->fc)
 					| DWC_CTLL_INT_EN);
 			break;
-		case DMA_FROM_DEVICE:
+		case DMA_DEV_TO_MEM:
 			desc->lli.dar = buf_addr + (period_len * i);
 			desc->lli.sar = dws->rx_reg;
 			desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan->private)
diff --git a/drivers/dma/ep93xx_dma.c b/drivers/dma/ep93xx_dma.c
index b47e2b803faf..009851b2aeea 100644
--- a/drivers/dma/ep93xx_dma.c
+++ b/drivers/dma/ep93xx_dma.c
@@ -330,7 +330,7 @@ static void m2p_fill_desc(struct ep93xx_dma_chan *edmac)
 	struct ep93xx_dma_desc *desc = ep93xx_dma_get_active(edmac);
 	u32 bus_addr;
 
-	if (ep93xx_dma_chan_direction(&edmac->chan) == DMA_TO_DEVICE)
+	if (ep93xx_dma_chan_direction(&edmac->chan) == DMA_MEM_TO_DEV)
 		bus_addr = desc->src_addr;
 	else
 		bus_addr = desc->dst_addr;
@@ -443,7 +443,7 @@ static int m2m_hw_setup(struct ep93xx_dma_chan *edmac)
 		control = (5 << M2M_CONTROL_PWSC_SHIFT);
 		control |= M2M_CONTROL_NO_HDSK;
 
-		if (data->direction == DMA_TO_DEVICE) {
+		if (data->direction == DMA_MEM_TO_DEV) {
 			control |= M2M_CONTROL_DAH;
 			control |= M2M_CONTROL_TM_TX;
 			control |= M2M_CONTROL_RSS_SSPTX;
@@ -463,7 +463,7 @@ static int m2m_hw_setup(struct ep93xx_dma_chan *edmac)
 		control |= M2M_CONTROL_RSS_IDE;
 		control |= M2M_CONTROL_PW_16;
 
-		if (data->direction == DMA_TO_DEVICE) {
+		if (data->direction == DMA_MEM_TO_DEV) {
 			/* Worst case from the UG */
 			control = (3 << M2M_CONTROL_PWSC_SHIFT);
 			control |= M2M_CONTROL_DAH;
@@ -803,8 +803,8 @@ static int ep93xx_dma_alloc_chan_resources(struct dma_chan *chan)
 		switch (data->port) {
 		case EP93XX_DMA_SSP:
 		case EP93XX_DMA_IDE:
-			if (data->direction != DMA_TO_DEVICE &&
-			    data->direction != DMA_FROM_DEVICE)
+			if (data->direction != DMA_MEM_TO_DEV &&
+			    data->direction != DMA_DEV_TO_MEM)
 				return -EINVAL;
 			break;
 		default:
@@ -952,7 +952,7 @@ fail:
  */
 static struct dma_async_tx_descriptor *
 ep93xx_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
-			 unsigned int sg_len, enum dma_data_direction dir,
+			 unsigned int sg_len, enum dma_transfer_direction dir,
			 unsigned long flags)
 {
 	struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan);
@@ -988,7 +988,7 @@ ep93xx_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 			goto fail;
 		}
 
-		if (dir == DMA_TO_DEVICE) {
+		if (dir == DMA_MEM_TO_DEV) {
 			desc->src_addr = sg_dma_address(sg);
 			desc->dst_addr = edmac->runtime_addr;
 		} else {
@@ -1032,7 +1032,7 @@ fail:
 static struct dma_async_tx_descriptor *
 ep93xx_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 			   size_t buf_len, size_t period_len,
-			   enum dma_data_direction dir)
+			   enum dma_transfer_direction dir)
 {
 	struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan);
 	struct ep93xx_dma_desc *desc, *first;
@@ -1065,7 +1065,7 @@ ep93xx_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 			goto fail;
 		}
 
-		if (dir == DMA_TO_DEVICE) {
+		if (dir == DMA_MEM_TO_DEV) {
 			desc->src_addr = dma_addr + offset;
 			desc->dst_addr = edmac->runtime_addr;
 		} else {
@@ -1133,12 +1133,12 @@ static int ep93xx_dma_slave_config(struct ep93xx_dma_chan *edmac,
 		return -EINVAL;
 
 	switch (config->direction) {
-	case DMA_FROM_DEVICE:
+	case DMA_DEV_TO_MEM:
 		width = config->src_addr_width;
 		addr = config->src_addr;
 		break;
 
-	case DMA_TO_DEVICE:
+	case DMA_MEM_TO_DEV:
 		width = config->dst_addr_width;
 		addr = config->dst_addr;
 		break;
diff --git a/drivers/dma/fsldma.c b/drivers/dma/fsldma.c
index 8a781540590c..b98070c33ca9 100644
--- a/drivers/dma/fsldma.c
+++ b/drivers/dma/fsldma.c
@@ -772,7 +772,7 @@ fail:
  */
 static struct dma_async_tx_descriptor *fsl_dma_prep_slave_sg(
 	struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len,
-	enum dma_data_direction direction, unsigned long flags)
+	enum dma_transfer_direction direction, unsigned long flags)
 {
 	/*
 	 * This operation is not supported on the Freescale DMA controller
@@ -819,7 +819,7 @@ static int fsl_dma_device_control(struct dma_chan *dchan,
 			return -ENXIO;
 
 		/* we set the controller burst size depending on direction */
-		if (config->direction == DMA_TO_DEVICE)
+		if (config->direction == DMA_MEM_TO_DEV)
 			size = config->dst_addr_width * config->dst_maxburst;
 		else
 			size = config->src_addr_width * config->src_maxburst;
diff --git a/drivers/dma/imx-dma.c b/drivers/dma/imx-dma.c
index 4be55f9bb6c1..e4383ee2c9ac 100644
--- a/drivers/dma/imx-dma.c
+++ b/drivers/dma/imx-dma.c
@@ -107,7 +107,7 @@ static int imxdma_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 		imx_dma_disable(imxdmac->imxdma_channel);
 		return 0;
 	case DMA_SLAVE_CONFIG:
-		if (dmaengine_cfg->direction == DMA_FROM_DEVICE) {
+		if (dmaengine_cfg->direction == DMA_DEV_TO_MEM) {
 			imxdmac->per_address = dmaengine_cfg->src_addr;
 			imxdmac->watermark_level = dmaengine_cfg->src_maxburst;
 			imxdmac->word_size = dmaengine_cfg->src_addr_width;
@@ -224,7 +224,7 @@ static void imxdma_free_chan_resources(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct imxdma_channel *imxdmac = to_imxdma_chan(chan);
@@ -241,7 +241,7 @@ static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 		dma_length += sg->length;
 	}
 
-	if (direction == DMA_FROM_DEVICE)
+	if (direction == DMA_DEV_TO_MEM)
 		dmamode = DMA_MODE_READ;
 	else
 		dmamode = DMA_MODE_WRITE;
@@ -271,7 +271,7 @@ static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 
 static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct imxdma_channel *imxdmac = to_imxdma_chan(chan);
 	struct imxdma_engine *imxdma = imxdmac->imxdma;
@@ -317,7 +317,7 @@ static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic(
 	imxdmac->sg_list[periods].page_link =
 		((unsigned long)imxdmac->sg_list | 0x01) & ~0x02;
 
-	if (direction == DMA_FROM_DEVICE)
+	if (direction == DMA_DEV_TO_MEM)
 		dmamode = DMA_MODE_READ;
 	else
 		dmamode = DMA_MODE_WRITE;
diff --git a/drivers/dma/imx-sdma.c b/drivers/dma/imx-sdma.c
index f993955a640c..2e0357178d19 100644
--- a/drivers/dma/imx-sdma.c
+++ b/drivers/dma/imx-sdma.c
@@ -247,7 +247,7 @@ struct sdma_engine;
 struct sdma_channel {
 	struct sdma_engine *sdma;
 	unsigned int channel;
-	enum dma_data_direction direction;
+	enum dma_transfer_direction direction;
 	enum sdma_peripheral_type peripheral_type;
 	unsigned int event_id0;
 	unsigned int event_id1;
@@ -650,7 +650,7 @@ static int sdma_load_context(struct sdma_channel *sdmac)
 	struct sdma_buffer_descriptor *bd0 = sdma->channel[0].bd;
 	int ret;
 
-	if (sdmac->direction == DMA_FROM_DEVICE) {
+	if (sdmac->direction == DMA_DEV_TO_MEM) {
 		load_address = sdmac->pc_from_device;
 	} else {
 		load_address = sdmac->pc_to_device;
@@ -911,7 +911,7 @@ static void sdma_free_chan_resources(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *sdma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
@@ -1008,7 +1008,7 @@ err_out:
 
 static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
 	struct sdma_engine *sdma = sdmac->sdma;
@@ -1093,7 +1093,7 @@ static int sdma_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 		sdma_disable_channel(sdmac);
 		return 0;
 	case DMA_SLAVE_CONFIG:
-		if (dmaengine_cfg->direction == DMA_FROM_DEVICE) {
+		if (dmaengine_cfg->direction == DMA_DEV_TO_MEM) {
 			sdmac->per_address = dmaengine_cfg->src_addr;
 			sdmac->watermark_level = dmaengine_cfg->src_maxburst;
 			sdmac->word_size = dmaengine_cfg->src_addr_width;
diff --git a/drivers/dma/intel_mid_dma.c b/drivers/dma/intel_mid_dma.c
index 19a0c64d45d3..01929ed6659d 100644
--- a/drivers/dma/intel_mid_dma.c
+++ b/drivers/dma/intel_mid_dma.c
@@ -395,10 +395,10 @@ static int midc_lli_fill_sg(struct intel_mid_dma_chan *midc,
 						midc->dma->block_size);
 		/*Populate SAR and DAR values*/
 		sg_phy_addr = sg_phys(sg);
-		if (desc->dirn ==  DMA_TO_DEVICE) {
+		if (desc->dirn ==  DMA_MEM_TO_DEV) {
 			lli_bloc_desc->sar  = sg_phy_addr;
 			lli_bloc_desc->dar  = mids->dma_slave.dst_addr;
-		} else if (desc->dirn ==  DMA_FROM_DEVICE) {
+		} else if (desc->dirn ==  DMA_DEV_TO_MEM) {
 			lli_bloc_desc->sar  = mids->dma_slave.src_addr;
 			lli_bloc_desc->dar  = sg_phy_addr;
 		}
@@ -632,13 +632,13 @@ static struct dma_async_tx_descriptor *intel_mid_dma_prep_memcpy(
 	if (midc->dma->pimr_mask) {
 		cfg_hi.cfgx.protctl = 0x0; /*default value*/
 		cfg_hi.cfgx.fifo_mode = 1;
-		if (mids->dma_slave.direction == DMA_TO_DEVICE) {
+		if (mids->dma_slave.direction == DMA_MEM_TO_DEV) {
 			cfg_hi.cfgx.src_per = 0;
 			if (mids->device_instance == 0)
 				cfg_hi.cfgx.dst_per = 3;
 			if (mids->device_instance == 1)
 				cfg_hi.cfgx.dst_per = 1;
-		} else if (mids->dma_slave.direction == DMA_FROM_DEVICE) {
+		} else if (mids->dma_slave.direction == DMA_DEV_TO_MEM) {
 			if (mids->device_instance == 0)
 				cfg_hi.cfgx.src_per = 2;
 			if (mids->device_instance == 1)
@@ -682,11 +682,11 @@ static struct dma_async_tx_descriptor *intel_mid_dma_prep_memcpy(
 		ctl_lo.ctlx.sinc = 0;
 		ctl_lo.ctlx.dinc = 0;
 	} else {
-		if (mids->dma_slave.direction == DMA_TO_DEVICE) {
+		if (mids->dma_slave.direction == DMA_MEM_TO_DEV) {
 			ctl_lo.ctlx.sinc = 0;
 			ctl_lo.ctlx.dinc = 2;
 			ctl_lo.ctlx.tt_fc = 1;
-		} else if (mids->dma_slave.direction == DMA_FROM_DEVICE) {
+		} else if (mids->dma_slave.direction == DMA_DEV_TO_MEM) {
 			ctl_lo.ctlx.sinc = 2;
 			ctl_lo.ctlx.dinc = 0;
 			ctl_lo.ctlx.tt_fc = 2;
@@ -732,7 +732,7 @@ err_desc_get:
  */
 static struct dma_async_tx_descriptor *intel_mid_dma_prep_slave_sg(
 			struct dma_chan *chan, struct scatterlist *sgl,
-			unsigned int sg_len, enum dma_data_direction direction,
+			unsigned int sg_len, enum dma_transfer_direction direction,
 			unsigned long flags)
 {
 	struct intel_mid_dma_chan *midc = NULL;
diff --git a/drivers/dma/intel_mid_dma_regs.h b/drivers/dma/intel_mid_dma_regs.h
index aea5ee88ce03..c6de919a6401 100644
--- a/drivers/dma/intel_mid_dma_regs.h
+++ b/drivers/dma/intel_mid_dma_regs.h
@@ -262,7 +262,7 @@ struct intel_mid_dma_desc {
 	unsigned int lli_length;
 	unsigned int current_lli;
 	dma_addr_t next;
-	enum dma_data_direction dirn;
+	enum dma_transfer_direction dirn;
 	enum dma_status status;
 	enum dma_slave_buswidth width; /*width of DMA txn*/
 	enum intel_mid_dma_mode cfg_mode; /*mode configuration*/
diff --git a/drivers/dma/ipu/ipu_idmac.c b/drivers/dma/ipu/ipu_idmac.c
index 0e5ef33f90a1..e16e280b26eb 100644
--- a/drivers/dma/ipu/ipu_idmac.c
+++ b/drivers/dma/ipu/ipu_idmac.c
@@ -1364,7 +1364,7 @@ static void ipu_gc_tasklet(unsigned long arg)
 /* Allocate and initialise a transfer descriptor. */
 static struct dma_async_tx_descriptor *idmac_prep_slave_sg(struct dma_chan *chan,
 		struct scatterlist *sgl, unsigned int sg_len,
-		enum dma_data_direction direction, unsigned long tx_flags)
+		enum dma_transfer_direction direction, unsigned long tx_flags)
 {
 	struct idmac_channel *ichan = to_idmac_chan(chan);
 	struct idmac_tx_desc *desc = NULL;
@@ -1376,7 +1376,7 @@ static struct dma_async_tx_descriptor *idmac_prep_slave_sg(struct dma_chan *chan
 	    chan->chan_id != IDMAC_IC_7)
 		return NULL;
 
-	if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) {
+	if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV) {
 		dev_err(chan->device->dev, "Invalid DMA direction %d!\n", direction);
 		return NULL;
 	}
diff --git a/drivers/dma/mxs-dma.c b/drivers/dma/mxs-dma.c
index b4588bdd98bb..bdf4672b2553 100644
--- a/drivers/dma/mxs-dma.c
+++ b/drivers/dma/mxs-dma.c
@@ -377,7 +377,7 @@ static void mxs_dma_free_chan_resources(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *mxs_dma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long append)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
@@ -450,7 +450,7 @@ static struct dma_async_tx_descriptor *mxs_dma_prep_slave_sg(
 			ccw->bits |= CCW_CHAIN;
 			ccw->bits |= CCW_HALT_ON_TERM;
 			ccw->bits |= CCW_TERM_FLUSH;
-			ccw->bits |= BF_CCW(direction == DMA_FROM_DEVICE ?
+			ccw->bits |= BF_CCW(direction == DMA_DEV_TO_MEM ?
 					MXS_DMA_CMD_WRITE : MXS_DMA_CMD_READ,
 					COMMAND);
 
@@ -472,7 +472,7 @@ err_out:
 
 static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
 	struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
@@ -515,7 +515,7 @@ static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		ccw->bits |= CCW_IRQ;
 		ccw->bits |= CCW_HALT_ON_TERM;
 		ccw->bits |= CCW_TERM_FLUSH;
-		ccw->bits |= BF_CCW(direction == DMA_FROM_DEVICE ?
+		ccw->bits |= BF_CCW(direction == DMA_DEV_TO_MEM ?
 				MXS_DMA_CMD_WRITE : MXS_DMA_CMD_READ, COMMAND);
 
 		dma_addr += period_len;
diff --git a/drivers/dma/pch_dma.c b/drivers/dma/pch_dma.c
index 60aca37e22ea..823f58179f9d 100644
--- a/drivers/dma/pch_dma.c
+++ b/drivers/dma/pch_dma.c
@@ -99,7 +99,7 @@ struct pch_dma_desc {
 struct pch_dma_chan {
 	struct dma_chan chan;
 	void __iomem *membase;
-	enum dma_data_direction dir;
+	enum dma_transfer_direction dir;
 	struct tasklet_struct tasklet;
 	unsigned long err_status;
 
@@ -224,7 +224,7 @@ static void pdc_set_dir(struct dma_chan *chan)
 		mask_ctl = DMA_MASK_CTL0_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 				       (DMA_CTL0_BITS_PER_CH * chan->chan_id));
 		val &= mask_mode;
-		if (pd_chan->dir == DMA_TO_DEVICE)
+		if (pd_chan->dir == DMA_MEM_TO_DEV)
 			val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
 				       DMA_CTL0_DIR_SHIFT_BITS);
 		else
@@ -242,7 +242,7 @@ static void pdc_set_dir(struct dma_chan *chan)
 		mask_ctl = DMA_MASK_CTL2_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 						 (DMA_CTL0_BITS_PER_CH * ch));
 		val &= mask_mode;
-		if (pd_chan->dir == DMA_TO_DEVICE)
+		if (pd_chan->dir == DMA_MEM_TO_DEV)
 			val |= 0x1 << (DMA_CTL0_BITS_PER_CH * ch +
 				       DMA_CTL0_DIR_SHIFT_BITS);
 		else
@@ -607,7 +607,7 @@ static void pd_issue_pending(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan,
 			struct scatterlist *sgl, unsigned int sg_len,
-			enum dma_data_direction direction, unsigned long flags)
+			enum dma_transfer_direction direction, unsigned long flags)
 {
 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
 	struct pch_dma_slave *pd_slave = chan->private;
@@ -623,9 +623,9 @@ static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan,
 		return NULL;
 	}
 
-	if (direction == DMA_FROM_DEVICE)
+	if (direction == DMA_DEV_TO_MEM)
 		reg = pd_slave->rx_reg;
-	else if (direction == DMA_TO_DEVICE)
+	else if (direction == DMA_MEM_TO_DEV)
 		reg = pd_slave->tx_reg;
 	else
 		return NULL;
diff --git a/drivers/dma/pl330.c b/drivers/dma/pl330.c
index 186b8221803e..e0da795bdcb3 100644
--- a/drivers/dma/pl330.c
+++ b/drivers/dma/pl330.c
@@ -320,14 +320,14 @@ static int pl330_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd, unsigned
320 case DMA_SLAVE_CONFIG: 320 case DMA_SLAVE_CONFIG:
321 slave_config = (struct dma_slave_config *)arg; 321 slave_config = (struct dma_slave_config *)arg;
322 322
323 if (slave_config->direction == DMA_TO_DEVICE) { 323 if (slave_config->direction == DMA_MEM_TO_DEV) {
324 if (slave_config->dst_addr) 324 if (slave_config->dst_addr)
325 pch->fifo_addr = slave_config->dst_addr; 325 pch->fifo_addr = slave_config->dst_addr;
326 if (slave_config->dst_addr_width) 326 if (slave_config->dst_addr_width)
327 pch->burst_sz = __ffs(slave_config->dst_addr_width); 327 pch->burst_sz = __ffs(slave_config->dst_addr_width);
328 if (slave_config->dst_maxburst) 328 if (slave_config->dst_maxburst)
329 pch->burst_len = slave_config->dst_maxburst; 329 pch->burst_len = slave_config->dst_maxburst;
330 } else if (slave_config->direction == DMA_FROM_DEVICE) { 330 } else if (slave_config->direction == DMA_DEV_TO_MEM) {
331 if (slave_config->src_addr) 331 if (slave_config->src_addr)
332 pch->fifo_addr = slave_config->src_addr; 332 pch->fifo_addr = slave_config->src_addr;
333 if (slave_config->src_addr_width) 333 if (slave_config->src_addr_width)
@@ -597,7 +597,7 @@ static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len)
597 597
598static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic( 598static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
599 struct dma_chan *chan, dma_addr_t dma_addr, size_t len, 599 struct dma_chan *chan, dma_addr_t dma_addr, size_t len,
600 size_t period_len, enum dma_data_direction direction) 600 size_t period_len, enum dma_transfer_direction direction)
601{ 601{
602 struct dma_pl330_desc *desc; 602 struct dma_pl330_desc *desc;
603 struct dma_pl330_chan *pch = to_pchan(chan); 603 struct dma_pl330_chan *pch = to_pchan(chan);
@@ -612,13 +612,13 @@ static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
612 } 612 }
613 613
614 switch (direction) { 614 switch (direction) {
615 case DMA_TO_DEVICE: 615 case DMA_MEM_TO_DEV:
616 desc->rqcfg.src_inc = 1; 616 desc->rqcfg.src_inc = 1;
617 desc->rqcfg.dst_inc = 0; 617 desc->rqcfg.dst_inc = 0;
618 src = dma_addr; 618 src = dma_addr;
619 dst = pch->fifo_addr; 619 dst = pch->fifo_addr;
620 break; 620 break;
621 case DMA_FROM_DEVICE: 621 case DMA_DEV_TO_MEM:
622 desc->rqcfg.src_inc = 0; 622 desc->rqcfg.src_inc = 0;
623 desc->rqcfg.dst_inc = 1; 623 desc->rqcfg.dst_inc = 1;
624 src = pch->fifo_addr; 624 src = pch->fifo_addr;
@@ -687,7 +687,7 @@ pl330_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
687 687
688static struct dma_async_tx_descriptor * 688static struct dma_async_tx_descriptor *
689pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, 689pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
690 unsigned int sg_len, enum dma_data_direction direction, 690 unsigned int sg_len, enum dma_transfer_direction direction,
691 unsigned long flg) 691 unsigned long flg)
692{ 692{
693 struct dma_pl330_desc *first, *desc = NULL; 693 struct dma_pl330_desc *first, *desc = NULL;
@@ -702,9 +702,9 @@ pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
702 return NULL; 702 return NULL;
703 703
704 /* Make sure the direction is consistent */ 704 /* Make sure the direction is consistent */
705 if ((direction == DMA_TO_DEVICE && 705 if ((direction == DMA_MEM_TO_DEV &&
706 peri->rqtype != MEMTODEV) || 706 peri->rqtype != MEMTODEV) ||
707 (direction == DMA_FROM_DEVICE && 707 (direction == DMA_DEV_TO_MEM &&
708 peri->rqtype != DEVTOMEM)) { 708 peri->rqtype != DEVTOMEM)) {
709 dev_err(pch->dmac->pif.dev, "%s:%d Invalid Direction\n", 709 dev_err(pch->dmac->pif.dev, "%s:%d Invalid Direction\n",
710 __func__, __LINE__); 710 __func__, __LINE__);
@@ -747,7 +747,7 @@ pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
747 else 747 else
748 list_add_tail(&desc->node, &first->node); 748 list_add_tail(&desc->node, &first->node);
749 749
750 if (direction == DMA_TO_DEVICE) { 750 if (direction == DMA_MEM_TO_DEV) {
751 desc->rqcfg.src_inc = 1; 751 desc->rqcfg.src_inc = 1;
752 desc->rqcfg.dst_inc = 0; 752 desc->rqcfg.dst_inc = 0;
753 fill_px(&desc->px, 753 fill_px(&desc->px,
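
The pl330 hunks above spell out the convention the new names encode for slave transfers: DMA_MEM_TO_DEV means the memory buffer is the incrementing source and the peripheral FIFO the fixed destination, and DMA_DEV_TO_MEM is the mirror image. A minimal sketch of that decision, using a hypothetical helper rather than pl330's own descriptor fields, could look like this:

#include <linux/types.h>
#include <linux/dmaengine.h>

/* Hypothetical helper: derive endpoints and increment flags from the
 * transfer direction; mem is the mapped buffer, fifo the device register. */
static void pick_endpoints(enum dma_transfer_direction dir,
                           dma_addr_t mem, dma_addr_t fifo,
                           dma_addr_t *src, dma_addr_t *dst,
                           bool *src_inc, bool *dst_inc)
{
        if (dir == DMA_MEM_TO_DEV) {
                *src = mem;             /* memory side walks forward */
                *dst = fifo;            /* peripheral FIFO stays fixed */
                *src_inc = true;
                *dst_inc = false;
        } else {                        /* DMA_DEV_TO_MEM */
                *src = fifo;
                *dst = mem;
                *src_inc = false;
                *dst_inc = true;
        }
}
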
diff --git a/drivers/dma/shdma.c b/drivers/dma/shdma.c
index 81809c2b46ab..592304fb41a6 100644
--- a/drivers/dma/shdma.c
+++ b/drivers/dma/shdma.c
@@ -23,7 +23,6 @@
23#include <linux/interrupt.h> 23#include <linux/interrupt.h>
24#include <linux/dmaengine.h> 24#include <linux/dmaengine.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/dma-mapping.h>
27#include <linux/platform_device.h> 26#include <linux/platform_device.h>
28#include <linux/pm_runtime.h> 27#include <linux/pm_runtime.h>
29#include <linux/sh_dma.h> 28#include <linux/sh_dma.h>
@@ -479,19 +478,19 @@ static void sh_dmae_free_chan_resources(struct dma_chan *chan)
479 * @sh_chan: DMA channel 478 * @sh_chan: DMA channel
480 * @flags: DMA transfer flags 479 * @flags: DMA transfer flags
481 * @dest: destination DMA address, incremented when direction equals 480 * @dest: destination DMA address, incremented when direction equals
482 * DMA_FROM_DEVICE or DMA_BIDIRECTIONAL 481 * DMA_DEV_TO_MEM
483 * @src: source DMA address, incremented when direction equals 482 * @src: source DMA address, incremented when direction equals
484 * DMA_TO_DEVICE or DMA_BIDIRECTIONAL 483 * DMA_MEM_TO_DEV
485 * @len: DMA transfer length 484 * @len: DMA transfer length
486 * @first: if NULL, set to the current descriptor and cookie set to -EBUSY 485 * @first: if NULL, set to the current descriptor and cookie set to -EBUSY
487 * @direction: needed for slave DMA to decide which address to keep constant, 486 * @direction: needed for slave DMA to decide which address to keep constant,
488 * equals DMA_BIDIRECTIONAL for MEMCPY 487 * equals DMA_MEM_TO_MEM for MEMCPY
489 * Returns 0 or an error 488 * Returns 0 or an error
490 * Locks: called with desc_lock held 489 * Locks: called with desc_lock held
491 */ 490 */
492static struct sh_desc *sh_dmae_add_desc(struct sh_dmae_chan *sh_chan, 491static struct sh_desc *sh_dmae_add_desc(struct sh_dmae_chan *sh_chan,
493 unsigned long flags, dma_addr_t *dest, dma_addr_t *src, size_t *len, 492 unsigned long flags, dma_addr_t *dest, dma_addr_t *src, size_t *len,
494 struct sh_desc **first, enum dma_data_direction direction) 493 struct sh_desc **first, enum dma_transfer_direction direction)
495{ 494{
496 struct sh_desc *new; 495 struct sh_desc *new;
497 size_t copy_size; 496 size_t copy_size;
@@ -531,9 +530,9 @@ static struct sh_desc *sh_dmae_add_desc(struct sh_dmae_chan *sh_chan,
531 new->direction = direction; 530 new->direction = direction;
532 531
533 *len -= copy_size; 532 *len -= copy_size;
534 if (direction == DMA_BIDIRECTIONAL || direction == DMA_TO_DEVICE) 533 if (direction == DMA_MEM_TO_MEM || direction == DMA_MEM_TO_DEV)
535 *src += copy_size; 534 *src += copy_size;
536 if (direction == DMA_BIDIRECTIONAL || direction == DMA_FROM_DEVICE) 535 if (direction == DMA_MEM_TO_MEM || direction == DMA_DEV_TO_MEM)
537 *dest += copy_size; 536 *dest += copy_size;
538 537
539 return new; 538 return new;
@@ -546,12 +545,12 @@ static struct sh_desc *sh_dmae_add_desc(struct sh_dmae_chan *sh_chan,
546 * converted to scatter-gather to guarantee consistent locking and a correct 545 * converted to scatter-gather to guarantee consistent locking and a correct
547 * list manipulation. For slave DMA direction carries the usual meaning, and, 546 * list manipulation. For slave DMA direction carries the usual meaning, and,
548 * logically, the SG list is RAM and the addr variable contains slave address, 547 * logically, the SG list is RAM and the addr variable contains slave address,
549 * e.g., the FIFO I/O register. For MEMCPY direction equals DMA_BIDIRECTIONAL 548 * e.g., the FIFO I/O register. For MEMCPY direction equals DMA_MEM_TO_MEM
550 * and the SG list contains only one element and points at the source buffer. 549 * and the SG list contains only one element and points at the source buffer.
551 */ 550 */
552static struct dma_async_tx_descriptor *sh_dmae_prep_sg(struct sh_dmae_chan *sh_chan, 551static struct dma_async_tx_descriptor *sh_dmae_prep_sg(struct sh_dmae_chan *sh_chan,
553 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, 552 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr,
554 enum dma_data_direction direction, unsigned long flags) 553 enum dma_transfer_direction direction, unsigned long flags)
555{ 554{
556 struct scatterlist *sg; 555 struct scatterlist *sg;
557 struct sh_desc *first = NULL, *new = NULL /* compiler... */; 556 struct sh_desc *first = NULL, *new = NULL /* compiler... */;
@@ -592,7 +591,7 @@ static struct dma_async_tx_descriptor *sh_dmae_prep_sg(struct sh_dmae_chan *sh_c
592 dev_dbg(sh_chan->dev, "Add SG #%d@%p[%d], dma %llx\n", 591 dev_dbg(sh_chan->dev, "Add SG #%d@%p[%d], dma %llx\n",
593 i, sg, len, (unsigned long long)sg_addr); 592 i, sg, len, (unsigned long long)sg_addr);
594 593
595 if (direction == DMA_FROM_DEVICE) 594 if (direction == DMA_DEV_TO_MEM)
596 new = sh_dmae_add_desc(sh_chan, flags, 595 new = sh_dmae_add_desc(sh_chan, flags,
597 &sg_addr, addr, &len, &first, 596 &sg_addr, addr, &len, &first,
598 direction); 597 direction);
@@ -646,13 +645,13 @@ static struct dma_async_tx_descriptor *sh_dmae_prep_memcpy(
646 sg_dma_address(&sg) = dma_src; 645 sg_dma_address(&sg) = dma_src;
647 sg_dma_len(&sg) = len; 646 sg_dma_len(&sg) = len;
648 647
649 return sh_dmae_prep_sg(sh_chan, &sg, 1, &dma_dest, DMA_BIDIRECTIONAL, 648 return sh_dmae_prep_sg(sh_chan, &sg, 1, &dma_dest, DMA_MEM_TO_MEM,
650 flags); 649 flags);
651} 650}
652 651
653static struct dma_async_tx_descriptor *sh_dmae_prep_slave_sg( 652static struct dma_async_tx_descriptor *sh_dmae_prep_slave_sg(
654 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, 653 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,
655 enum dma_data_direction direction, unsigned long flags) 654 enum dma_transfer_direction direction, unsigned long flags)
656{ 655{
657 struct sh_dmae_slave *param; 656 struct sh_dmae_slave *param;
658 struct sh_dmae_chan *sh_chan; 657 struct sh_dmae_chan *sh_chan;
@@ -996,7 +995,7 @@ static void dmae_do_tasklet(unsigned long data)
996 spin_lock_irq(&sh_chan->desc_lock); 995 spin_lock_irq(&sh_chan->desc_lock);
997 list_for_each_entry(desc, &sh_chan->ld_queue, node) { 996 list_for_each_entry(desc, &sh_chan->ld_queue, node) {
998 if (desc->mark == DESC_SUBMITTED && 997 if (desc->mark == DESC_SUBMITTED &&
999 ((desc->direction == DMA_FROM_DEVICE && 998 ((desc->direction == DMA_DEV_TO_MEM &&
1000 (desc->hw.dar + desc->hw.tcr) == dar_buf) || 999 (desc->hw.dar + desc->hw.tcr) == dar_buf) ||
1001 (desc->hw.sar + desc->hw.tcr) == sar_buf)) { 1000 (desc->hw.sar + desc->hw.tcr) == sar_buf)) {
1002 dev_dbg(sh_chan->dev, "done #%d@%p dst %u\n", 1001 dev_dbg(sh_chan->dev, "done #%d@%p dst %u\n",
diff --git a/drivers/dma/ste_dma40.c b/drivers/dma/ste_dma40.c
index 13259cad0ceb..15b311d54b74 100644
--- a/drivers/dma/ste_dma40.c
+++ b/drivers/dma/ste_dma40.c
@@ -217,7 +217,7 @@ struct d40_chan {
217 struct d40_log_lli_full *lcpa; 217 struct d40_log_lli_full *lcpa;
218 /* Runtime reconfiguration */ 218 /* Runtime reconfiguration */
219 dma_addr_t runtime_addr; 219 dma_addr_t runtime_addr;
220 enum dma_data_direction runtime_direction; 220 enum dma_transfer_direction runtime_direction;
221}; 221};
222 222
223/** 223/**
@@ -1855,7 +1855,7 @@ err:
1855} 1855}
1856 1856
1857static dma_addr_t 1857static dma_addr_t
1858d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction) 1858d40_get_dev_addr(struct d40_chan *chan, enum dma_transfer_direction direction)
1859{ 1859{
1860 struct stedma40_platform_data *plat = chan->base->plat_data; 1860 struct stedma40_platform_data *plat = chan->base->plat_data;
1861 struct stedma40_chan_cfg *cfg = &chan->dma_cfg; 1861 struct stedma40_chan_cfg *cfg = &chan->dma_cfg;
@@ -1864,9 +1864,9 @@ d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction)
1864 if (chan->runtime_addr) 1864 if (chan->runtime_addr)
1865 return chan->runtime_addr; 1865 return chan->runtime_addr;
1866 1866
1867 if (direction == DMA_FROM_DEVICE) 1867 if (direction == DMA_DEV_TO_MEM)
1868 addr = plat->dev_rx[cfg->src_dev_type]; 1868 addr = plat->dev_rx[cfg->src_dev_type];
1869 else if (direction == DMA_TO_DEVICE) 1869 else if (direction == DMA_MEM_TO_DEV)
1870 addr = plat->dev_tx[cfg->dst_dev_type]; 1870 addr = plat->dev_tx[cfg->dst_dev_type];
1871 1871
1872 return addr; 1872 return addr;
@@ -1875,7 +1875,7 @@ d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction)
1875static struct dma_async_tx_descriptor * 1875static struct dma_async_tx_descriptor *
1876d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src, 1876d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,
1877 struct scatterlist *sg_dst, unsigned int sg_len, 1877 struct scatterlist *sg_dst, unsigned int sg_len,
1878 enum dma_data_direction direction, unsigned long dma_flags) 1878 enum dma_transfer_direction direction, unsigned long dma_flags)
1879{ 1879{
1880 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan); 1880 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan);
1881 dma_addr_t src_dev_addr = 0; 1881 dma_addr_t src_dev_addr = 0;
@@ -1902,9 +1902,9 @@ d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,
1902 if (direction != DMA_NONE) { 1902 if (direction != DMA_NONE) {
1903 dma_addr_t dev_addr = d40_get_dev_addr(chan, direction); 1903 dma_addr_t dev_addr = d40_get_dev_addr(chan, direction);
1904 1904
1905 if (direction == DMA_FROM_DEVICE) 1905 if (direction == DMA_DEV_TO_MEM)
1906 src_dev_addr = dev_addr; 1906 src_dev_addr = dev_addr;
1907 else if (direction == DMA_TO_DEVICE) 1907 else if (direction == DMA_MEM_TO_DEV)
1908 dst_dev_addr = dev_addr; 1908 dst_dev_addr = dev_addr;
1909 } 1909 }
1910 1910
@@ -2108,10 +2108,10 @@ d40_prep_memcpy_sg(struct dma_chan *chan,
2108static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan, 2108static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
2109 struct scatterlist *sgl, 2109 struct scatterlist *sgl,
2110 unsigned int sg_len, 2110 unsigned int sg_len,
2111 enum dma_data_direction direction, 2111 enum dma_transfer_direction direction,
2112 unsigned long dma_flags) 2112 unsigned long dma_flags)
2113{ 2113{
2114 if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) 2114 if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV)
2115 return NULL; 2115 return NULL;
2116 2116
2117 return d40_prep_sg(chan, sgl, sgl, sg_len, direction, dma_flags); 2117 return d40_prep_sg(chan, sgl, sgl, sg_len, direction, dma_flags);
@@ -2120,7 +2120,7 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
2120static struct dma_async_tx_descriptor * 2120static struct dma_async_tx_descriptor *
2121dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr, 2121dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
2122 size_t buf_len, size_t period_len, 2122 size_t buf_len, size_t period_len,
2123 enum dma_data_direction direction) 2123 enum dma_transfer_direction direction)
2124{ 2124{
2125 unsigned int periods = buf_len / period_len; 2125 unsigned int periods = buf_len / period_len;
2126 struct dma_async_tx_descriptor *txd; 2126 struct dma_async_tx_descriptor *txd;
@@ -2269,7 +2269,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2269 dst_addr_width = config->dst_addr_width; 2269 dst_addr_width = config->dst_addr_width;
2270 dst_maxburst = config->dst_maxburst; 2270 dst_maxburst = config->dst_maxburst;
2271 2271
2272 if (config->direction == DMA_FROM_DEVICE) { 2272 if (config->direction == DMA_DEV_TO_MEM) {
2273 dma_addr_t dev_addr_rx = 2273 dma_addr_t dev_addr_rx =
2274 d40c->base->plat_data->dev_rx[cfg->src_dev_type]; 2274 d40c->base->plat_data->dev_rx[cfg->src_dev_type];
2275 2275
@@ -2292,7 +2292,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2292 if (dst_maxburst == 0) 2292 if (dst_maxburst == 0)
2293 dst_maxburst = src_maxburst; 2293 dst_maxburst = src_maxburst;
2294 2294
2295 } else if (config->direction == DMA_TO_DEVICE) { 2295 } else if (config->direction == DMA_MEM_TO_DEV) {
2296 dma_addr_t dev_addr_tx = 2296 dma_addr_t dev_addr_tx =
2297 d40c->base->plat_data->dev_tx[cfg->dst_dev_type]; 2297 d40c->base->plat_data->dev_tx[cfg->dst_dev_type];
2298 2298
@@ -2357,7 +2357,7 @@ static int d40_set_runtime_config(struct dma_chan *chan,
2357 "configured channel %s for %s, data width %d/%d, " 2357 "configured channel %s for %s, data width %d/%d, "
2358 "maxburst %d/%d elements, LE, no flow control\n", 2358 "maxburst %d/%d elements, LE, no flow control\n",
2359 dma_chan_name(chan), 2359 dma_chan_name(chan),
2360 (config->direction == DMA_FROM_DEVICE) ? "RX" : "TX", 2360 (config->direction == DMA_DEV_TO_MEM) ? "RX" : "TX",
2361 src_addr_width, dst_addr_width, 2361 src_addr_width, dst_addr_width,
2362 src_maxburst, dst_maxburst); 2362 src_maxburst, dst_maxburst);
2363 2363
diff --git a/drivers/dma/timb_dma.c b/drivers/dma/timb_dma.c
index a4a398f2ef61..8c880729b094 100644
--- a/drivers/dma/timb_dma.c
+++ b/drivers/dma/timb_dma.c
@@ -90,7 +90,7 @@ struct timb_dma_chan {
90 struct list_head queue; 90 struct list_head queue;
91 struct list_head free_list; 91 struct list_head free_list;
92 unsigned int bytes_per_line; 92 unsigned int bytes_per_line;
93 enum dma_data_direction direction; 93 enum dma_transfer_direction direction;
94 unsigned int descs; /* Descriptors to allocate */ 94 unsigned int descs; /* Descriptors to allocate */
95 unsigned int desc_elems; /* number of elems per descriptor */ 95 unsigned int desc_elems; /* number of elems per descriptor */
96}; 96};
@@ -235,7 +235,7 @@ static void __td_start_dma(struct timb_dma_chan *td_chan)
235 "td_chan: %p, chan: %d, membase: %p\n", 235 "td_chan: %p, chan: %d, membase: %p\n",
236 td_chan, td_chan->chan.chan_id, td_chan->membase); 236 td_chan, td_chan->chan.chan_id, td_chan->membase);
237 237
238 if (td_chan->direction == DMA_FROM_DEVICE) { 238 if (td_chan->direction == DMA_DEV_TO_MEM) {
239 239
240 /* descriptor address */ 240 /* descriptor address */
241 iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_DHAR); 241 iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_DHAR);
@@ -278,7 +278,7 @@ static void __td_finish(struct timb_dma_chan *td_chan)
278 txd->cookie); 278 txd->cookie);
279 279
280 /* make sure to stop the transfer */ 280 /* make sure to stop the transfer */
281 if (td_chan->direction == DMA_FROM_DEVICE) 281 if (td_chan->direction == DMA_DEV_TO_MEM)
282 iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_ER); 282 iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_ER);
283/* Currently no support for stopping DMA transfers 283/* Currently no support for stopping DMA transfers
284 else 284 else
@@ -398,7 +398,7 @@ static struct timb_dma_desc *td_alloc_init_desc(struct timb_dma_chan *td_chan)
398 td_desc->txd.flags = DMA_CTRL_ACK; 398 td_desc->txd.flags = DMA_CTRL_ACK;
399 399
400 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), 400 td_desc->txd.phys = dma_map_single(chan2dmadev(chan),
401 td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE); 401 td_desc->desc_list, td_desc->desc_list_len, DMA_MEM_TO_DEV);
402 402
403 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); 403 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys);
404 if (err) { 404 if (err) {
@@ -419,7 +419,7 @@ static void td_free_desc(struct timb_dma_desc *td_desc)
419{ 419{
420 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); 420 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc);
421 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, 421 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys,
422 td_desc->desc_list_len, DMA_TO_DEVICE); 422 td_desc->desc_list_len, DMA_MEM_TO_DEV);
423 423
424 kfree(td_desc->desc_list); 424 kfree(td_desc->desc_list);
425 kfree(td_desc); 425 kfree(td_desc);
@@ -558,7 +558,7 @@ static void td_issue_pending(struct dma_chan *chan)
558 558
559static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan, 559static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan,
560 struct scatterlist *sgl, unsigned int sg_len, 560 struct scatterlist *sgl, unsigned int sg_len,
561 enum dma_data_direction direction, unsigned long flags) 561 enum dma_transfer_direction direction, unsigned long flags)
562{ 562{
563 struct timb_dma_chan *td_chan = 563 struct timb_dma_chan *td_chan =
564 container_of(chan, struct timb_dma_chan, chan); 564 container_of(chan, struct timb_dma_chan, chan);
@@ -606,7 +606,7 @@ static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan,
606 } 606 }
607 607
608 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys, 608 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
609 td_desc->desc_list_len, DMA_TO_DEVICE); 609 td_desc->desc_list_len, DMA_MEM_TO_DEV);
610 610
611 return &td_desc->txd; 611 return &td_desc->txd;
612} 612}
@@ -775,8 +775,8 @@ static int __devinit td_probe(struct platform_device *pdev)
775 td_chan->descs = pchan->descriptors; 775 td_chan->descs = pchan->descriptors;
776 td_chan->desc_elems = pchan->descriptor_elements; 776 td_chan->desc_elems = pchan->descriptor_elements;
777 td_chan->bytes_per_line = pchan->bytes_per_line; 777 td_chan->bytes_per_line = pchan->bytes_per_line;
778 td_chan->direction = pchan->rx ? DMA_FROM_DEVICE : 778 td_chan->direction = pchan->rx ? DMA_DEV_TO_MEM :
779 DMA_TO_DEVICE; 779 DMA_MEM_TO_DEV;
780 780
781 td_chan->membase = td->membase + 781 td_chan->membase = td->membase +
782 (i / 2) * TIMBDMA_INSTANCE_OFFSET + 782 (i / 2) * TIMBDMA_INSTANCE_OFFSET +
diff --git a/drivers/dma/txx9dmac.c b/drivers/dma/txx9dmac.c
index cbd83e362b5e..6122c364cf11 100644
--- a/drivers/dma/txx9dmac.c
+++ b/drivers/dma/txx9dmac.c
@@ -845,7 +845,7 @@ txx9dmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
845 845
846static struct dma_async_tx_descriptor * 846static struct dma_async_tx_descriptor *
847txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, 847txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
848 unsigned int sg_len, enum dma_data_direction direction, 848 unsigned int sg_len, enum dma_transfer_direction direction,
849 unsigned long flags) 849 unsigned long flags)
850{ 850{
851 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); 851 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan);
@@ -860,9 +860,9 @@ txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
860 860
861 BUG_ON(!ds || !ds->reg_width); 861 BUG_ON(!ds || !ds->reg_width);
862 if (ds->tx_reg) 862 if (ds->tx_reg)
863 BUG_ON(direction != DMA_TO_DEVICE); 863 BUG_ON(direction != DMA_MEM_TO_DEV);
864 else 864 else
865 BUG_ON(direction != DMA_FROM_DEVICE); 865 BUG_ON(direction != DMA_DEV_TO_MEM);
866 if (unlikely(!sg_len)) 866 if (unlikely(!sg_len))
867 return NULL; 867 return NULL;
868 868
@@ -882,7 +882,7 @@ txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
882 mem = sg_dma_address(sg); 882 mem = sg_dma_address(sg);
883 883
884 if (__is_dmac64(ddev)) { 884 if (__is_dmac64(ddev)) {
885 if (direction == DMA_TO_DEVICE) { 885 if (direction == DMA_MEM_TO_DEV) {
886 desc->hwdesc.SAR = mem; 886 desc->hwdesc.SAR = mem;
887 desc->hwdesc.DAR = ds->tx_reg; 887 desc->hwdesc.DAR = ds->tx_reg;
888 } else { 888 } else {
@@ -891,7 +891,7 @@ txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
891 } 891 }
892 desc->hwdesc.CNTR = sg_dma_len(sg); 892 desc->hwdesc.CNTR = sg_dma_len(sg);
893 } else { 893 } else {
894 if (direction == DMA_TO_DEVICE) { 894 if (direction == DMA_MEM_TO_DEV) {
895 desc->hwdesc32.SAR = mem; 895 desc->hwdesc32.SAR = mem;
896 desc->hwdesc32.DAR = ds->tx_reg; 896 desc->hwdesc32.DAR = ds->tx_reg;
897 } else { 897 } else {
@@ -900,7 +900,7 @@ txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
900 } 900 }
901 desc->hwdesc32.CNTR = sg_dma_len(sg); 901 desc->hwdesc32.CNTR = sg_dma_len(sg);
902 } 902 }
903 if (direction == DMA_TO_DEVICE) { 903 if (direction == DMA_MEM_TO_DEV) {
904 sai = ds->reg_width; 904 sai = ds->reg_width;
905 dai = 0; 905 dai = 0;
906 } else { 906 } else {
diff --git a/drivers/media/video/mx3_camera.c b/drivers/media/video/mx3_camera.c
index f96f92f00f92..dec2419b9456 100644
--- a/drivers/media/video/mx3_camera.c
+++ b/drivers/media/video/mx3_camera.c
@@ -287,7 +287,7 @@ static void mx3_videobuf_queue(struct vb2_buffer *vb)
287 sg_dma_len(sg) = new_size; 287 sg_dma_len(sg) = new_size;
288 288
289 txd = ichan->dma_chan.device->device_prep_slave_sg( 289 txd = ichan->dma_chan.device->device_prep_slave_sg(
290 &ichan->dma_chan, sg, 1, DMA_FROM_DEVICE, 290 &ichan->dma_chan, sg, 1, DMA_DEV_TO_MEM,
291 DMA_PREP_INTERRUPT); 291 DMA_PREP_INTERRUPT);
292 if (!txd) 292 if (!txd)
293 goto error; 293 goto error;
diff --git a/drivers/media/video/timblogiw.c b/drivers/media/video/timblogiw.c
index a0895bf07487..6876f7e471da 100644
--- a/drivers/media/video/timblogiw.c
+++ b/drivers/media/video/timblogiw.c
@@ -565,7 +565,7 @@ static void buffer_queue(struct videobuf_queue *vq, struct videobuf_buffer *vb)
565 spin_unlock_irq(&fh->queue_lock); 565 spin_unlock_irq(&fh->queue_lock);
566 566
567 desc = fh->chan->device->device_prep_slave_sg(fh->chan, 567 desc = fh->chan->device->device_prep_slave_sg(fh->chan,
568 buf->sg, sg_elems, DMA_FROM_DEVICE, 568 buf->sg, sg_elems, DMA_DEV_TO_MEM,
569 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP); 569 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP);
570 if (!desc) { 570 if (!desc) {
571 spin_lock_irq(&fh->queue_lock); 571 spin_lock_irq(&fh->queue_lock);
diff --git a/drivers/misc/carma/carma-fpga-program.c b/drivers/misc/carma/carma-fpga-program.c
index 7ce6065dc20e..20ef1eac5599 100644
--- a/drivers/misc/carma/carma-fpga-program.c
+++ b/drivers/misc/carma/carma-fpga-program.c
@@ -513,7 +513,7 @@ static noinline int fpga_program_dma(struct fpga_dev *priv)
513 * transaction, and then put it under external control 513 * transaction, and then put it under external control
514 */ 514 */
515 memset(&config, 0, sizeof(config)); 515 memset(&config, 0, sizeof(config));
516 config.direction = DMA_TO_DEVICE; 516 config.direction = DMA_MEM_TO_DEV;
517 config.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 517 config.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
518 config.dst_maxburst = fpga_fifo_size(priv->regs) / 2 / 4; 518 config.dst_maxburst = fpga_fifo_size(priv->regs) / 2 / 4;
519 ret = chan->device->device_control(chan, DMA_SLAVE_CONFIG, 519 ret = chan->device->device_control(chan, DMA_SLAVE_CONFIG,
diff --git a/drivers/mmc/host/atmel-mci.c b/drivers/mmc/host/atmel-mci.c
index a7ee50271465..fcfe1eb5acc8 100644
--- a/drivers/mmc/host/atmel-mci.c
+++ b/drivers/mmc/host/atmel-mci.c
@@ -823,6 +823,7 @@ atmci_prepare_data_dma(struct atmel_mci *host, struct mmc_data *data)
823 struct scatterlist *sg; 823 struct scatterlist *sg;
824 unsigned int i; 824 unsigned int i;
825 enum dma_data_direction direction; 825 enum dma_data_direction direction;
826 enum dma_transfer_direction slave_dirn;
826 unsigned int sglen; 827 unsigned int sglen;
827 u32 iflags; 828 u32 iflags;
828 829
@@ -860,16 +861,19 @@ atmci_prepare_data_dma(struct atmel_mci *host, struct mmc_data *data)
860 if (host->caps.has_dma) 861 if (host->caps.has_dma)
861 atmci_writel(host, ATMCI_DMA, ATMCI_DMA_CHKSIZE(3) | ATMCI_DMAEN); 862 atmci_writel(host, ATMCI_DMA, ATMCI_DMA_CHKSIZE(3) | ATMCI_DMAEN);
862 863
863 if (data->flags & MMC_DATA_READ) 864 if (data->flags & MMC_DATA_READ) {
864 direction = DMA_FROM_DEVICE; 865 direction = DMA_FROM_DEVICE;
865 else 866 slave_dirn = DMA_DEV_TO_MEM;
867 } else {
866 direction = DMA_TO_DEVICE; 868 direction = DMA_TO_DEVICE;
869 slave_dirn = DMA_MEM_TO_DEV;
870 }
867 871
868 sglen = dma_map_sg(chan->device->dev, data->sg, 872 sglen = dma_map_sg(chan->device->dev, data->sg,
869 data->sg_len, direction); 873 data->sg_len, direction);
870 874
871 desc = chan->device->device_prep_slave_sg(chan, 875 desc = chan->device->device_prep_slave_sg(chan,
872 data->sg, sglen, direction, 876 data->sg, sglen, slave_dirn,
873 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 877 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
874 if (!desc) 878 if (!desc)
875 goto unmap_exit; 879 goto unmap_exit;
diff --git a/drivers/mmc/host/mmci.c b/drivers/mmc/host/mmci.c
index 50b5f9926f64..0b44d6bbe5d7 100644
--- a/drivers/mmc/host/mmci.c
+++ b/drivers/mmc/host/mmci.c
@@ -374,6 +374,7 @@ static int mmci_dma_prep_data(struct mmci_host *host, struct mmc_data *data,
374 struct dma_chan *chan; 374 struct dma_chan *chan;
375 struct dma_device *device; 375 struct dma_device *device;
376 struct dma_async_tx_descriptor *desc; 376 struct dma_async_tx_descriptor *desc;
377 enum dma_data_direction buffer_dirn;
377 int nr_sg; 378 int nr_sg;
378 379
379 /* Check if next job is already prepared */ 380 /* Check if next job is already prepared */
@@ -387,10 +388,12 @@ static int mmci_dma_prep_data(struct mmci_host *host, struct mmc_data *data,
387 } 388 }
388 389
389 if (data->flags & MMC_DATA_READ) { 390 if (data->flags & MMC_DATA_READ) {
390 conf.direction = DMA_FROM_DEVICE; 391 conf.direction = DMA_DEV_TO_MEM;
392 buffer_dirn = DMA_FROM_DEVICE;
391 chan = host->dma_rx_channel; 393 chan = host->dma_rx_channel;
392 } else { 394 } else {
393 conf.direction = DMA_TO_DEVICE; 395 conf.direction = DMA_MEM_TO_DEV;
396 buffer_dirn = DMA_TO_DEVICE;
394 chan = host->dma_tx_channel; 397 chan = host->dma_tx_channel;
395 } 398 }
396 399
@@ -403,7 +406,7 @@ static int mmci_dma_prep_data(struct mmci_host *host, struct mmc_data *data,
403 return -EINVAL; 406 return -EINVAL;
404 407
405 device = chan->device; 408 device = chan->device;
406 nr_sg = dma_map_sg(device->dev, data->sg, data->sg_len, conf.direction); 409 nr_sg = dma_map_sg(device->dev, data->sg, data->sg_len, buffer_dirn);
407 if (nr_sg == 0) 410 if (nr_sg == 0)
408 return -EINVAL; 411 return -EINVAL;
409 412
@@ -426,7 +429,7 @@ static int mmci_dma_prep_data(struct mmci_host *host, struct mmc_data *data,
426 unmap_exit: 429 unmap_exit:
427 if (!next) 430 if (!next)
428 dmaengine_terminate_all(chan); 431 dmaengine_terminate_all(chan);
429 dma_unmap_sg(device->dev, data->sg, data->sg_len, conf.direction); 432 dma_unmap_sg(device->dev, data->sg, data->sg_len, buffer_dirn);
430 return -ENOMEM; 433 return -ENOMEM;
431} 434}
432 435
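
The MMC host conversions (mmci above, atmel-mci before it, mxcmmc and mxs-mmc below) all separate the two meanings that previously shared one enum: a dma_data_direction value is kept for dma_map_sg()/dma_unmap_sg(), while a distinct dma_transfer_direction value is handed to the slave configuration and the prep call. Condensed from the mmci and atmel-mci changes above, the pattern is roughly:

        enum dma_data_direction buffer_dirn;   /* cache-coherency mapping */
        enum dma_transfer_direction slave_dirn; /* dmaengine transfer topology */

        if (data->flags & MMC_DATA_READ) {
                buffer_dirn = DMA_FROM_DEVICE;
                slave_dirn = DMA_DEV_TO_MEM;
        } else {
                buffer_dirn = DMA_TO_DEVICE;
                slave_dirn = DMA_MEM_TO_DEV;
        }

        nr_sg = dma_map_sg(device->dev, data->sg, data->sg_len, buffer_dirn);
        desc = device->device_prep_slave_sg(chan, data->sg, nr_sg, slave_dirn,
                                            DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
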
diff --git a/drivers/mmc/host/mxcmmc.c b/drivers/mmc/host/mxcmmc.c
index 325ea61e12d3..a17bc1217859 100644
--- a/drivers/mmc/host/mxcmmc.c
+++ b/drivers/mmc/host/mxcmmc.c
@@ -218,6 +218,7 @@ static int mxcmci_setup_data(struct mxcmci_host *host, struct mmc_data *data)
218 unsigned int blksz = data->blksz; 218 unsigned int blksz = data->blksz;
219 unsigned int datasize = nob * blksz; 219 unsigned int datasize = nob * blksz;
220 struct scatterlist *sg; 220 struct scatterlist *sg;
221 enum dma_transfer_direction slave_dirn;
221 int i, nents; 222 int i, nents;
222 223
223 if (data->flags & MMC_DATA_STREAM) 224 if (data->flags & MMC_DATA_STREAM)
@@ -240,10 +241,13 @@ static int mxcmci_setup_data(struct mxcmci_host *host, struct mmc_data *data)
240 } 241 }
241 } 242 }
242 243
243 if (data->flags & MMC_DATA_READ) 244 if (data->flags & MMC_DATA_READ) {
244 host->dma_dir = DMA_FROM_DEVICE; 245 host->dma_dir = DMA_FROM_DEVICE;
245 else 246 slave_dirn = DMA_DEV_TO_MEM;
247 } else {
246 host->dma_dir = DMA_TO_DEVICE; 248 host->dma_dir = DMA_TO_DEVICE;
249 slave_dirn = DMA_MEM_TO_DEV;
250 }
247 251
248 nents = dma_map_sg(host->dma->device->dev, data->sg, 252 nents = dma_map_sg(host->dma->device->dev, data->sg,
249 data->sg_len, host->dma_dir); 253 data->sg_len, host->dma_dir);
@@ -251,7 +255,7 @@ static int mxcmci_setup_data(struct mxcmci_host *host, struct mmc_data *data)
251 return -EINVAL; 255 return -EINVAL;
252 256
253 host->desc = host->dma->device->device_prep_slave_sg(host->dma, 257 host->desc = host->dma->device->device_prep_slave_sg(host->dma,
254 data->sg, data->sg_len, host->dma_dir, 258 data->sg, data->sg_len, slave_dirn,
255 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 259 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
256 260
257 if (!host->desc) { 261 if (!host->desc) {
diff --git a/drivers/mmc/host/mxs-mmc.c b/drivers/mmc/host/mxs-mmc.c
index 99b449d26a4d..0a878b181341 100644
--- a/drivers/mmc/host/mxs-mmc.c
+++ b/drivers/mmc/host/mxs-mmc.c
@@ -154,6 +154,7 @@ struct mxs_mmc_host {
154 struct dma_chan *dmach; 154 struct dma_chan *dmach;
155 struct mxs_dma_data dma_data; 155 struct mxs_dma_data dma_data;
156 unsigned int dma_dir; 156 unsigned int dma_dir;
157 enum dma_transfer_direction slave_dirn;
157 u32 ssp_pio_words[SSP_PIO_NUM]; 158 u32 ssp_pio_words[SSP_PIO_NUM];
158 159
159 unsigned int version; 160 unsigned int version;
@@ -324,7 +325,7 @@ static struct dma_async_tx_descriptor *mxs_mmc_prep_dma(
324 } 325 }
325 326
326 desc = host->dmach->device->device_prep_slave_sg(host->dmach, 327 desc = host->dmach->device->device_prep_slave_sg(host->dmach,
327 sgl, sg_len, host->dma_dir, append); 328 sgl, sg_len, host->slave_dirn, append);
328 if (desc) { 329 if (desc) {
329 desc->callback = mxs_mmc_dma_irq_callback; 330 desc->callback = mxs_mmc_dma_irq_callback;
330 desc->callback_param = host; 331 desc->callback_param = host;
@@ -433,6 +434,7 @@ static void mxs_mmc_adtc(struct mxs_mmc_host *host)
433 int i; 434 int i;
434 435
435 unsigned short dma_data_dir, timeout; 436 unsigned short dma_data_dir, timeout;
437 enum dma_transfer_direction slave_dirn;
436 unsigned int data_size = 0, log2_blksz; 438 unsigned int data_size = 0, log2_blksz;
437 unsigned int blocks = data->blocks; 439 unsigned int blocks = data->blocks;
438 440
@@ -448,9 +450,11 @@ static void mxs_mmc_adtc(struct mxs_mmc_host *host)
448 450
449 if (data->flags & MMC_DATA_WRITE) { 451 if (data->flags & MMC_DATA_WRITE) {
450 dma_data_dir = DMA_TO_DEVICE; 452 dma_data_dir = DMA_TO_DEVICE;
453 slave_dirn = DMA_MEM_TO_DEV;
451 read = 0; 454 read = 0;
452 } else { 455 } else {
453 dma_data_dir = DMA_FROM_DEVICE; 456 dma_data_dir = DMA_FROM_DEVICE;
457 slave_dirn = DMA_DEV_TO_MEM;
454 read = BM_SSP_CTRL0_READ; 458 read = BM_SSP_CTRL0_READ;
455 } 459 }
456 460
@@ -518,6 +522,7 @@ static void mxs_mmc_adtc(struct mxs_mmc_host *host)
518 WARN_ON(host->data != NULL); 522 WARN_ON(host->data != NULL);
519 host->data = data; 523 host->data = data;
520 host->dma_dir = dma_data_dir; 524 host->dma_dir = dma_data_dir;
525 host->slave_dirn = slave_dirn;
521 desc = mxs_mmc_prep_dma(host, 1); 526 desc = mxs_mmc_prep_dma(host, 1);
522 if (!desc) 527 if (!desc)
523 goto out; 528 goto out;
diff --git a/drivers/mmc/host/sh_mmcif.c b/drivers/mmc/host/sh_mmcif.c
index 369366c8e205..1c0c10bd2d3b 100644
--- a/drivers/mmc/host/sh_mmcif.c
+++ b/drivers/mmc/host/sh_mmcif.c
@@ -233,7 +233,7 @@ static void sh_mmcif_start_dma_rx(struct sh_mmcif_host *host)
233 if (ret > 0) { 233 if (ret > 0) {
234 host->dma_active = true; 234 host->dma_active = true;
235 desc = chan->device->device_prep_slave_sg(chan, sg, ret, 235 desc = chan->device->device_prep_slave_sg(chan, sg, ret,
236 DMA_FROM_DEVICE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 236 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
237 } 237 }
238 238
239 if (desc) { 239 if (desc) {
@@ -281,7 +281,7 @@ static void sh_mmcif_start_dma_tx(struct sh_mmcif_host *host)
281 if (ret > 0) { 281 if (ret > 0) {
282 host->dma_active = true; 282 host->dma_active = true;
283 desc = chan->device->device_prep_slave_sg(chan, sg, ret, 283 desc = chan->device->device_prep_slave_sg(chan, sg, ret,
284 DMA_TO_DEVICE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 284 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
285 } 285 }
286 286
287 if (desc) { 287 if (desc) {
diff --git a/drivers/mmc/host/tmio_mmc_dma.c b/drivers/mmc/host/tmio_mmc_dma.c
index 86f259cdfcbc..7a6e6cc8f8b8 100644
--- a/drivers/mmc/host/tmio_mmc_dma.c
+++ b/drivers/mmc/host/tmio_mmc_dma.c
@@ -77,7 +77,7 @@ static void tmio_mmc_start_dma_rx(struct tmio_mmc_host *host)
77 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); 77 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE);
78 if (ret > 0) 78 if (ret > 0)
79 desc = chan->device->device_prep_slave_sg(chan, sg, ret, 79 desc = chan->device->device_prep_slave_sg(chan, sg, ret,
80 DMA_FROM_DEVICE, DMA_CTRL_ACK); 80 DMA_DEV_TO_MEM, DMA_CTRL_ACK);
81 81
82 if (desc) { 82 if (desc) {
83 cookie = dmaengine_submit(desc); 83 cookie = dmaengine_submit(desc);
@@ -158,7 +158,7 @@ static void tmio_mmc_start_dma_tx(struct tmio_mmc_host *host)
158 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE); 158 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE);
159 if (ret > 0) 159 if (ret > 0)
160 desc = chan->device->device_prep_slave_sg(chan, sg, ret, 160 desc = chan->device->device_prep_slave_sg(chan, sg, ret,
161 DMA_TO_DEVICE, DMA_CTRL_ACK); 161 DMA_MEM_TO_DEV, DMA_CTRL_ACK);
162 162
163 if (desc) { 163 if (desc) {
164 cookie = dmaengine_submit(desc); 164 cookie = dmaengine_submit(desc);
diff --git a/drivers/net/ethernet/micrel/ks8842.c b/drivers/net/ethernet/micrel/ks8842.c
index 4a6ae057e3b1..de9f2e205962 100644
--- a/drivers/net/ethernet/micrel/ks8842.c
+++ b/drivers/net/ethernet/micrel/ks8842.c
@@ -459,7 +459,7 @@ static int ks8842_tx_frame_dma(struct sk_buff *skb, struct net_device *netdev)
459 sg_dma_len(&ctl->sg) += 4 - sg_dma_len(&ctl->sg) % 4; 459 sg_dma_len(&ctl->sg) += 4 - sg_dma_len(&ctl->sg) % 4;
460 460
461 ctl->adesc = ctl->chan->device->device_prep_slave_sg(ctl->chan, 461 ctl->adesc = ctl->chan->device->device_prep_slave_sg(ctl->chan,
462 &ctl->sg, 1, DMA_TO_DEVICE, 462 &ctl->sg, 1, DMA_MEM_TO_DEV,
463 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP); 463 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP);
464 if (!ctl->adesc) 464 if (!ctl->adesc)
465 return NETDEV_TX_BUSY; 465 return NETDEV_TX_BUSY;
@@ -571,7 +571,7 @@ static int __ks8842_start_new_rx_dma(struct net_device *netdev)
571 sg_dma_len(sg) = DMA_BUFFER_SIZE; 571 sg_dma_len(sg) = DMA_BUFFER_SIZE;
572 572
573 ctl->adesc = ctl->chan->device->device_prep_slave_sg(ctl->chan, 573 ctl->adesc = ctl->chan->device->device_prep_slave_sg(ctl->chan,
574 sg, 1, DMA_FROM_DEVICE, 574 sg, 1, DMA_DEV_TO_MEM,
575 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP); 575 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_SRC_UNMAP);
576 576
577 if (!ctl->adesc) 577 if (!ctl->adesc)
diff --git a/drivers/spi/spi-dw-mid.c b/drivers/spi/spi-dw-mid.c
index e743a45ee92c..8418eb036651 100644
--- a/drivers/spi/spi-dw-mid.c
+++ b/drivers/spi/spi-dw-mid.c
@@ -131,7 +131,7 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
131 rxchan = dws->rxchan; 131 rxchan = dws->rxchan;
132 132
133 /* 2. Prepare the TX dma transfer */ 133 /* 2. Prepare the TX dma transfer */
134 txconf.direction = DMA_TO_DEVICE; 134 txconf.direction = DMA_MEM_TO_DEV;
135 txconf.dst_addr = dws->dma_addr; 135 txconf.dst_addr = dws->dma_addr;
136 txconf.dst_maxburst = LNW_DMA_MSIZE_16; 136 txconf.dst_maxburst = LNW_DMA_MSIZE_16;
137 txconf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 137 txconf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
@@ -147,13 +147,13 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
147 txdesc = txchan->device->device_prep_slave_sg(txchan, 147 txdesc = txchan->device->device_prep_slave_sg(txchan,
148 &dws->tx_sgl, 148 &dws->tx_sgl,
149 1, 149 1,
150 DMA_TO_DEVICE, 150 DMA_MEM_TO_DEV,
151 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_DEST_UNMAP); 151 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_DEST_UNMAP);
152 txdesc->callback = dw_spi_dma_done; 152 txdesc->callback = dw_spi_dma_done;
153 txdesc->callback_param = dws; 153 txdesc->callback_param = dws;
154 154
155 /* 3. Prepare the RX dma transfer */ 155 /* 3. Prepare the RX dma transfer */
156 rxconf.direction = DMA_FROM_DEVICE; 156 rxconf.direction = DMA_DEV_TO_MEM;
157 rxconf.src_addr = dws->dma_addr; 157 rxconf.src_addr = dws->dma_addr;
158 rxconf.src_maxburst = LNW_DMA_MSIZE_16; 158 rxconf.src_maxburst = LNW_DMA_MSIZE_16;
159 rxconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 159 rxconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
@@ -169,7 +169,7 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
169 rxdesc = rxchan->device->device_prep_slave_sg(rxchan, 169 rxdesc = rxchan->device->device_prep_slave_sg(rxchan,
170 &dws->rx_sgl, 170 &dws->rx_sgl,
171 1, 171 1,
172 DMA_FROM_DEVICE, 172 DMA_DEV_TO_MEM,
173 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_DEST_UNMAP); 173 DMA_PREP_INTERRUPT | DMA_COMPL_SKIP_DEST_UNMAP);
174 rxdesc->callback = dw_spi_dma_done; 174 rxdesc->callback = dw_spi_dma_done;
175 rxdesc->callback_param = dws; 175 rxdesc->callback_param = dws;
diff --git a/drivers/spi/spi-ep93xx.c b/drivers/spi/spi-ep93xx.c
index 0a282e5fcc9c..d46e55c720b7 100644
--- a/drivers/spi/spi-ep93xx.c
+++ b/drivers/spi/spi-ep93xx.c
@@ -551,6 +551,7 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
551 struct dma_async_tx_descriptor *txd; 551 struct dma_async_tx_descriptor *txd;
552 enum dma_slave_buswidth buswidth; 552 enum dma_slave_buswidth buswidth;
553 struct dma_slave_config conf; 553 struct dma_slave_config conf;
554 enum dma_transfer_direction slave_dirn;
554 struct scatterlist *sg; 555 struct scatterlist *sg;
555 struct sg_table *sgt; 556 struct sg_table *sgt;
556 struct dma_chan *chan; 557 struct dma_chan *chan;
@@ -573,6 +574,7 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
573 574
574 conf.src_addr = espi->sspdr_phys; 575 conf.src_addr = espi->sspdr_phys;
575 conf.src_addr_width = buswidth; 576 conf.src_addr_width = buswidth;
577 slave_dirn = DMA_DEV_TO_MEM;
576 } else { 578 } else {
577 chan = espi->dma_tx; 579 chan = espi->dma_tx;
578 buf = t->tx_buf; 580 buf = t->tx_buf;
@@ -580,6 +582,7 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
580 582
581 conf.dst_addr = espi->sspdr_phys; 583 conf.dst_addr = espi->sspdr_phys;
582 conf.dst_addr_width = buswidth; 584 conf.dst_addr_width = buswidth;
585 slave_dirn = DMA_MEM_TO_DEV;
583 } 586 }
584 587
585 ret = dmaengine_slave_config(chan, &conf); 588 ret = dmaengine_slave_config(chan, &conf);
@@ -631,7 +634,7 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
631 return ERR_PTR(-ENOMEM); 634 return ERR_PTR(-ENOMEM);
632 635
633 txd = chan->device->device_prep_slave_sg(chan, sgt->sgl, nents, 636 txd = chan->device->device_prep_slave_sg(chan, sgt->sgl, nents,
634 dir, DMA_CTRL_ACK); 637 slave_dirn, DMA_CTRL_ACK);
635 if (!txd) { 638 if (!txd) {
636 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); 639 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
637 return ERR_PTR(-ENOMEM); 640 return ERR_PTR(-ENOMEM);
@@ -979,7 +982,7 @@ static int ep93xx_spi_setup_dma(struct ep93xx_spi *espi)
979 dma_cap_set(DMA_SLAVE, mask); 982 dma_cap_set(DMA_SLAVE, mask);
980 983
981 espi->dma_rx_data.port = EP93XX_DMA_SSP; 984 espi->dma_rx_data.port = EP93XX_DMA_SSP;
982 espi->dma_rx_data.direction = DMA_FROM_DEVICE; 985 espi->dma_rx_data.direction = DMA_DEV_TO_MEM;
983 espi->dma_rx_data.name = "ep93xx-spi-rx"; 986 espi->dma_rx_data.name = "ep93xx-spi-rx";
984 987
985 espi->dma_rx = dma_request_channel(mask, ep93xx_spi_dma_filter, 988 espi->dma_rx = dma_request_channel(mask, ep93xx_spi_dma_filter,
@@ -990,7 +993,7 @@ static int ep93xx_spi_setup_dma(struct ep93xx_spi *espi)
990 } 993 }
991 994
992 espi->dma_tx_data.port = EP93XX_DMA_SSP; 995 espi->dma_tx_data.port = EP93XX_DMA_SSP;
993 espi->dma_tx_data.direction = DMA_TO_DEVICE; 996 espi->dma_tx_data.direction = DMA_MEM_TO_DEV;
994 espi->dma_tx_data.name = "ep93xx-spi-tx"; 997 espi->dma_tx_data.name = "ep93xx-spi-tx";
995 998
996 espi->dma_tx = dma_request_channel(mask, ep93xx_spi_dma_filter, 999 espi->dma_tx = dma_request_channel(mask, ep93xx_spi_dma_filter,
diff --git a/drivers/spi/spi-pl022.c b/drivers/spi/spi-pl022.c
index f103e470cb63..95400fa99d90 100644
--- a/drivers/spi/spi-pl022.c
+++ b/drivers/spi/spi-pl022.c
@@ -904,11 +904,11 @@ static int configure_dma(struct pl022 *pl022)
904{ 904{
905 struct dma_slave_config rx_conf = { 905 struct dma_slave_config rx_conf = {
906 .src_addr = SSP_DR(pl022->phybase), 906 .src_addr = SSP_DR(pl022->phybase),
907 .direction = DMA_FROM_DEVICE, 907 .direction = DMA_DEV_TO_MEM,
908 }; 908 };
909 struct dma_slave_config tx_conf = { 909 struct dma_slave_config tx_conf = {
910 .dst_addr = SSP_DR(pl022->phybase), 910 .dst_addr = SSP_DR(pl022->phybase),
911 .direction = DMA_TO_DEVICE, 911 .direction = DMA_MEM_TO_DEV,
912 }; 912 };
913 unsigned int pages; 913 unsigned int pages;
914 int ret; 914 int ret;
@@ -1045,7 +1045,7 @@ static int configure_dma(struct pl022 *pl022)
1045 rxdesc = rxchan->device->device_prep_slave_sg(rxchan, 1045 rxdesc = rxchan->device->device_prep_slave_sg(rxchan,
1046 pl022->sgt_rx.sgl, 1046 pl022->sgt_rx.sgl,
1047 rx_sglen, 1047 rx_sglen,
1048 DMA_FROM_DEVICE, 1048 DMA_DEV_TO_MEM,
1049 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1049 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1050 if (!rxdesc) 1050 if (!rxdesc)
1051 goto err_rxdesc; 1051 goto err_rxdesc;
@@ -1053,7 +1053,7 @@ static int configure_dma(struct pl022 *pl022)
1053 txdesc = txchan->device->device_prep_slave_sg(txchan, 1053 txdesc = txchan->device->device_prep_slave_sg(txchan,
1054 pl022->sgt_tx.sgl, 1054 pl022->sgt_tx.sgl,
1055 tx_sglen, 1055 tx_sglen,
1056 DMA_TO_DEVICE, 1056 DMA_MEM_TO_DEV,
1057 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1057 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1058 if (!txdesc) 1058 if (!txdesc)
1059 goto err_txdesc; 1059 goto err_txdesc;
diff --git a/drivers/spi/spi-topcliff-pch.c b/drivers/spi/spi-topcliff-pch.c
index 6a80749391db..99ec279bc46d 100644
--- a/drivers/spi/spi-topcliff-pch.c
+++ b/drivers/spi/spi-topcliff-pch.c
@@ -1076,7 +1076,7 @@ static void pch_spi_handle_dma(struct pch_spi_data *data, int *bpw)
1076 } 1076 }
1077 sg = dma->sg_rx_p; 1077 sg = dma->sg_rx_p;
1078 desc_rx = dma->chan_rx->device->device_prep_slave_sg(dma->chan_rx, sg, 1078 desc_rx = dma->chan_rx->device->device_prep_slave_sg(dma->chan_rx, sg,
1079 num, DMA_FROM_DEVICE, 1079 num, DMA_DEV_TO_MEM,
1080 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1080 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1081 if (!desc_rx) { 1081 if (!desc_rx) {
1082 dev_err(&data->master->dev, "%s:device_prep_slave_sg Failed\n", 1082 dev_err(&data->master->dev, "%s:device_prep_slave_sg Failed\n",
@@ -1121,7 +1121,7 @@ static void pch_spi_handle_dma(struct pch_spi_data *data, int *bpw)
1121 } 1121 }
1122 sg = dma->sg_tx_p; 1122 sg = dma->sg_tx_p;
1123 desc_tx = dma->chan_tx->device->device_prep_slave_sg(dma->chan_tx, 1123 desc_tx = dma->chan_tx->device->device_prep_slave_sg(dma->chan_tx,
1124 sg, num, DMA_TO_DEVICE, 1124 sg, num, DMA_MEM_TO_DEV,
1125 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1125 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1126 if (!desc_tx) { 1126 if (!desc_tx) {
1127 dev_err(&data->master->dev, "%s:device_prep_slave_sg Failed\n", 1127 dev_err(&data->master->dev, "%s:device_prep_slave_sg Failed\n",
diff --git a/drivers/tty/serial/amba-pl011.c b/drivers/tty/serial/amba-pl011.c
index 00233af1acc4..e4d5a21e1a63 100644
--- a/drivers/tty/serial/amba-pl011.c
+++ b/drivers/tty/serial/amba-pl011.c
@@ -268,7 +268,7 @@ static void pl011_dma_probe_initcall(struct uart_amba_port *uap)
268 struct dma_slave_config tx_conf = { 268 struct dma_slave_config tx_conf = {
269 .dst_addr = uap->port.mapbase + UART01x_DR, 269 .dst_addr = uap->port.mapbase + UART01x_DR,
270 .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE, 270 .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
271 .direction = DMA_TO_DEVICE, 271 .direction = DMA_MEM_TO_DEV,
272 .dst_maxburst = uap->fifosize >> 1, 272 .dst_maxburst = uap->fifosize >> 1,
273 }; 273 };
274 struct dma_chan *chan; 274 struct dma_chan *chan;
@@ -301,7 +301,7 @@ static void pl011_dma_probe_initcall(struct uart_amba_port *uap)
301 struct dma_slave_config rx_conf = { 301 struct dma_slave_config rx_conf = {
302 .src_addr = uap->port.mapbase + UART01x_DR, 302 .src_addr = uap->port.mapbase + UART01x_DR,
303 .src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE, 303 .src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
304 .direction = DMA_FROM_DEVICE, 304 .direction = DMA_DEV_TO_MEM,
305 .src_maxburst = uap->fifosize >> 1, 305 .src_maxburst = uap->fifosize >> 1,
306 }; 306 };
307 307
@@ -480,7 +480,7 @@ static int pl011_dma_tx_refill(struct uart_amba_port *uap)
480 return -EBUSY; 480 return -EBUSY;
481 } 481 }
482 482
483 desc = dma_dev->device_prep_slave_sg(chan, &dmatx->sg, 1, DMA_TO_DEVICE, 483 desc = dma_dev->device_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV,
484 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 484 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
485 if (!desc) { 485 if (!desc) {
486 dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE); 486 dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE);
@@ -676,7 +676,7 @@ static int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap)
676 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; 676 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a;
677 dma_dev = rxchan->device; 677 dma_dev = rxchan->device;
678 desc = rxchan->device->device_prep_slave_sg(rxchan, &sgbuf->sg, 1, 678 desc = rxchan->device->device_prep_slave_sg(rxchan, &sgbuf->sg, 1,
679 DMA_FROM_DEVICE, 679 DMA_DEV_TO_MEM,
680 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 680 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
681 /* 681 /*
682 * If the DMA engine is busy and cannot prepare a 682 * If the DMA engine is busy and cannot prepare a
diff --git a/drivers/tty/serial/pch_uart.c b/drivers/tty/serial/pch_uart.c
index 21febef926aa..6b8019c5b36d 100644
--- a/drivers/tty/serial/pch_uart.c
+++ b/drivers/tty/serial/pch_uart.c
@@ -749,7 +749,7 @@ static int dma_handle_rx(struct eg20t_port *priv)
749 sg_dma_address(sg) = priv->rx_buf_dma; 749 sg_dma_address(sg) = priv->rx_buf_dma;
750 750
751 desc = priv->chan_rx->device->device_prep_slave_sg(priv->chan_rx, 751 desc = priv->chan_rx->device->device_prep_slave_sg(priv->chan_rx,
752 sg, 1, DMA_FROM_DEVICE, 752 sg, 1, DMA_DEV_TO_MEM,
753 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 753 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
754 754
755 if (!desc) 755 if (!desc)
@@ -908,7 +908,7 @@ static unsigned int dma_handle_tx(struct eg20t_port *priv)
908 } 908 }
909 909
910 desc = priv->chan_tx->device->device_prep_slave_sg(priv->chan_tx, 910 desc = priv->chan_tx->device->device_prep_slave_sg(priv->chan_tx,
911 priv->sg_tx_p, nent, DMA_TO_DEVICE, 911 priv->sg_tx_p, nent, DMA_MEM_TO_DEV,
912 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 912 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
913 if (!desc) { 913 if (!desc) {
914 dev_err(priv->port.dev, "%s:device_prep_slave_sg Failed\n", 914 dev_err(priv->port.dev, "%s:device_prep_slave_sg Failed\n",
diff --git a/drivers/tty/serial/sh-sci.c b/drivers/tty/serial/sh-sci.c
index 1945c70539c2..9900e8560452 100644
--- a/drivers/tty/serial/sh-sci.c
+++ b/drivers/tty/serial/sh-sci.c
@@ -1214,7 +1214,7 @@ static void sci_submit_rx(struct sci_port *s)
1214 struct dma_async_tx_descriptor *desc; 1214 struct dma_async_tx_descriptor *desc;
1215 1215
1216 desc = chan->device->device_prep_slave_sg(chan, 1216 desc = chan->device->device_prep_slave_sg(chan,
1217 sg, 1, DMA_FROM_DEVICE, DMA_PREP_INTERRUPT); 1217 sg, 1, DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
1218 1218
1219 if (desc) { 1219 if (desc) {
1220 s->desc_rx[i] = desc; 1220 s->desc_rx[i] = desc;
@@ -1329,7 +1329,7 @@ static void work_fn_tx(struct work_struct *work)
1329 BUG_ON(!sg_dma_len(sg)); 1329 BUG_ON(!sg_dma_len(sg));
1330 1330
1331 desc = chan->device->device_prep_slave_sg(chan, 1331 desc = chan->device->device_prep_slave_sg(chan,
1332 sg, s->sg_len_tx, DMA_TO_DEVICE, 1332 sg, s->sg_len_tx, DMA_MEM_TO_DEV,
1333 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1333 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1334 if (!desc) { 1334 if (!desc) {
1335 /* switch to PIO */ 1335 /* switch to PIO */
diff --git a/drivers/usb/musb/ux500_dma.c b/drivers/usb/musb/ux500_dma.c
index ef4333f4bbe0..f8fd0ddee14a 100644
--- a/drivers/usb/musb/ux500_dma.c
+++ b/drivers/usb/musb/ux500_dma.c
@@ -112,7 +112,7 @@ static bool ux500_configure_channel(struct dma_channel *channel,
112 struct musb_hw_ep *hw_ep = ux500_channel->hw_ep; 112 struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
113 struct dma_chan *dma_chan = ux500_channel->dma_chan; 113 struct dma_chan *dma_chan = ux500_channel->dma_chan;
114 struct dma_async_tx_descriptor *dma_desc; 114 struct dma_async_tx_descriptor *dma_desc;
115 enum dma_data_direction direction; 115 enum dma_transfer_direction direction;
116 struct scatterlist sg; 116 struct scatterlist sg;
117 struct dma_slave_config slave_conf; 117 struct dma_slave_config slave_conf;
118 enum dma_slave_buswidth addr_width; 118 enum dma_slave_buswidth addr_width;
@@ -132,7 +132,7 @@ static bool ux500_configure_channel(struct dma_channel *channel,
132 sg_dma_address(&sg) = dma_addr; 132 sg_dma_address(&sg) = dma_addr;
133 sg_dma_len(&sg) = len; 133 sg_dma_len(&sg) = len;
134 134
135 direction = ux500_channel->is_tx ? DMA_TO_DEVICE : DMA_FROM_DEVICE; 135 direction = ux500_channel->is_tx ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
136 addr_width = (len & 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE : 136 addr_width = (len & 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE :
137 DMA_SLAVE_BUSWIDTH_4_BYTES; 137 DMA_SLAVE_BUSWIDTH_4_BYTES;
138 138
diff --git a/drivers/usb/renesas_usbhs/fifo.c b/drivers/usb/renesas_usbhs/fifo.c
index 8da685e796d1..e2f22d4b8c3a 100644
--- a/drivers/usb/renesas_usbhs/fifo.c
+++ b/drivers/usb/renesas_usbhs/fifo.c
@@ -765,10 +765,10 @@ static void usbhsf_dma_prepare_tasklet(unsigned long data)
765 struct dma_async_tx_descriptor *desc; 765 struct dma_async_tx_descriptor *desc;
766 struct dma_chan *chan = usbhsf_dma_chan_get(fifo, pkt); 766 struct dma_chan *chan = usbhsf_dma_chan_get(fifo, pkt);
767 struct device *dev = usbhs_priv_to_dev(priv); 767 struct device *dev = usbhs_priv_to_dev(priv);
768 enum dma_data_direction dir; 768 enum dma_transfer_direction dir;
769 dma_cookie_t cookie; 769 dma_cookie_t cookie;
770 770
771 dir = usbhs_pipe_is_dir_in(pipe) ? DMA_FROM_DEVICE : DMA_TO_DEVICE; 771 dir = usbhs_pipe_is_dir_in(pipe) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV;
772 772
773 sg_init_table(&sg, 1); 773 sg_init_table(&sg, 1);
774 sg_set_page(&sg, virt_to_page(pkt->dma), 774 sg_set_page(&sg, virt_to_page(pkt->dma),
diff --git a/drivers/video/mx3fb.c b/drivers/video/mx3fb.c
index e3406ab31305..1981e3665d07 100644
--- a/drivers/video/mx3fb.c
+++ b/drivers/video/mx3fb.c
@@ -334,7 +334,7 @@ static void sdc_enable_channel(struct mx3fb_info *mx3_fbi)
334 /* This enables the channel */ 334 /* This enables the channel */
335 if (mx3_fbi->cookie < 0) { 335 if (mx3_fbi->cookie < 0) {
336 mx3_fbi->txd = dma_chan->device->device_prep_slave_sg(dma_chan, 336 mx3_fbi->txd = dma_chan->device->device_prep_slave_sg(dma_chan,
337 &mx3_fbi->sg[0], 1, DMA_TO_DEVICE, DMA_PREP_INTERRUPT); 337 &mx3_fbi->sg[0], 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
338 if (!mx3_fbi->txd) { 338 if (!mx3_fbi->txd) {
339 dev_err(mx3fb->dev, "Cannot allocate descriptor on %d\n", 339 dev_err(mx3fb->dev, "Cannot allocate descriptor on %d\n",
340 dma_chan->chan_id); 340 dma_chan->chan_id);
@@ -1117,7 +1117,7 @@ static int mx3fb_pan_display(struct fb_var_screeninfo *var,
1117 async_tx_ack(mx3_fbi->txd); 1117 async_tx_ack(mx3_fbi->txd);
1118 1118
1119 txd = dma_chan->device->device_prep_slave_sg(dma_chan, sg + 1119 txd = dma_chan->device->device_prep_slave_sg(dma_chan, sg +
1120 mx3_fbi->cur_ipu_buf, 1, DMA_TO_DEVICE, DMA_PREP_INTERRUPT); 1120 mx3_fbi->cur_ipu_buf, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
1121 if (!txd) { 1121 if (!txd) {
1122 dev_err(fbi->device, 1122 dev_err(fbi->device,
1123 "Error preparing a DMA transaction descriptor.\n"); 1123 "Error preparing a DMA transaction descriptor.\n");
diff --git a/include/linux/amba/pl08x.h b/include/linux/amba/pl08x.h
index 9eabffbc4e50..033f6aa670de 100644
--- a/include/linux/amba/pl08x.h
+++ b/include/linux/amba/pl08x.h
@@ -134,7 +134,7 @@ struct pl08x_txd {
134 struct dma_async_tx_descriptor tx; 134 struct dma_async_tx_descriptor tx;
135 struct list_head node; 135 struct list_head node;
136 struct list_head dsg_list; 136 struct list_head dsg_list;
137 enum dma_data_direction direction; 137 enum dma_transfer_direction direction;
138 dma_addr_t llis_bus; 138 dma_addr_t llis_bus;
139 struct pl08x_lli *llis_va; 139 struct pl08x_lli *llis_va;
140 /* Default cctl value for LLIs */ 140 /* Default cctl value for LLIs */
@@ -197,7 +197,7 @@ struct pl08x_dma_chan {
197 dma_addr_t dst_addr; 197 dma_addr_t dst_addr;
198 u32 src_cctl; 198 u32 src_cctl;
199 u32 dst_cctl; 199 u32 dst_cctl;
200 enum dma_data_direction runtime_direction; 200 enum dma_transfer_direction runtime_direction;
201 dma_cookie_t lc; 201 dma_cookie_t lc;
202 struct list_head pend_list; 202 struct list_head pend_list;
203 struct pl08x_txd *at; 203 struct pl08x_txd *at;
diff --git a/include/linux/dmaengine.h b/include/linux/dmaengine.h
index 75f53f874b24..a865b3a354cd 100644
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -23,7 +23,6 @@
23 23
24#include <linux/device.h> 24#include <linux/device.h>
25#include <linux/uio.h> 25#include <linux/uio.h>
26#include <linux/dma-direction.h>
27#include <linux/scatterlist.h> 26#include <linux/scatterlist.h>
28#include <linux/bitmap.h> 27#include <linux/bitmap.h>
29#include <asm/page.h> 28#include <asm/page.h>
@@ -77,6 +76,19 @@ enum dma_transaction_type {
77/* last transaction type for creation of the capabilities mask */ 76/* last transaction type for creation of the capabilities mask */
78#define DMA_TX_TYPE_END (DMA_CYCLIC + 1) 77#define DMA_TX_TYPE_END (DMA_CYCLIC + 1)
79 78
79/**
80 * enum dma_transfer_direction - dma transfer mode and direction indicator
81 * @DMA_MEM_TO_MEM: Async/Memcpy mode
82 * @DMA_MEM_TO_DEV: Slave mode & From Memory to Device
83 * @DMA_DEV_TO_MEM: Slave mode & From Device to Memory
84 * @DMA_DEV_TO_DEV: Slave mode & From Device to Device
85 */
86enum dma_transfer_direction {
87 DMA_MEM_TO_MEM,
88 DMA_MEM_TO_DEV,
89 DMA_DEV_TO_MEM,
90 DMA_DEV_TO_DEV,
91};
80 92
81/** 93/**
82 * enum dma_ctrl_flags - DMA flags to augment operation preparation, 94 * enum dma_ctrl_flags - DMA flags to augment operation preparation,
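
The kernel-doc block just added is the heart of the series: enum dma_transfer_direction describes the transfer topology seen by the DMA controller, while enum dma_data_direction (DMA_TO_DEVICE, DMA_FROM_DEVICE, DMA_BIDIRECTIONAL) remains reserved for the dma-mapping API. Where a driver still needs a mapping direction derived from the transfer direction, a small conversion helper could be written as follows; this is only a hypothetical sketch and is not part of this patch:

#include <linux/dma-direction.h>
#include <linux/dmaengine.h>

/* Hypothetical helper: mapping direction implied by a transfer direction. */
static inline enum dma_data_direction
xfer_to_data_direction(enum dma_transfer_direction dir)
{
        switch (dir) {
        case DMA_MEM_TO_DEV:
                return DMA_TO_DEVICE;       /* CPU filled the buffer, device reads it */
        case DMA_DEV_TO_MEM:
                return DMA_FROM_DEVICE;     /* device writes, CPU reads afterwards */
        case DMA_MEM_TO_MEM:
                return DMA_BIDIRECTIONAL;
        default:
                return DMA_NONE;            /* DMA_DEV_TO_DEV needs no CPU mapping */
        }
}
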
@@ -269,7 +281,7 @@ enum dma_slave_buswidth {
269 * struct, if applicable. 281 * struct, if applicable.
270 */ 282 */
271struct dma_slave_config { 283struct dma_slave_config {
272 enum dma_data_direction direction; 284 enum dma_transfer_direction direction;
273 dma_addr_t src_addr; 285 dma_addr_t src_addr;
274 dma_addr_t dst_addr; 286 dma_addr_t dst_addr;
275 enum dma_slave_buswidth src_addr_width; 287 enum dma_slave_buswidth src_addr_width;
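
With struct dma_slave_config now carrying a dma_transfer_direction, a client sets up an RX channel in the style already visible in the pl022 and pl011 hunks of this patch. A minimal sketch, where chan is the channel obtained from dma_request_channel() and fifo_phys is a placeholder for the real device register address:

        struct dma_slave_config rx_conf = {
                .direction      = DMA_DEV_TO_MEM,       /* peripheral -> memory */
                .src_addr       = fifo_phys,            /* placeholder FIFO bus address */
                .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .src_maxburst   = 4,
        };
        int ret;

        ret = dmaengine_slave_config(chan, &rx_conf);
        if (ret)
                return ret;
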
@@ -492,11 +504,11 @@ struct dma_device {
492 504
493 struct dma_async_tx_descriptor *(*device_prep_slave_sg)( 505 struct dma_async_tx_descriptor *(*device_prep_slave_sg)(
494 struct dma_chan *chan, struct scatterlist *sgl, 506 struct dma_chan *chan, struct scatterlist *sgl,
495 unsigned int sg_len, enum dma_data_direction direction, 507 unsigned int sg_len, enum dma_transfer_direction direction,
496 unsigned long flags); 508 unsigned long flags);
497 struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)( 509 struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
498 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 510 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
499 size_t period_len, enum dma_data_direction direction); 511 size_t period_len, enum dma_transfer_direction direction);
500 int (*device_control)(struct dma_chan *chan, enum dma_ctrl_cmd cmd, 512 int (*device_control)(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
501 unsigned long arg); 513 unsigned long arg);
502 514
@@ -522,7 +534,7 @@ static inline int dmaengine_slave_config(struct dma_chan *chan,
522 534
523static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_single( 535static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_single(
524 struct dma_chan *chan, void *buf, size_t len, 536 struct dma_chan *chan, void *buf, size_t len,
525 enum dma_data_direction dir, unsigned long flags) 537 enum dma_transfer_direction dir, unsigned long flags)
526{ 538{
527 struct scatterlist sg; 539 struct scatterlist sg;
528 sg_init_one(&sg, buf, len); 540 sg_init_one(&sg, buf, len);
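
The dmaengine.h hunks above define the client-visible API: struct dma_slave_config carries the new direction, and both device_prep_slave_sg and the dmaengine_prep_slave_single helper take an enum dma_transfer_direction. A minimal client-side sketch of a playback (memory-to-device) transfer follows; the function name start_tx, the FIFO address parameter, and the burst/width values are assumptions for illustration, not taken from this patch.

    #include <linux/dmaengine.h>
    #include <linux/errno.h>

    /* Sketch: configure a slave channel for memory -> device and kick off
     * one transfer of a single contiguous buffer. */
    static int start_tx(struct dma_chan *chan, void *buf, size_t len,
                        dma_addr_t fifo_addr)
    {
            struct dma_slave_config cfg = {
                    .direction      = DMA_MEM_TO_DEV,       /* playback */
                    .dst_addr       = fifo_addr,            /* peripheral FIFO */
                    .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                    .dst_maxburst   = 4,
            };
            struct dma_async_tx_descriptor *desc;

            if (dmaengine_slave_config(chan, &cfg))
                    return -EINVAL;

            desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                               DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc)
                    return -ENOMEM;

            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;
    }

The capture path is the mirror image: pass DMA_DEV_TO_MEM and fill in src_addr, src_addr_width and src_maxburst instead of the dst_* fields, as the imx hunk further down illustrates.
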
diff --git a/include/linux/dw_dmac.h b/include/linux/dw_dmac.h
index 4bfe0a2f7d50..f2c64f92c4a0 100644
--- a/include/linux/dw_dmac.h
+++ b/include/linux/dw_dmac.h
@@ -127,7 +127,7 @@ struct dw_cyclic_desc {
127 127
128struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan, 128struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
129 dma_addr_t buf_addr, size_t buf_len, size_t period_len, 129 dma_addr_t buf_addr, size_t buf_len, size_t period_len,
130 enum dma_data_direction direction); 130 enum dma_transfer_direction direction);
131void dw_dma_cyclic_free(struct dma_chan *chan); 131void dw_dma_cyclic_free(struct dma_chan *chan);
132int dw_dma_cyclic_start(struct dma_chan *chan); 132int dw_dma_cyclic_start(struct dma_chan *chan);
133void dw_dma_cyclic_stop(struct dma_chan *chan); 133void dw_dma_cyclic_stop(struct dma_chan *chan);
diff --git a/include/linux/sh_dma.h b/include/linux/sh_dma.h
index cb2dd118cc0f..62ef6938da10 100644
--- a/include/linux/sh_dma.h
+++ b/include/linux/sh_dma.h
@@ -30,7 +30,7 @@ struct sh_desc {
30 struct sh_dmae_regs hw; 30 struct sh_dmae_regs hw;
31 struct list_head node; 31 struct list_head node;
32 struct dma_async_tx_descriptor async_tx; 32 struct dma_async_tx_descriptor async_tx;
33 enum dma_data_direction direction; 33 enum dma_transfer_direction direction;
34 dma_cookie_t cookie; 34 dma_cookie_t cookie;
35 size_t partial; 35 size_t partial;
36 int chunks; 36 int chunks;
diff --git a/sound/atmel/abdac.c b/sound/atmel/abdac.c
index 6fd9391b3a6c..4fa1dbd8ee83 100644
--- a/sound/atmel/abdac.c
+++ b/sound/atmel/abdac.c
@@ -133,7 +133,7 @@ static int atmel_abdac_prepare_dma(struct atmel_abdac *dac,
133 period_len = frames_to_bytes(runtime, runtime->period_size); 133 period_len = frames_to_bytes(runtime, runtime->period_size);
134 134
135 cdesc = dw_dma_cyclic_prep(chan, runtime->dma_addr, buffer_len, 135 cdesc = dw_dma_cyclic_prep(chan, runtime->dma_addr, buffer_len,
136 period_len, DMA_TO_DEVICE); 136 period_len, DMA_MEM_TO_DEV);
137 if (IS_ERR(cdesc)) { 137 if (IS_ERR(cdesc)) {
138 dev_dbg(&dac->pdev->dev, "could not prepare cyclic DMA\n"); 138 dev_dbg(&dac->pdev->dev, "could not prepare cyclic DMA\n");
139 return PTR_ERR(cdesc); 139 return PTR_ERR(cdesc);
diff --git a/sound/atmel/ac97c.c b/sound/atmel/ac97c.c
index 6e5addeb236b..cd9428b24a36 100644
--- a/sound/atmel/ac97c.c
+++ b/sound/atmel/ac97c.c
@@ -102,7 +102,7 @@ static void atmel_ac97c_dma_capture_period_done(void *arg)
102 102
103static int atmel_ac97c_prepare_dma(struct atmel_ac97c *chip, 103static int atmel_ac97c_prepare_dma(struct atmel_ac97c *chip,
104 struct snd_pcm_substream *substream, 104 struct snd_pcm_substream *substream,
105 enum dma_data_direction direction) 105 enum dma_transfer_direction direction)
106{ 106{
107 struct dma_chan *chan; 107 struct dma_chan *chan;
108 struct dw_cyclic_desc *cdesc; 108 struct dw_cyclic_desc *cdesc;
@@ -118,7 +118,7 @@ static int atmel_ac97c_prepare_dma(struct atmel_ac97c *chip,
118 return -EINVAL; 118 return -EINVAL;
119 } 119 }
120 120
121 if (direction == DMA_TO_DEVICE) 121 if (direction == DMA_MEM_TO_DEV)
122 chan = chip->dma.tx_chan; 122 chan = chip->dma.tx_chan;
123 else 123 else
124 chan = chip->dma.rx_chan; 124 chan = chip->dma.rx_chan;
@@ -133,7 +133,7 @@ static int atmel_ac97c_prepare_dma(struct atmel_ac97c *chip,
133 return PTR_ERR(cdesc); 133 return PTR_ERR(cdesc);
134 } 134 }
135 135
136 if (direction == DMA_TO_DEVICE) { 136 if (direction == DMA_MEM_TO_DEV) {
137 cdesc->period_callback = atmel_ac97c_dma_playback_period_done; 137 cdesc->period_callback = atmel_ac97c_dma_playback_period_done;
138 set_bit(DMA_TX_READY, &chip->flags); 138 set_bit(DMA_TX_READY, &chip->flags);
139 } else { 139 } else {
@@ -393,7 +393,7 @@ static int atmel_ac97c_playback_prepare(struct snd_pcm_substream *substream)
393 if (cpu_is_at32ap7000()) { 393 if (cpu_is_at32ap7000()) {
394 if (!test_bit(DMA_TX_READY, &chip->flags)) 394 if (!test_bit(DMA_TX_READY, &chip->flags))
395 retval = atmel_ac97c_prepare_dma(chip, substream, 395 retval = atmel_ac97c_prepare_dma(chip, substream,
396 DMA_TO_DEVICE); 396 DMA_MEM_TO_DEV);
397 } else { 397 } else {
398 /* Initialize and start the PDC */ 398 /* Initialize and start the PDC */
399 writel(runtime->dma_addr, chip->regs + ATMEL_PDC_TPR); 399 writel(runtime->dma_addr, chip->regs + ATMEL_PDC_TPR);
@@ -484,7 +484,7 @@ static int atmel_ac97c_capture_prepare(struct snd_pcm_substream *substream)
484 if (cpu_is_at32ap7000()) { 484 if (cpu_is_at32ap7000()) {
485 if (!test_bit(DMA_RX_READY, &chip->flags)) 485 if (!test_bit(DMA_RX_READY, &chip->flags))
486 retval = atmel_ac97c_prepare_dma(chip, substream, 486 retval = atmel_ac97c_prepare_dma(chip, substream,
487 DMA_FROM_DEVICE); 487 DMA_DEV_TO_MEM);
488 } else { 488 } else {
489 /* Initialize and start the PDC */ 489 /* Initialize and start the PDC */
490 writel(runtime->dma_addr, chip->regs + ATMEL_PDC_RPR); 490 writel(runtime->dma_addr, chip->regs + ATMEL_PDC_RPR);
diff --git a/sound/soc/ep93xx/ep93xx-pcm.c b/sound/soc/ep93xx/ep93xx-pcm.c
index d00230a591b1..23de7927810c 100644
--- a/sound/soc/ep93xx/ep93xx-pcm.c
+++ b/sound/soc/ep93xx/ep93xx-pcm.c
@@ -113,9 +113,9 @@ static int ep93xx_pcm_open(struct snd_pcm_substream *substream)
113 rtd->dma_data.name = dma_params->name; 113 rtd->dma_data.name = dma_params->name;
114 114
115 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) 115 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
116 rtd->dma_data.direction = DMA_TO_DEVICE; 116 rtd->dma_data.direction = DMA_MEM_TO_DEV;
117 else 117 else
118 rtd->dma_data.direction = DMA_FROM_DEVICE; 118 rtd->dma_data.direction = DMA_DEV_TO_MEM;
119 119
120 rtd->dma_chan = dma_request_channel(mask, ep93xx_pcm_dma_filter, 120 rtd->dma_chan = dma_request_channel(mask, ep93xx_pcm_dma_filter,
121 &rtd->dma_data); 121 &rtd->dma_data);
diff --git a/sound/soc/imx/imx-pcm-dma-mx2.c b/sound/soc/imx/imx-pcm-dma-mx2.c
index 43fdc24f7e8d..7d28de9758f3 100644
--- a/sound/soc/imx/imx-pcm-dma-mx2.c
+++ b/sound/soc/imx/imx-pcm-dma-mx2.c
@@ -107,12 +107,12 @@ static int imx_ssi_dma_alloc(struct snd_pcm_substream *substream,
107 } 107 }
108 108
109 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) { 109 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
110 slave_config.direction = DMA_TO_DEVICE; 110 slave_config.direction = DMA_MEM_TO_DEV;
111 slave_config.dst_addr = dma_params->dma_addr; 111 slave_config.dst_addr = dma_params->dma_addr;
112 slave_config.dst_addr_width = buswidth; 112 slave_config.dst_addr_width = buswidth;
113 slave_config.dst_maxburst = dma_params->burstsize; 113 slave_config.dst_maxburst = dma_params->burstsize;
114 } else { 114 } else {
115 slave_config.direction = DMA_FROM_DEVICE; 115 slave_config.direction = DMA_DEV_TO_MEM;
116 slave_config.src_addr = dma_params->dma_addr; 116 slave_config.src_addr = dma_params->dma_addr;
117 slave_config.src_addr_width = buswidth; 117 slave_config.src_addr_width = buswidth;
118 slave_config.src_maxburst = dma_params->burstsize; 118 slave_config.src_maxburst = dma_params->burstsize;
@@ -159,7 +159,7 @@ static int snd_imx_pcm_hw_params(struct snd_pcm_substream *substream,
159 iprtd->period_bytes * iprtd->periods, 159 iprtd->period_bytes * iprtd->periods,
160 iprtd->period_bytes, 160 iprtd->period_bytes,
161 substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? 161 substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
162 DMA_TO_DEVICE : DMA_FROM_DEVICE); 162 DMA_MEM_TO_DEV : DMA_DEV_TO_MEM);
163 if (!iprtd->desc) { 163 if (!iprtd->desc) {
164 dev_err(&chan->dev->device, "cannot prepare slave dma\n"); 164 dev_err(&chan->dev->device, "cannot prepare slave dma\n");
165 return -EINVAL; 165 return -EINVAL;
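
The imx hunk above is the usual ALSA ring-buffer pattern: one cyclic descriptor spanning the whole buffer, interrupting once per period, with the direction picked from the substream. A hedged sketch of that pattern follows, using the device_prep_dma_cyclic signature shown in the dmaengine.h hunk; the names start_capture_ring and period_done are hypothetical.

    #include <linux/dmaengine.h>
    #include <linux/errno.h>

    /* Sketch: set up a cyclic capture ring (device -> memory). The engine
     * wraps around the ring and invokes the callback once per period. */
    static int start_capture_ring(struct dma_chan *chan, dma_addr_t ring,
                                  size_t period_bytes, unsigned int periods,
                                  dma_async_tx_callback period_done, void *param)
    {
            struct dma_async_tx_descriptor *desc;

            if (!chan->device->device_prep_dma_cyclic)
                    return -ENOSYS;

            desc = chan->device->device_prep_dma_cyclic(chan, ring,
                            period_bytes * periods,  /* whole ring length */
                            period_bytes,            /* callback granularity */
                            DMA_DEV_TO_MEM);         /* capture direction */
            if (!desc)
                    return -EINVAL;

            desc->callback = period_done;
            desc->callback_param = param;
            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;
    }
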
diff --git a/sound/soc/samsung/dma.c b/sound/soc/samsung/dma.c
index a68b26441784..d400ed0a71f2 100644
--- a/sound/soc/samsung/dma.c
+++ b/sound/soc/samsung/dma.c
@@ -86,7 +86,7 @@ static void dma_enqueue(struct snd_pcm_substream *substream)
86 dma_info.cap = (samsung_dma_has_circular() ? DMA_CYCLIC : DMA_SLAVE); 86 dma_info.cap = (samsung_dma_has_circular() ? DMA_CYCLIC : DMA_SLAVE);
87 dma_info.direction = 87 dma_info.direction =
88 (substream->stream == SNDRV_PCM_STREAM_PLAYBACK 88 (substream->stream == SNDRV_PCM_STREAM_PLAYBACK
89 ? DMA_TO_DEVICE : DMA_FROM_DEVICE); 89 ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM);
90 dma_info.fp = audio_buffdone; 90 dma_info.fp = audio_buffdone;
91 dma_info.fp_param = substream; 91 dma_info.fp_param = substream;
92 dma_info.period = prtd->dma_period; 92 dma_info.period = prtd->dma_period;
@@ -171,7 +171,7 @@ static int dma_hw_params(struct snd_pcm_substream *substream,
171 dma_info.client = prtd->params->client; 171 dma_info.client = prtd->params->client;
172 dma_info.direction = 172 dma_info.direction =
173 (substream->stream == SNDRV_PCM_STREAM_PLAYBACK 173 (substream->stream == SNDRV_PCM_STREAM_PLAYBACK
174 ? DMA_TO_DEVICE : DMA_FROM_DEVICE); 174 ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM);
175 dma_info.width = prtd->params->dma_size; 175 dma_info.width = prtd->params->dma_size;
176 dma_info.fifo = prtd->params->dma_addr; 176 dma_info.fifo = prtd->params->dma_addr;
177 prtd->params->ch = prtd->params->ops->request( 177 prtd->params->ch = prtd->params->ops->request(
diff --git a/sound/soc/sh/siu_pcm.c b/sound/soc/sh/siu_pcm.c
index f8f681690a71..0193e595d415 100644
--- a/sound/soc/sh/siu_pcm.c
+++ b/sound/soc/sh/siu_pcm.c
@@ -131,7 +131,7 @@ static int siu_pcm_wr_set(struct siu_port *port_info,
131 sg_dma_address(&sg) = buff; 131 sg_dma_address(&sg) = buff;
132 132
133 desc = siu_stream->chan->device->device_prep_slave_sg(siu_stream->chan, 133 desc = siu_stream->chan->device->device_prep_slave_sg(siu_stream->chan,
134 &sg, 1, DMA_TO_DEVICE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 134 &sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
135 if (!desc) { 135 if (!desc) {
136 dev_err(dev, "Failed to allocate a dma descriptor\n"); 136 dev_err(dev, "Failed to allocate a dma descriptor\n");
137 return -ENOMEM; 137 return -ENOMEM;
@@ -181,7 +181,7 @@ static int siu_pcm_rd_set(struct siu_port *port_info,
181 sg_dma_address(&sg) = buff; 181 sg_dma_address(&sg) = buff;
182 182
183 desc = siu_stream->chan->device->device_prep_slave_sg(siu_stream->chan, 183 desc = siu_stream->chan->device->device_prep_slave_sg(siu_stream->chan,
184 &sg, 1, DMA_FROM_DEVICE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 184 &sg, 1, DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
185 if (!desc) { 185 if (!desc) {
186 dev_err(dev, "Failed to allocate dma descriptor\n"); 186 dev_err(dev, "Failed to allocate dma descriptor\n");
187 return -ENOMEM; 187 return -ENOMEM;
diff --git a/sound/soc/txx9/txx9aclc.c b/sound/soc/txx9/txx9aclc.c
index 3de99af8cb82..602bb68b8784 100644
--- a/sound/soc/txx9/txx9aclc.c
+++ b/sound/soc/txx9/txx9aclc.c
@@ -134,7 +134,7 @@ txx9aclc_dma_submit(struct txx9aclc_dmadata *dmadata, dma_addr_t buf_dma_addr)
134 sg_dma_address(&sg) = buf_dma_addr; 134 sg_dma_address(&sg) = buf_dma_addr;
135 desc = chan->device->device_prep_slave_sg(chan, &sg, 1, 135 desc = chan->device->device_prep_slave_sg(chan, &sg, 1,
136 dmadata->substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? 136 dmadata->substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
137 DMA_TO_DEVICE : DMA_FROM_DEVICE, 137 DMA_MEM_TO_DEV : DMA_DEV_TO_MEM,
138 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 138 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
139 if (!desc) { 139 if (!desc) {
140 dev_err(&chan->dev->device, "cannot prepare slave dma\n"); 140 dev_err(&chan->dev->device, "cannot prepare slave dma\n");