author		Vinod Koul <vinod.koul@linux.intel.com>	2011-10-13 13:04:23 -0400
committer	Vinod Koul <vinod.koul@linux.intel.com>	2011-10-27 11:23:43 -0400
commit		db8196df4bb6f117caa163aa73b0f16fd62290bd (patch)
tree		b86531031482037d9b31ad57479f2f7091020957 /drivers/dma/intel_mid_dma.c
parent		49920bc66984a512f4bcc7735a61642cd0e4d6f2 (diff)
dmaengine: move drivers to dma_transfer_direction
Fix up usage of DMA direction by introducing dma_transfer_direction;
this patch moves drivers/dma/* to use the new enum.

Cc: Jassi Brar <jaswinder.singh@linaro.org>
Cc: Russell King <rmk+kernel@arm.linux.org.uk>
Cc: Viresh Kumar <viresh.kumar@st.com>
Cc: Linus Walleij <linus.walleij@linaro.org>
Cc: Nicolas Ferre <nicolas.ferre@atmel.com>
Cc: Mika Westerberg <mika.westerberg@iki.fi>
Cc: H Hartley Sweeten <hartleys@visionengravers.com>
Cc: Li Yang <leoli@freescale.com>
Cc: Zhang Wei <zw@zh-kernel.org>
Cc: Sascha Hauer <s.hauer@pengutronix.de>
Cc: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Cc: Shawn Guo <shawn.guo@freescale.com>
Cc: Yong Wang <yong.y.wang@intel.com>
Cc: Tomoya MORINAGA <tomoya-linux@dsn.lapis-semi.com>
Cc: Boojin Kim <boojin.kim@samsung.com>
Cc: Barry Song <Baohua.Song@csr.com>
Acked-by: Mika Westerberg <mika.westerberg@iki.fi>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Viresh Kumar <viresh.kumar@st.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Vinod Koul <vinod.koul@linux.intel.com>
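For context: dma_transfer_direction was added by the parent commit (49920bc66984) so that slave-transfer direction is expressed independently of the CPU-cache-oriented enum dma_data_direction (DMA_TO_DEVICE/DMA_FROM_DEVICE). Below is a minimal sketch of the mapping this patch applies in intel_mid_dma.c; only DMA_MEM_TO_DEV and DMA_DEV_TO_MEM actually appear in this diff, and the remaining members and their order are recalled from include/linux/dmaengine.h rather than taken from this commit, so treat them as an assumption.

/* Sketch only: direction values used when converting this driver.
 * DMA_TO_DEVICE   -> DMA_MEM_TO_DEV  (memory to peripheral)
 * DMA_FROM_DEVICE -> DMA_DEV_TO_MEM  (peripheral to memory)
 */
enum dma_transfer_direction {
	DMA_MEM_TO_MEM,
	DMA_MEM_TO_DEV,
	DMA_DEV_TO_MEM,
	DMA_DEV_TO_DEV,
};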
Diffstat (limited to 'drivers/dma/intel_mid_dma.c')
-rw-r--r--	drivers/dma/intel_mid_dma.c	14
1 file changed, 7 insertions, 7 deletions
diff --git a/drivers/dma/intel_mid_dma.c b/drivers/dma/intel_mid_dma.c
index 9e96c43a846a..6deda25fd0a8 100644
--- a/drivers/dma/intel_mid_dma.c
+++ b/drivers/dma/intel_mid_dma.c
@@ -394,10 +394,10 @@ static int midc_lli_fill_sg(struct intel_mid_dma_chan *midc,
 						midc->dma->block_size);
 		/*Populate SAR and DAR values*/
 		sg_phy_addr = sg_phys(sg);
-		if (desc->dirn == DMA_TO_DEVICE) {
+		if (desc->dirn == DMA_MEM_TO_DEV) {
 			lli_bloc_desc->sar = sg_phy_addr;
 			lli_bloc_desc->dar = mids->dma_slave.dst_addr;
-		} else if (desc->dirn == DMA_FROM_DEVICE) {
+		} else if (desc->dirn == DMA_DEV_TO_MEM) {
 			lli_bloc_desc->sar = mids->dma_slave.src_addr;
 			lli_bloc_desc->dar = sg_phy_addr;
 		}
@@ -631,13 +631,13 @@ static struct dma_async_tx_descriptor *intel_mid_dma_prep_memcpy(
 	if (midc->dma->pimr_mask) {
 		cfg_hi.cfgx.protctl = 0x0; /*default value*/
 		cfg_hi.cfgx.fifo_mode = 1;
-		if (mids->dma_slave.direction == DMA_TO_DEVICE) {
+		if (mids->dma_slave.direction == DMA_MEM_TO_DEV) {
 			cfg_hi.cfgx.src_per = 0;
 			if (mids->device_instance == 0)
 				cfg_hi.cfgx.dst_per = 3;
 			if (mids->device_instance == 1)
 				cfg_hi.cfgx.dst_per = 1;
-		} else if (mids->dma_slave.direction == DMA_FROM_DEVICE) {
+		} else if (mids->dma_slave.direction == DMA_DEV_TO_MEM) {
 			if (mids->device_instance == 0)
 				cfg_hi.cfgx.src_per = 2;
 			if (mids->device_instance == 1)
@@ -681,11 +681,11 @@ static struct dma_async_tx_descriptor *intel_mid_dma_prep_memcpy(
 			ctl_lo.ctlx.sinc = 0;
 			ctl_lo.ctlx.dinc = 0;
 	} else {
-		if (mids->dma_slave.direction == DMA_TO_DEVICE) {
+		if (mids->dma_slave.direction == DMA_MEM_TO_DEV) {
 			ctl_lo.ctlx.sinc = 0;
 			ctl_lo.ctlx.dinc = 2;
 			ctl_lo.ctlx.tt_fc = 1;
-		} else if (mids->dma_slave.direction == DMA_FROM_DEVICE) {
+		} else if (mids->dma_slave.direction == DMA_DEV_TO_MEM) {
 			ctl_lo.ctlx.sinc = 2;
 			ctl_lo.ctlx.dinc = 0;
 			ctl_lo.ctlx.tt_fc = 2;
@@ -731,7 +731,7 @@ err_desc_get:
  */
 static struct dma_async_tx_descriptor *intel_mid_dma_prep_slave_sg(
 			struct dma_chan *chan, struct scatterlist *sgl,
-			unsigned int sg_len, enum dma_data_direction direction,
+			unsigned int sg_len, enum dma_transfer_direction direction,
 			unsigned long flags)
 {
 	struct intel_mid_dma_chan *midc = NULL;