path: root/drivers/dma/imx-sdma.c
Diffstat (limited to 'drivers/dma/imx-sdma.c')
 drivers/dma/imx-sdma.c | 27 ++++++++++++++++++++----------
 1 file changed, 18 insertions(+), 9 deletions(-)
diff --git a/drivers/dma/imx-sdma.c b/drivers/dma/imx-sdma.c
index f993955a640c..a8af379680c1 100644
--- a/drivers/dma/imx-sdma.c
+++ b/drivers/dma/imx-sdma.c
@@ -247,7 +247,7 @@ struct sdma_engine;
 struct sdma_channel {
 	struct sdma_engine		*sdma;
 	unsigned int			channel;
-	enum dma_data_direction		direction;
+	enum dma_transfer_direction	direction;
 	enum sdma_peripheral_type	peripheral_type;
 	unsigned int			event_id0;
 	unsigned int			event_id1;
@@ -268,6 +268,8 @@ struct sdma_channel {
 	struct dma_async_tx_descriptor	desc;
 	dma_cookie_t			last_completed;
 	enum dma_status			status;
+	unsigned int			chn_count;
+	unsigned int			chn_real_count;
 };
 
 #define IMX_DMA_SG_LOOP		(1 << 0)
@@ -503,6 +505,7 @@ static void mxc_sdma_handle_channel_normal(struct sdma_channel *sdmac)
 	struct sdma_buffer_descriptor *bd;
 	int i, error = 0;
 
+	sdmac->chn_real_count = 0;
 	/*
 	 * non loop mode. Iterate over all descriptors, collect
 	 * errors and call callback function
@@ -512,6 +515,7 @@ static void mxc_sdma_handle_channel_normal(struct sdma_channel *sdmac)
 
 		if (bd->mode.status & (BD_DONE | BD_RROR))
 			error = -EIO;
+		sdmac->chn_real_count += bd->mode.count;
 	}
 
 	if (error)
@@ -519,9 +523,9 @@ static void mxc_sdma_handle_channel_normal(struct sdma_channel *sdmac)
 	else
 		sdmac->status = DMA_SUCCESS;
 
+	sdmac->last_completed = sdmac->desc.cookie;
 	if (sdmac->desc.callback)
 		sdmac->desc.callback(sdmac->desc.callback_param);
-	sdmac->last_completed = sdmac->desc.cookie;
 }
 
 static void mxc_sdma_handle_channel(struct sdma_channel *sdmac)
@@ -650,7 +654,7 @@ static int sdma_load_context(struct sdma_channel *sdmac)
 	struct sdma_buffer_descriptor *bd0 = sdma->channel[0].bd;
 	int ret;
 
-	if (sdmac->direction == DMA_FROM_DEVICE) {
+	if (sdmac->direction == DMA_DEV_TO_MEM) {
 		load_address = sdmac->pc_from_device;
 	} else {
 		load_address = sdmac->pc_to_device;
@@ -832,17 +836,18 @@ static struct sdma_channel *to_sdma_chan(struct dma_chan *chan)
 
 static dma_cookie_t sdma_tx_submit(struct dma_async_tx_descriptor *tx)
 {
+	unsigned long flags;
 	struct sdma_channel *sdmac = to_sdma_chan(tx->chan);
 	struct sdma_engine *sdma = sdmac->sdma;
 	dma_cookie_t cookie;
 
-	spin_lock_irq(&sdmac->lock);
+	spin_lock_irqsave(&sdmac->lock, flags);
 
 	cookie = sdma_assign_cookie(sdmac);
 
 	sdma_enable_channel(sdma, sdmac->channel);
 
-	spin_unlock_irq(&sdmac->lock);
+	spin_unlock_irqrestore(&sdmac->lock, flags);
 
 	return cookie;
 }
@@ -911,7 +916,7 @@ static void sdma_free_chan_resources(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *sdma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long flags)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
@@ -941,6 +946,7 @@ static struct dma_async_tx_descriptor *sdma_prep_slave_sg(
 		goto err_out;
 	}
 
+	sdmac->chn_count = 0;
 	for_each_sg(sgl, sg, sg_len, i) {
 		struct sdma_buffer_descriptor *bd = &sdmac->bd[i];
 		int param;
@@ -957,6 +963,7 @@ static struct dma_async_tx_descriptor *sdma_prep_slave_sg(
 		}
 
 		bd->mode.count = count;
+		sdmac->chn_count += count;
 
 		if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES) {
 			ret = -EINVAL;
@@ -1008,7 +1015,7 @@ err_out:
 
 static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
 	struct sdma_engine *sdma = sdmac->sdma;
@@ -1093,7 +1100,7 @@ static int sdma_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 		sdma_disable_channel(sdmac);
 		return 0;
 	case DMA_SLAVE_CONFIG:
-		if (dmaengine_cfg->direction == DMA_FROM_DEVICE) {
+		if (dmaengine_cfg->direction == DMA_DEV_TO_MEM) {
 			sdmac->per_address = dmaengine_cfg->src_addr;
 			sdmac->watermark_level = dmaengine_cfg->src_maxburst;
 			sdmac->word_size = dmaengine_cfg->src_addr_width;
@@ -1102,6 +1109,7 @@ static int sdma_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 			sdmac->watermark_level = dmaengine_cfg->dst_maxburst;
 			sdmac->word_size = dmaengine_cfg->dst_addr_width;
 		}
+		sdmac->direction = dmaengine_cfg->direction;
 		return sdma_config_channel(sdmac);
 	default:
 		return -ENOSYS;
@@ -1119,7 +1127,8 @@ static enum dma_status sdma_tx_status(struct dma_chan *chan,
 
 	last_used = chan->cookie;
 
-	dma_set_tx_state(txstate, sdmac->last_completed, last_used, 0);
+	dma_set_tx_state(txstate, sdmac->last_completed, last_used,
+			sdmac->chn_count - sdmac->chn_real_count);
 
 	return sdmac->status;
 }
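
Note: by replacing the hard-coded 0 in dma_set_tx_state() with chn_count - chn_real_count, the driver now reports a real residue, so dmaengine clients can see how many bytes of a submitted transfer are still outstanding. A minimal client-side sketch follows; it is not part of this patch, the check_sdma_residue() name and the caller's channel setup are hypothetical, and only the standard dmaengine calls (dmaengine_tx_status(), struct dma_tx_state) are assumed to exist as shown.

#include <linux/dmaengine.h>
#include <linux/printk.h>

/* Query how many bytes of a previously submitted SDMA transfer remain.
 * 'chan' is a slave channel already configured for this device and
 * 'cookie' is the value returned by dmaengine_submit(). */
static void check_sdma_residue(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	/* Fills state.residue through the driver's tx_status hook,
	 * which after this patch reports chn_count - chn_real_count. */
	status = dmaengine_tx_status(chan, cookie, &state);

	if (status == DMA_IN_PROGRESS)
		pr_info("SDMA transfer in flight, %u bytes left\n", state.residue);
	else
		pr_info("SDMA transfer done, residue %u bytes\n", state.residue);
}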