Diffstat (limited to 'drivers/dma/dw_dmac.c')
-rw-r--r--  drivers/dma/dw_dmac.c  103
1 file changed, 66 insertions(+), 37 deletions(-)
diff --git a/drivers/dma/dw_dmac.c b/drivers/dma/dw_dmac.c
index a3991ab0d67e..9c25c7d099e4 100644
--- a/drivers/dma/dw_dmac.c
+++ b/drivers/dma/dw_dmac.c
@@ -32,26 +32,30 @@
  * which does not support descriptor writeback.
  */
 
-/* NOTE: DMS+SMS is system-specific. We should get this information
- * from the platform code somehow.
- */
-#define DWC_DEFAULT_CTLLO	(DWC_CTLL_DST_MSIZE(0)		\
-		| DWC_CTLL_SRC_MSIZE(0)				\
-		| DWC_CTLL_DMS(0)				\
-		| DWC_CTLL_SMS(1)				\
-		| DWC_CTLL_LLP_D_EN				\
-		| DWC_CTLL_LLP_S_EN)
+#define DWC_DEFAULT_CTLLO(private) ({				\
+		struct dw_dma_slave *__slave = (private);	\
+		int dms = __slave ? __slave->dst_master : 0;	\
+		int sms = __slave ? __slave->src_master : 1;	\
+		u8 smsize = __slave ? __slave->src_msize : DW_DMA_MSIZE_16; \
+		u8 dmsize = __slave ? __slave->dst_msize : DW_DMA_MSIZE_16; \
+								\
+		(DWC_CTLL_DST_MSIZE(dmsize)			\
+		 | DWC_CTLL_SRC_MSIZE(smsize)			\
+		 | DWC_CTLL_LLP_D_EN				\
+		 | DWC_CTLL_LLP_S_EN				\
+		 | DWC_CTLL_DMS(dms)				\
+		 | DWC_CTLL_SMS(sms));				\
+	})
 
 /*
  * This is configuration-dependent and usually a funny size like 4095.
- * Let's round it down to the nearest power of two.
  *
  * Note that this is a transfer count, i.e. if we transfer 32-bit
- * words, we can do 8192 bytes per descriptor.
+ * words, we can do 16380 bytes per descriptor.
  *
  * This parameter is also system-specific.
  */
-#define DWC_MAX_COUNT	2048U
+#define DWC_MAX_COUNT	4095U
 
 /*
  * Number of descriptors to allocate for each channel. This should be
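With this hunk, bus masters and burst sizes come from the slave configuration passed in chan->private instead of being hard-coded. A minimal sketch of a board-side dw_dma_slave initializer feeding the macro, using only the field names visible above; the variable name and master numbering are illustrative assumptions:

	/* Hypothetical platform config; fields as read by DWC_DEFAULT_CTLLO().
	 * Master numbers and the device this serves are assumptions. */
	static struct dw_dma_slave example_dma_slave = {
		.dst_master = 0,		/* AHB master wired to the peripheral */
		.src_master = 1,		/* AHB master wired to memory */
		.src_msize  = DW_DMA_MSIZE_16,	/* same burst default the macro uses */
		.dst_msize  = DW_DMA_MSIZE_16,
	};

A NULL chan->private still yields the old master assignment (dms = 0, sms = 1), with the burst default raised from MSIZE(0) to DW_DMA_MSIZE_16.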
@@ -84,11 +88,6 @@ static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc)
 	return list_entry(dwc->active_list.next, struct dw_desc, desc_node);
 }
 
-static struct dw_desc *dwc_first_queued(struct dw_dma_chan *dwc)
-{
-	return list_entry(dwc->queue.next, struct dw_desc, desc_node);
-}
-
 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc)
 {
 	struct dw_desc *desc, *_desc;
@@ -201,6 +200,7 @@ dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc)
 	dma_async_tx_callback		callback;
 	void				*param;
 	struct dma_async_tx_descriptor	*txd = &desc->txd;
+	struct dw_desc			*child;
 
 	dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie);
 
@@ -209,6 +209,12 @@ dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc)
 	param = txd->callback_param;
 
 	dwc_sync_desc_for_cpu(dwc, desc);
+
+	/* async_tx_ack */
+	list_for_each_entry(child, &desc->tx_list, desc_node)
+		async_tx_ack(&child->txd);
+	async_tx_ack(&desc->txd);
+
 	list_splice_init(&desc->tx_list, &dwc->free_list);
 	list_move(&desc->desc_node, &dwc->free_list);
 
@@ -259,10 +265,11 @@ static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc)
 	 * Submit queued descriptors ASAP, i.e. before we go through
 	 * the completed ones.
 	 */
-	if (!list_empty(&dwc->queue))
-		dwc_dostart(dwc, dwc_first_queued(dwc));
 	list_splice_init(&dwc->active_list, &list);
-	list_splice_init(&dwc->queue, &dwc->active_list);
+	if (!list_empty(&dwc->queue)) {
+		list_move(dwc->queue.next, &dwc->active_list);
+		dwc_dostart(dwc, dwc_first_active(dwc));
+	}
 
 	list_for_each_entry_safe(desc, _desc, &list, desc_node)
 		dwc_descriptor_complete(dwc, desc);
@@ -291,6 +298,9 @@ static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc)
 		return;
 	}
 
+	if (list_empty(&dwc->active_list))
+		return;
+
 	dev_vdbg(chan2dev(&dwc->chan), "scan_descriptors: llp=0x%x\n", llp);
 
 	list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
@@ -319,8 +329,8 @@ static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc)
 		cpu_relax();
 
 	if (!list_empty(&dwc->queue)) {
-		dwc_dostart(dwc, dwc_first_queued(dwc));
-		list_splice_init(&dwc->queue, &dwc->active_list);
+		list_move(dwc->queue.next, &dwc->active_list);
+		dwc_dostart(dwc, dwc_first_active(dwc));
 	}
 }
 
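This and the preceding dwc_complete_all() hunk replace list_splice_init(), which moved the whole queue into the active list, with list_move(), which transfers only the first queued descriptor, so dwc_dostart() is handed exactly one descriptor chain at a time. For reference, list_move() from <linux/list.h> is just unlink-plus-reinsert at the head:

	/* From <linux/list.h>: unlink @list from its current list and
	 * re-insert it immediately after @head. */
	static inline void list_move(struct list_head *list, struct list_head *head)
	{
		__list_del(list->prev, list->next);
		list_add(list, head);
	}

Because the entry lands right after the head, dwc->queue.next becomes the first element of active_list, which is exactly what dwc_first_active() then returns.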
@@ -346,7 +356,7 @@ static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc)
 	 */
 	bad_desc = dwc_first_active(dwc);
 	list_del_init(&bad_desc->desc_node);
-	list_splice_init(&dwc->queue, dwc->active_list.prev);
+	list_move(dwc->queue.next, dwc->active_list.prev);
 
 	/* Clear the error flag and try to restart the controller */
 	dma_writel(dw, CLEAR.ERROR, dwc->mask);
@@ -541,8 +551,8 @@ static dma_cookie_t dwc_tx_submit(struct dma_async_tx_descriptor *tx)
 	if (list_empty(&dwc->active_list)) {
 		dev_vdbg(chan2dev(tx->chan), "tx_submit: started %u\n",
 				desc->txd.cookie);
-		dwc_dostart(dwc, desc);
 		list_add_tail(&desc->desc_node, &dwc->active_list);
+		dwc_dostart(dwc, dwc_first_active(dwc));
 	} else {
 		dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n",
 				desc->txd.cookie);
@@ -581,14 +591,16 @@ dwc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 	 * We can be a lot more clever here, but this should take care
 	 * of the most common optimization.
 	 */
-	if (!((src | dest | len) & 3))
+	if (!((src | dest | len) & 7))
+		src_width = dst_width = 3;
+	else if (!((src | dest | len) & 3))
 		src_width = dst_width = 2;
 	else if (!((src | dest | len) & 1))
 		src_width = dst_width = 1;
 	else
 		src_width = dst_width = 0;
 
-	ctllo = DWC_DEFAULT_CTLLO
+	ctllo = DWC_DEFAULT_CTLLO(chan->private)
 			| DWC_CTLL_DST_WIDTH(dst_width)
 			| DWC_CTLL_SRC_WIDTH(src_width)
 			| DWC_CTLL_DST_INC
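The added branch extends the common-alignment check to 64-bit transfers: when source, destination, and length are all multiples of 8, the OR of the three has its low three bits clear and a width of 3 (2^3 = 8 bytes per beat) is selected. A worked example with made-up values:

	/* Hypothetical values for the width selection above:
	 *   src = 0x1000, dest = 0x2000, len = 64 (0x40)
	 *   src | dest | len = 0x3040, and 0x3040 & 7 == 0
	 *   => src_width = dst_width = 3, i.e. 8-byte beats; with
	 *      DWC_MAX_COUNT now 4095, one descriptor can move up to
	 *      4095 * 8 = 32760 bytes at this width.
	 */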
@@ -669,11 +681,11 @@ dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 
 	switch (direction) {
 	case DMA_TO_DEVICE:
-		ctllo = (DWC_DEFAULT_CTLLO
+		ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 				| DWC_CTLL_DST_WIDTH(reg_width)
 				| DWC_CTLL_DST_FIX
 				| DWC_CTLL_SRC_INC
-				| DWC_CTLL_FC_M2P);
+				| DWC_CTLL_FC(dws->fc));
 		reg = dws->tx_reg;
 		for_each_sg(sgl, sg, sg_len, i) {
 			struct dw_desc *desc;
@@ -714,11 +726,11 @@ dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		}
 		break;
 	case DMA_FROM_DEVICE:
-		ctllo = (DWC_DEFAULT_CTLLO
+		ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 				| DWC_CTLL_SRC_WIDTH(reg_width)
 				| DWC_CTLL_DST_INC
 				| DWC_CTLL_SRC_FIX
-				| DWC_CTLL_FC_P2M);
+				| DWC_CTLL_FC(dws->fc));
 
 		reg = dws->rx_reg;
 		for_each_sg(sgl, sg, sg_len, i) {
@@ -834,7 +846,9 @@ dwc_tx_status(struct dma_chan *chan,
 
 	ret = dma_async_is_complete(cookie, last_complete, last_used);
 	if (ret != DMA_SUCCESS) {
+		spin_lock_bh(&dwc->lock);
 		dwc_scan_descriptors(to_dw_dma(chan->device), dwc);
+		spin_unlock_bh(&dwc->lock);
 
 		last_complete = dwc->completed;
 		last_used = chan->cookie;
@@ -889,8 +903,11 @@ static int dwc_alloc_chan_resources(struct dma_chan *chan)
 		BUG_ON(!dws->dma_dev || dws->dma_dev != dw->dma.dev);
 
 		cfghi = dws->cfg_hi;
-		cfglo = dws->cfg_lo;
+		cfglo = dws->cfg_lo & ~DWC_CFGL_CH_PRIOR_MASK;
 	}
+
+	cfglo |= DWC_CFGL_CH_PRIOR(dwc->priority);
+
 	channel_writel(dwc, CFG_LO, cfglo);
 	channel_writel(dwc, CFG_HI, cfghi);
 
@@ -1126,23 +1143,23 @@ struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
 		case DMA_TO_DEVICE:
 			desc->lli.dar = dws->tx_reg;
 			desc->lli.sar = buf_addr + (period_len * i);
-			desc->lli.ctllo = (DWC_DEFAULT_CTLLO
+			desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 					| DWC_CTLL_DST_WIDTH(reg_width)
 					| DWC_CTLL_SRC_WIDTH(reg_width)
 					| DWC_CTLL_DST_FIX
 					| DWC_CTLL_SRC_INC
-					| DWC_CTLL_FC_M2P
+					| DWC_CTLL_FC(dws->fc)
 					| DWC_CTLL_INT_EN);
 			break;
 		case DMA_FROM_DEVICE:
 			desc->lli.dar = buf_addr + (period_len * i);
 			desc->lli.sar = dws->rx_reg;
-			desc->lli.ctllo = (DWC_DEFAULT_CTLLO
+			desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan->private)
 					| DWC_CTLL_SRC_WIDTH(reg_width)
 					| DWC_CTLL_DST_WIDTH(reg_width)
 					| DWC_CTLL_DST_INC
 					| DWC_CTLL_SRC_FIX
-					| DWC_CTLL_FC_P2M
+					| DWC_CTLL_FC(dws->fc)
 					| DWC_CTLL_INT_EN);
 			break;
 		default:
@@ -1307,7 +1324,17 @@ static int __init dw_probe(struct platform_device *pdev)
 		dwc->chan.device = &dw->dma;
 		dwc->chan.cookie = dwc->completed = 1;
 		dwc->chan.chan_id = i;
-		list_add_tail(&dwc->chan.device_node, &dw->dma.channels);
+		if (pdata->chan_allocation_order == CHAN_ALLOCATION_ASCENDING)
+			list_add_tail(&dwc->chan.device_node,
+					&dw->dma.channels);
+		else
+			list_add(&dwc->chan.device_node, &dw->dma.channels);
+
+		/* 7 is highest priority & 0 is lowest. */
+		if (pdata->chan_priority == CHAN_PRIORITY_ASCENDING)
+			dwc->priority = 7 - i;
+		else
+			dwc->priority = i;
 
 		dwc->ch_regs = &__dw_regs(dw)->CHAN[i];
 		spin_lock_init(&dwc->lock);
@@ -1335,6 +1362,8 @@ static int __init dw_probe(struct platform_device *pdev)
 
 	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
 	dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
+	if (pdata->is_private)
+		dma_cap_set(DMA_PRIVATE, dw->dma.cap_mask);
 	dw->dma.dev = &pdev->dev;
 	dw->dma.device_alloc_chan_resources = dwc_alloc_chan_resources;
 	dw->dma.device_free_chan_resources = dwc_free_chan_resources;
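Channel ordering, channel priority, and the DMA_PRIVATE capability are all driven by platform data in these probe hunks. A sketch of what a board file might pass; only the three fields used above are taken from this diff, the struct name and channel count are assumptions from context:

	/* Sketch: pdata consumed by dw_probe() above. nr_channels and the
	 * struct name are assumed, not part of this diff. */
	static struct dw_dma_platform_data dw_dmac0_pdata = {
		.nr_channels		= 8,
		.is_private		= true,	/* sets DMA_PRIVATE on the device */
		.chan_allocation_order	= CHAN_ALLOCATION_ASCENDING,
		/* ASCENDING maps channel 0 to priority 7, the highest */
		.chan_priority		= CHAN_PRIORITY_ASCENDING,
	};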
@@ -1447,7 +1476,7 @@ static int __init dw_init(void)
 {
 	return platform_driver_probe(&dw_driver, dw_probe);
 }
-module_init(dw_init);
+subsys_initcall(dw_init);
 
 static void __exit dw_exit(void)
 {