author		Lars-Peter Clausen <lars@metafoo.de>	2013-07-23 04:24:50 -0400
committer	Vinod Koul <vinod.koul@intel.com>	2013-07-28 09:38:33 -0400
commit		fc51446021f42aca8906e701fc2292965aafcb15 (patch)
tree		a225fd3f5671140ffeb2ae6b771c2756e8fcba4a
parent		27abb2ffb07a70bdebf9a785658f68f10600281c (diff)
dma: pl330: Fix cyclic transfers
Allocate a descriptor for each period of a cyclic transfer, not just the
first. Also, since the callback needs to be called for each finished
period, make sure to initialize the callback and callback_param fields of
each descriptor in a cyclic transfer.

Cc: stable@vger.kernel.org
Signed-off-by: Lars-Peter Clausen <lars@metafoo.de>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
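For context, the dmaengine framework expects a cyclic transfer's completion
callback to run once per finished period; if only the first descriptor
carries the callback, every period after the first completes silently. A
minimal client-side sketch of a typical cyclic setup (not part of this
patch; my_period_done and my_start_cyclic are hypothetical names):

#include <linux/dmaengine.h>

/* Called once per completed period, e.g. to advance an audio ring buffer. */
static void my_period_done(void *param)
{
	/* consume the period that just finished */
}

static int my_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
			   size_t buf_len, size_t period_len)
{
	struct dma_async_tx_descriptor *desc;

	/* One transfer covering buf_len, split into buf_len/period_len periods. */
	desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
					 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	/* With this fix, pl330 propagates these to every per-period descriptor. */
	desc->callback = my_period_done;
	desc->callback_param = NULL;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}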
-rw-r--r--	drivers/dma/pl330.c	93
1 file changed, 67 insertions(+), 26 deletions(-)
diff --git a/drivers/dma/pl330.c b/drivers/dma/pl330.c
index 593827b3fdd4..fa645d825009 100644
--- a/drivers/dma/pl330.c
+++ b/drivers/dma/pl330.c
@@ -2505,6 +2505,10 @@ static dma_cookie_t pl330_tx_submit(struct dma_async_tx_descriptor *tx)
 	/* Assign cookies to all nodes */
 	while (!list_empty(&last->node)) {
 		desc = list_entry(last->node.next, struct dma_pl330_desc, node);
+		if (pch->cyclic) {
+			desc->txd.callback = last->txd.callback;
+			desc->txd.callback_param = last->txd.callback_param;
+		}
 
 		dma_cookie_assign(&desc->txd);
 
@@ -2688,45 +2692,82 @@ static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 		size_t period_len, enum dma_transfer_direction direction,
 		unsigned long flags, void *context)
 {
-	struct dma_pl330_desc *desc;
+	struct dma_pl330_desc *desc = NULL, *first = NULL;
 	struct dma_pl330_chan *pch = to_pchan(chan);
+	struct dma_pl330_dmac *pdmac = pch->dmac;
+	unsigned int i;
 	dma_addr_t dst;
 	dma_addr_t src;
 
-	desc = pl330_get_desc(pch);
-	if (!desc) {
-		dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n",
-			__func__, __LINE__);
+	if (len % period_len != 0)
 		return NULL;
-	}
 
-	switch (direction) {
-	case DMA_MEM_TO_DEV:
-		desc->rqcfg.src_inc = 1;
-		desc->rqcfg.dst_inc = 0;
-		desc->req.rqtype = MEMTODEV;
-		src = dma_addr;
-		dst = pch->fifo_addr;
-		break;
-	case DMA_DEV_TO_MEM:
-		desc->rqcfg.src_inc = 0;
-		desc->rqcfg.dst_inc = 1;
-		desc->req.rqtype = DEVTOMEM;
-		src = pch->fifo_addr;
-		dst = dma_addr;
-		break;
-	default:
+	if (!is_slave_direction(direction)) {
 		dev_err(pch->dmac->pif.dev, "%s:%d Invalid dma direction\n",
 			__func__, __LINE__);
 		return NULL;
 	}
 
-	desc->rqcfg.brst_size = pch->burst_sz;
-	desc->rqcfg.brst_len = 1;
+	for (i = 0; i < len / period_len; i++) {
+		desc = pl330_get_desc(pch);
+		if (!desc) {
+			dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n",
+				__func__, __LINE__);
 
-	pch->cyclic = true;
+			if (!first)
+				return NULL;
+
+			spin_lock_irqsave(&pdmac->pool_lock, flags);
+
+			while (!list_empty(&first->node)) {
+				desc = list_entry(first->node.next,
+						struct dma_pl330_desc, node);
+				list_move_tail(&desc->node, &pdmac->desc_pool);
+			}
+
+			list_move_tail(&first->node, &pdmac->desc_pool);
 
-	fill_px(&desc->px, dst, src, period_len);
+			spin_unlock_irqrestore(&pdmac->pool_lock, flags);
+
+			return NULL;
+		}
+
+		switch (direction) {
+		case DMA_MEM_TO_DEV:
+			desc->rqcfg.src_inc = 1;
+			desc->rqcfg.dst_inc = 0;
+			desc->req.rqtype = MEMTODEV;
+			src = dma_addr;
+			dst = pch->fifo_addr;
+			break;
+		case DMA_DEV_TO_MEM:
+			desc->rqcfg.src_inc = 0;
+			desc->rqcfg.dst_inc = 1;
+			desc->req.rqtype = DEVTOMEM;
+			src = pch->fifo_addr;
+			dst = dma_addr;
+			break;
+		default:
+			break;
+		}
+
+		desc->rqcfg.brst_size = pch->burst_sz;
+		desc->rqcfg.brst_len = 1;
+		fill_px(&desc->px, dst, src, period_len);
+
+		if (!first)
+			first = desc;
+		else
+			list_add_tail(&desc->node, &first->node);
+
+		dma_addr += period_len;
+	}
+
+	if (!desc)
+		return NULL;
+
+	pch->cyclic = true;
+	desc->txd.flags = flags;
 
 	return &desc->txd;
 }
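The error path in the new loop follows a common unwind pattern: when
allocating the descriptor for a later period fails, every descriptor
already taken is moved back to the pool (under pool_lock) before returning
NULL, so nothing leaks. A standalone sketch of the same pattern, in plain C
with a hypothetical free pool rather than the driver's structures:

#include <stddef.h>

struct desc {
	struct desc *next;	/* chain link, reused as free-pool link */
};

static struct desc *pool;	/* head of the free pool */

static struct desc *get_desc(void)
{
	struct desc *d = pool;

	if (d) {
		pool = d->next;
		d->next = NULL;
	}
	return d;
}

static void put_desc(struct desc *d)
{
	d->next = pool;
	pool = d;
}

/*
 * Allocate one descriptor per period, chaining them together. If any
 * allocation fails, push every descriptor already taken back onto the
 * pool before returning NULL, mirroring the rollback in the patch.
 */
static struct desc *prep_periods(size_t nperiods)
{
	struct desc *first = NULL, *last = NULL;
	size_t i;

	for (i = 0; i < nperiods; i++) {
		struct desc *d = get_desc();

		if (!d) {
			while (first) {
				struct desc *next = first->next;

				put_desc(first);
				first = next;
			}
			return NULL;
		}
		if (!first)
			first = d;
		else
			last->next = d;
		last = d;
	}
	return first;
}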