Diffstat (limited to 'drivers/dma/mpc512x_dma.c')
 drivers/dma/mpc512x_dma.c | 25 ++++++++-----------------
 1 file changed, 8 insertions(+), 17 deletions(-)
diff --git a/drivers/dma/mpc512x_dma.c b/drivers/dma/mpc512x_dma.c
index 4d6d4cf66949..2ab0a3d0eed5 100644
--- a/drivers/dma/mpc512x_dma.c
+++ b/drivers/dma/mpc512x_dma.c
@@ -44,6 +44,8 @@
 
 #include <linux/random.h>
 
+#include "dmaengine.h"
+
 /* Number of DMA Transfer descriptors allocated per channel */
 #define MPC_DMA_DESCRIPTORS	64
 
@@ -188,7 +190,6 @@ struct mpc_dma_chan {
 	struct list_head		completed;
 	struct mpc_dma_tcd		*tcd;
 	dma_addr_t			tcd_paddr;
-	dma_cookie_t			completed_cookie;
 
 	/* Lock for this structure */
 	spinlock_t			lock;
@@ -365,7 +366,7 @@ static void mpc_dma_process_completed(struct mpc_dma *mdma)
 		/* Free descriptors */
 		spin_lock_irqsave(&mchan->lock, flags);
 		list_splice_tail_init(&list, &mchan->free);
-		mchan->completed_cookie = last_cookie;
+		mchan->chan.completed_cookie = last_cookie;
 		spin_unlock_irqrestore(&mchan->lock, flags);
 	}
 }
@@ -438,13 +439,7 @@ static dma_cookie_t mpc_dma_tx_submit(struct dma_async_tx_descriptor *txd)
 		mpc_dma_execute(mchan);
 
 	/* Update cookie */
-	cookie = mchan->chan.cookie + 1;
-	if (cookie <= 0)
-		cookie = 1;
-
-	mchan->chan.cookie = cookie;
-	mdesc->desc.cookie = cookie;
-
+	cookie = dma_cookie_assign(txd);
 	spin_unlock_irqrestore(&mchan->lock, flags);
 
 	return cookie;
@@ -562,17 +557,14 @@ mpc_dma_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
 	       struct dma_tx_state *txstate)
 {
 	struct mpc_dma_chan *mchan = dma_chan_to_mpc_dma_chan(chan);
+	enum dma_status ret;
 	unsigned long flags;
-	dma_cookie_t last_used;
-	dma_cookie_t last_complete;
 
 	spin_lock_irqsave(&mchan->lock, flags);
-	last_used = mchan->chan.cookie;
-	last_complete = mchan->completed_cookie;
+	ret = dma_cookie_status(chan, cookie, txstate);
 	spin_unlock_irqrestore(&mchan->lock, flags);
 
-	dma_set_tx_state(txstate, last_complete, last_used, 0);
-	return dma_async_is_complete(cookie, last_complete, last_used);
+	return ret;
 }
 
 /* Prepare descriptor for memory to memory copy */
@@ -741,8 +733,7 @@ static int __devinit mpc_dma_probe(struct platform_device *op)
 		mchan = &mdma->channels[i];
 
 		mchan->chan.device = dma;
-		mchan->chan.cookie = 1;
-		mchan->completed_cookie = mchan->chan.cookie;
+		dma_cookie_init(&mchan->chan);
 
 		INIT_LIST_HEAD(&mchan->free);
 		INIT_LIST_HEAD(&mchan->prepared);
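
The patch drops the driver's private completed_cookie field and open-coded cookie arithmetic in favour of the shared helpers pulled in via the new #include "dmaengine.h". As a rough sketch of the bookkeeping those helpers perform -- an illustration under the usual cookie conventions (wrap-around at DMA_MIN_COOKIE, completed/used pair reported through struct dma_tx_state), not the authoritative definitions, which live in drivers/dma/dmaengine.h:

/* Sketch only: approximates the helpers' behaviour for illustration. */
static inline void dma_cookie_init(struct dma_chan *chan)
{
	/* start both counters at the smallest valid cookie */
	chan->cookie = DMA_MIN_COOKIE;
	chan->completed_cookie = DMA_MIN_COOKIE;
}

static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx)
{
	struct dma_chan *chan = tx->chan;
	dma_cookie_t cookie = chan->cookie + 1;

	if (cookie < DMA_MIN_COOKIE)	/* wrapped: skip zero/negative error values */
		cookie = DMA_MIN_COOKIE;
	tx->cookie = chan->cookie = cookie;
	return cookie;
}

static inline enum dma_status dma_cookie_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *state)
{
	dma_cookie_t used = chan->cookie;
	dma_cookie_t complete = chan->completed_cookie;

	if (state) {
		state->last = complete;
		state->used = used;
		state->residue = 0;
	}
	return dma_async_is_complete(cookie, complete, used);
}

With these in place, mpc_dma_tx_submit() assigns cookies through dma_cookie_assign(), mpc_dma_tx_status() collapses to a locked call into dma_cookie_status(), and channel setup in mpc_dma_probe() becomes a single dma_cookie_init() call -- the same logic every dmaengine driver used to duplicate by hand.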