about summary refs log tree commit diff stats
path: root/drivers/dma/sirf-dma.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/dma/sirf-dma.c')
-rw-r--r--drivers/dma/sirf-dma.c27
1 file changed, 9 insertions, 18 deletions
diff --git a/drivers/dma/sirf-dma.c b/drivers/dma/sirf-dma.c
index 2333810d1688..434ad31174f2 100644
--- a/drivers/dma/sirf-dma.c
+++ b/drivers/dma/sirf-dma.c
@@ -18,6 +18,8 @@
18#include <linux/of_platform.h> 18#include <linux/of_platform.h>
19#include <linux/sirfsoc_dma.h> 19#include <linux/sirfsoc_dma.h>
20 20
21#include "dmaengine.h"
22
21#define SIRFSOC_DMA_DESCRIPTORS 16 23#define SIRFSOC_DMA_DESCRIPTORS 16
22#define SIRFSOC_DMA_CHANNELS 16 24#define SIRFSOC_DMA_CHANNELS 16
23 25
@@ -59,7 +61,6 @@ struct sirfsoc_dma_chan {
59 struct list_head queued; 61 struct list_head queued;
60 struct list_head active; 62 struct list_head active;
61 struct list_head completed; 63 struct list_head completed;
62 dma_cookie_t completed_cookie;
63 unsigned long happened_cyclic; 64 unsigned long happened_cyclic;
64 unsigned long completed_cyclic; 65 unsigned long completed_cyclic;
65 66
@@ -208,7 +209,7 @@ static void sirfsoc_dma_process_completed(struct sirfsoc_dma *sdma)
208 /* Free descriptors */ 209 /* Free descriptors */
209 spin_lock_irqsave(&schan->lock, flags); 210 spin_lock_irqsave(&schan->lock, flags);
210 list_splice_tail_init(&list, &schan->free); 211 list_splice_tail_init(&list, &schan->free);
211 schan->completed_cookie = last_cookie; 212 schan->chan.completed_cookie = last_cookie;
212 spin_unlock_irqrestore(&schan->lock, flags); 213 spin_unlock_irqrestore(&schan->lock, flags);
213 } else { 214 } else {
214 /* for cyclic channel, desc is always in active list */ 215 /* for cyclic channel, desc is always in active list */
@@ -258,13 +259,7 @@ static dma_cookie_t sirfsoc_dma_tx_submit(struct dma_async_tx_descriptor *txd)
258 /* Move descriptor to queue */ 259 /* Move descriptor to queue */
259 list_move_tail(&sdesc->node, &schan->queued); 260 list_move_tail(&sdesc->node, &schan->queued);
260 261
261 /* Update cookie */ 262 cookie = dma_cookie_assign(txd);
262 cookie = schan->chan.cookie + 1;
263 if (cookie <= 0)
264 cookie = 1;
265
266 schan->chan.cookie = cookie;
267 sdesc->desc.cookie = cookie;
268 263
269 spin_unlock_irqrestore(&schan->lock, flags); 264 spin_unlock_irqrestore(&schan->lock, flags);
270 265
@@ -414,16 +409,13 @@ sirfsoc_dma_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
414{ 409{
415 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); 410 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
416 unsigned long flags; 411 unsigned long flags;
417 dma_cookie_t last_used; 412 enum dma_status ret;
418 dma_cookie_t last_complete;
419 413
420 spin_lock_irqsave(&schan->lock, flags); 414 spin_lock_irqsave(&schan->lock, flags);
421 last_used = schan->chan.cookie; 415 ret = dma_cookie_status(chan, cookie, txstate);
422 last_complete = schan->completed_cookie;
423 spin_unlock_irqrestore(&schan->lock, flags); 416 spin_unlock_irqrestore(&schan->lock, flags);
424 417
425 dma_set_tx_state(txstate, last_complete, last_used, 0); 418 return ret;
426 return dma_async_is_complete(cookie, last_complete, last_used);
427} 419}
428 420
429static struct dma_async_tx_descriptor *sirfsoc_dma_prep_interleaved( 421static struct dma_async_tx_descriptor *sirfsoc_dma_prep_interleaved(
@@ -497,7 +489,7 @@ err_dir:
497static struct dma_async_tx_descriptor * 489static struct dma_async_tx_descriptor *
498sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr, 490sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr,
499 size_t buf_len, size_t period_len, 491 size_t buf_len, size_t period_len,
500 enum dma_transfer_direction direction) 492 enum dma_transfer_direction direction, void *context)
501{ 493{
502 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); 494 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
503 struct sirfsoc_dma_desc *sdesc = NULL; 495 struct sirfsoc_dma_desc *sdesc = NULL;
@@ -635,8 +627,7 @@ static int __devinit sirfsoc_dma_probe(struct platform_device *op)
635 schan = &sdma->channels[i]; 627 schan = &sdma->channels[i];
636 628
637 schan->chan.device = dma; 629 schan->chan.device = dma;
638 schan->chan.cookie = 1; 630 dma_cookie_init(&schan->chan);
639 schan->completed_cookie = schan->chan.cookie;
640 631
641 INIT_LIST_HEAD(&schan->free); 632 INIT_LIST_HEAD(&schan->free);
642 INIT_LIST_HEAD(&schan->prepared); 633 INIT_LIST_HEAD(&schan->prepared);