diff options
author | Boojin Kim <boojin.kim@samsung.com> | 2011-09-01 20:44:33 -0400 |
---|---|---|
committer | Vinod Koul <vinod.koul@intel.com> | 2011-09-14 01:40:01 -0400 |
commit | 42bc9cf45939c26a5c5eb946d4fd35f1a7b0f9f8 (patch) | |
tree | 2cc6df2b089a13ddcf04107ef4d7c3b5e8ca2ebb /drivers/dma | |
parent | ae43b886f174297366d4e09a008ad8e6592d95df (diff) |
DMA: PL330: Add DMA_CYCLIC capability
This patch adds the DMA_CYCLIC capability, which is used by audio drivers.
A DMA driver activated with this capability reuses the DMA requests that were
submitted through tx_submit().
Signed-off-by: Boojin Kim <boojin.kim@samsung.com>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Vinod Koul <vinod.koul@intel.com>
Cc: Dan Williams <dan.j.williams@intel.com>
Signed-off-by: Kukjin Kim <kgene.kim@samsung.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
Diffstat (limited to 'drivers/dma')
-rw-r--r-- | drivers/dma/pl330.c | 84 |
1 file changed, 83 insertions(+), 1 deletion(-)
diff --git a/drivers/dma/pl330.c b/drivers/dma/pl330.c index 59943ec1e74a..621134fdba4c 100644 --- a/drivers/dma/pl330.c +++ b/drivers/dma/pl330.c | |||
@@ -75,6 +75,9 @@ struct dma_pl330_chan { | |||
75 | int burst_sz; /* the peripheral fifo width */ | 75 | int burst_sz; /* the peripheral fifo width */ |
76 | int burst_len; /* the number of burst */ | 76 | int burst_len; /* the number of burst */ |
77 | dma_addr_t fifo_addr; | 77 | dma_addr_t fifo_addr; |
78 | |||
79 | /* for cyclic capability */ | ||
80 | bool cyclic; | ||
78 | }; | 81 | }; |
79 | 82 | ||
80 | struct dma_pl330_dmac { | 83 | struct dma_pl330_dmac { |
@@ -161,6 +164,31 @@ static inline void free_desc_list(struct list_head *list) | |||
161 | spin_unlock_irqrestore(&pdmac->pool_lock, flags); | 164 | spin_unlock_irqrestore(&pdmac->pool_lock, flags); |
162 | } | 165 | } |
163 | 166 | ||
167 | static inline void handle_cyclic_desc_list(struct list_head *list) | ||
168 | { | ||
169 | struct dma_pl330_desc *desc; | ||
170 | struct dma_pl330_chan *pch; | ||
171 | unsigned long flags; | ||
172 | |||
173 | if (list_empty(list)) | ||
174 | return; | ||
175 | |||
176 | list_for_each_entry(desc, list, node) { | ||
177 | dma_async_tx_callback callback; | ||
178 | |||
179 | /* Change status to reload it */ | ||
180 | desc->status = PREP; | ||
181 | pch = desc->pchan; | ||
182 | callback = desc->txd.callback; | ||
183 | if (callback) | ||
184 | callback(desc->txd.callback_param); | ||
185 | } | ||
186 | |||
187 | spin_lock_irqsave(&pch->lock, flags); | ||
188 | list_splice_tail_init(list, &pch->work_list); | ||
189 | spin_unlock_irqrestore(&pch->lock, flags); | ||
190 | } | ||
191 | |||
164 | static inline void fill_queue(struct dma_pl330_chan *pch) | 192 | static inline void fill_queue(struct dma_pl330_chan *pch) |
165 | { | 193 | { |
166 | struct dma_pl330_desc *desc; | 194 | struct dma_pl330_desc *desc; |
@@ -214,7 +242,10 @@ static void pl330_tasklet(unsigned long data) | |||
214 | 242 | ||
215 | spin_unlock_irqrestore(&pch->lock, flags); | 243 | spin_unlock_irqrestore(&pch->lock, flags); |
216 | 244 | ||
217 | free_desc_list(&list); | 245 | if (pch->cyclic) |
246 | handle_cyclic_desc_list(&list); | ||
247 | else | ||
248 | free_desc_list(&list); | ||
218 | } | 249 | } |
219 | 250 | ||
220 | static void dma_pl330_rqcb(void *token, enum pl330_op_err err) | 251 | static void dma_pl330_rqcb(void *token, enum pl330_op_err err) |
@@ -245,6 +276,7 @@ static int pl330_alloc_chan_resources(struct dma_chan *chan) | |||
245 | spin_lock_irqsave(&pch->lock, flags); | 276 | spin_lock_irqsave(&pch->lock, flags); |
246 | 277 | ||
247 | pch->completed = chan->cookie = 1; | 278 | pch->completed = chan->cookie = 1; |
279 | pch->cyclic = false; | ||
248 | 280 | ||
249 | pch->pl330_chid = pl330_request_channel(&pdmac->pif); | 281 | pch->pl330_chid = pl330_request_channel(&pdmac->pif); |
250 | if (!pch->pl330_chid) { | 282 | if (!pch->pl330_chid) { |
@@ -324,6 +356,9 @@ static void pl330_free_chan_resources(struct dma_chan *chan) | |||
324 | pl330_release_channel(pch->pl330_chid); | 356 | pl330_release_channel(pch->pl330_chid); |
325 | pch->pl330_chid = NULL; | 357 | pch->pl330_chid = NULL; |
326 | 358 | ||
359 | if (pch->cyclic) | ||
360 | list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool); | ||
361 | |||
327 | spin_unlock_irqrestore(&pch->lock, flags); | 362 | spin_unlock_irqrestore(&pch->lock, flags); |
328 | } | 363 | } |
329 | 364 | ||
@@ -560,6 +595,51 @@ static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len) | |||
560 | return burst_len; | 595 | return burst_len; |
561 | } | 596 | } |
562 | 597 | ||
598 | static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic( | ||
599 | struct dma_chan *chan, dma_addr_t dma_addr, size_t len, | ||
600 | size_t period_len, enum dma_data_direction direction) | ||
601 | { | ||
602 | struct dma_pl330_desc *desc; | ||
603 | struct dma_pl330_chan *pch = to_pchan(chan); | ||
604 | dma_addr_t dst; | ||
605 | dma_addr_t src; | ||
606 | |||
607 | desc = pl330_get_desc(pch); | ||
608 | if (!desc) { | ||
609 | dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n", | ||
610 | __func__, __LINE__); | ||
611 | return NULL; | ||
612 | } | ||
613 | |||
614 | switch (direction) { | ||
615 | case DMA_TO_DEVICE: | ||
616 | desc->rqcfg.src_inc = 1; | ||
617 | desc->rqcfg.dst_inc = 0; | ||
618 | src = dma_addr; | ||
619 | dst = pch->fifo_addr; | ||
620 | break; | ||
621 | case DMA_FROM_DEVICE: | ||
622 | desc->rqcfg.src_inc = 0; | ||
623 | desc->rqcfg.dst_inc = 1; | ||
624 | src = pch->fifo_addr; | ||
625 | dst = dma_addr; | ||
626 | break; | ||
627 | default: | ||
628 | dev_err(pch->dmac->pif.dev, "%s:%d Invalid dma direction\n", | ||
629 | __func__, __LINE__); | ||
630 | return NULL; | ||
631 | } | ||
632 | |||
633 | desc->rqcfg.brst_size = pch->burst_sz; | ||
634 | desc->rqcfg.brst_len = 1; | ||
635 | |||
636 | pch->cyclic = true; | ||
637 | |||
638 | fill_px(&desc->px, dst, src, period_len); | ||
639 | |||
640 | return &desc->txd; | ||
641 | } | ||
642 | |||
563 | static struct dma_async_tx_descriptor * | 643 | static struct dma_async_tx_descriptor * |
564 | pl330_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, | 644 | pl330_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, |
565 | dma_addr_t src, size_t len, unsigned long flags) | 645 | dma_addr_t src, size_t len, unsigned long flags) |
@@ -791,6 +871,7 @@ pl330_probe(struct amba_device *adev, const struct amba_id *id) | |||
791 | case MEMTODEV: | 871 | case MEMTODEV: |
792 | case DEVTOMEM: | 872 | case DEVTOMEM: |
793 | dma_cap_set(DMA_SLAVE, pd->cap_mask); | 873 | dma_cap_set(DMA_SLAVE, pd->cap_mask); |
874 | dma_cap_set(DMA_CYCLIC, pd->cap_mask); | ||
794 | break; | 875 | break; |
795 | default: | 876 | default: |
796 | dev_err(&adev->dev, "DEVTODEV Not Supported\n"); | 877 | dev_err(&adev->dev, "DEVTODEV Not Supported\n"); |
@@ -819,6 +900,7 @@ pl330_probe(struct amba_device *adev, const struct amba_id *id) | |||
819 | pd->device_alloc_chan_resources = pl330_alloc_chan_resources; | 900 | pd->device_alloc_chan_resources = pl330_alloc_chan_resources; |
820 | pd->device_free_chan_resources = pl330_free_chan_resources; | 901 | pd->device_free_chan_resources = pl330_free_chan_resources; |
821 | pd->device_prep_dma_memcpy = pl330_prep_dma_memcpy; | 902 | pd->device_prep_dma_memcpy = pl330_prep_dma_memcpy; |
903 | pd->device_prep_dma_cyclic = pl330_prep_dma_cyclic; | ||
822 | pd->device_tx_status = pl330_tx_status; | 904 | pd->device_tx_status = pl330_tx_status; |
823 | pd->device_prep_slave_sg = pl330_prep_slave_sg; | 905 | pd->device_prep_slave_sg = pl330_prep_slave_sg; |
824 | pd->device_control = pl330_control; | 906 | pd->device_control = pl330_control; |