author		Vinod Koul <vinod.koul@intel.com>	2018-01-31 03:21:26 -0500
committer	Vinod Koul <vinod.koul@intel.com>	2018-01-31 03:21:26 -0500
commit		b8e1a963593551431ec0b3ac7bfce50f4a32ee35 (patch)
tree		0552e21b5b997e7f87080660d62e25b189f47c53
parent		cea0dd4d96e548b720657a5e38ef291be6d4fff3 (diff)
parent		2c6929d2ea708fbdcab6e6721c9a57b54a7e5a6e (diff)
Merge branch 'topic/virt-dma' into for-linus
-rw-r--r--	drivers/dma/amba-pl08x.c	11
-rw-r--r--	drivers/dma/bcm2835-dma.c	10
-rw-r--r--	drivers/dma/dma-jz4780.c	10
-rw-r--r--	drivers/dma/edma.c	7
-rw-r--r--	drivers/dma/img-mdc-dma.c	17
-rw-r--r--	drivers/dma/k3dma.c	10
-rw-r--r--	drivers/dma/omap-dma.c	2
-rw-r--r--	drivers/dma/s3c24xx-dma.c	11
-rw-r--r--	drivers/dma/virt-dma.c	5
-rw-r--r--	drivers/dma/virt-dma.h	44
10 files changed, 107 insertions(+), 20 deletions(-)
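
Every driver-side hunk below follows the same pattern: device_terminate_all() hands the descriptor that is still on the hardware to the new vchan_terminate_vdesc() helper instead of freeing it under vc.lock, and a new device_synchronize() callback wraps vchan_synchronize(), which later frees the parked descriptor. A minimal sketch of that pattern follows; the "foo_*" names are hypothetical, only the virt-dma helpers are real:

/* Sketch only: "foo_*" names are hypothetical, the virt-dma helpers are real. */
#include <linux/dmaengine.h>
#include <linux/spinlock.h>

#include "virt-dma.h"

struct foo_desc {
	struct virt_dma_desc vd;	/* must be embedded for the vchan helpers */
};

struct foo_chan {
	struct virt_dma_chan vc;	/* provides vc.lock, desc lists and tasklet */
	struct foo_desc *cur;		/* descriptor currently on the hardware */
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *chan)
{
	return container_of(chan, struct foo_chan, vc.chan);
}

static int foo_terminate_all(struct dma_chan *chan)
{
	struct foo_chan *fc = to_foo_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fc->vc.lock, flags);

	/* ... stop the hardware here ... */

	if (fc->cur) {
		/*
		 * Do not free the in-flight descriptor here: park it on
		 * vc->vd_terminated so a concurrent vchan_complete() cannot
		 * dereference freed memory.  vc.lock must be held.
		 */
		vchan_terminate_vdesc(&fc->cur->vd);
		fc->cur = NULL;
	}
	vchan_get_all_descriptors(&fc->vc, &head);

	spin_unlock_irqrestore(&fc->vc.lock, flags);

	vchan_dma_desc_free_list(&fc->vc, &head);

	return 0;
}

static void foo_synchronize(struct dma_chan *chan)
{
	struct foo_chan *fc = to_foo_chan(chan);

	/* kills the completion tasklet and frees the parked descriptor */
	vchan_synchronize(&fc->vc);
}

In probe() these would be wired up as device_terminate_all = foo_terminate_all and device_synchronize = foo_synchronize, which is exactly what each converted driver below does.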
diff --git a/drivers/dma/amba-pl08x.c b/drivers/dma/amba-pl08x.c
index b52b0d55247e..97483df1f82e 100644
--- a/drivers/dma/amba-pl08x.c
+++ b/drivers/dma/amba-pl08x.c
@@ -2182,7 +2182,7 @@ static int pl08x_terminate_all(struct dma_chan *chan)
 	}
 	/* Dequeue jobs and free LLIs */
 	if (plchan->at) {
-		pl08x_desc_free(&plchan->at->vd);
+		vchan_terminate_vdesc(&plchan->at->vd);
 		plchan->at = NULL;
 	}
 	/* Dequeue jobs not yet fired as well */
@@ -2193,6 +2193,13 @@ static int pl08x_terminate_all(struct dma_chan *chan)
 	return 0;
 }
 
+static void pl08x_synchronize(struct dma_chan *chan)
+{
+	struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
+
+	vchan_synchronize(&plchan->vc);
+}
+
 static int pl08x_pause(struct dma_chan *chan)
 {
 	struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
@@ -2773,6 +2780,7 @@ static int pl08x_probe(struct amba_device *adev, const struct amba_id *id)
 	pl08x->memcpy.device_pause = pl08x_pause;
 	pl08x->memcpy.device_resume = pl08x_resume;
 	pl08x->memcpy.device_terminate_all = pl08x_terminate_all;
+	pl08x->memcpy.device_synchronize = pl08x_synchronize;
 	pl08x->memcpy.src_addr_widths = PL80X_DMA_BUSWIDTHS;
 	pl08x->memcpy.dst_addr_widths = PL80X_DMA_BUSWIDTHS;
 	pl08x->memcpy.directions = BIT(DMA_MEM_TO_MEM);
@@ -2802,6 +2810,7 @@ static int pl08x_probe(struct amba_device *adev, const struct amba_id *id)
 	pl08x->slave.device_pause = pl08x_pause;
 	pl08x->slave.device_resume = pl08x_resume;
 	pl08x->slave.device_terminate_all = pl08x_terminate_all;
+	pl08x->slave.device_synchronize = pl08x_synchronize;
 	pl08x->slave.src_addr_widths = PL80X_DMA_BUSWIDTHS;
 	pl08x->slave.dst_addr_widths = PL80X_DMA_BUSWIDTHS;
 	pl08x->slave.directions =
diff --git a/drivers/dma/bcm2835-dma.c b/drivers/dma/bcm2835-dma.c
index 6204cc32d09c..847f84a41a69 100644
--- a/drivers/dma/bcm2835-dma.c
+++ b/drivers/dma/bcm2835-dma.c
@@ -812,7 +812,7 @@ static int bcm2835_dma_terminate_all(struct dma_chan *chan)
 	 * c->desc is NULL and exit.)
 	 */
 	if (c->desc) {
-		bcm2835_dma_desc_free(&c->desc->vd);
+		vchan_terminate_vdesc(&c->desc->vd);
 		c->desc = NULL;
 		bcm2835_dma_abort(c->chan_base);
 
@@ -836,6 +836,13 @@ static int bcm2835_dma_terminate_all(struct dma_chan *chan)
 	return 0;
 }
 
+static void bcm2835_dma_synchronize(struct dma_chan *chan)
+{
+	struct bcm2835_chan *c = to_bcm2835_dma_chan(chan);
+
+	vchan_synchronize(&c->vc);
+}
+
 static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d, int chan_id,
 	int irq, unsigned int irq_flags)
 {
@@ -942,6 +949,7 @@ static int bcm2835_dma_probe(struct platform_device *pdev)
 	od->ddev.device_prep_dma_memcpy = bcm2835_dma_prep_dma_memcpy;
 	od->ddev.device_config = bcm2835_dma_slave_config;
 	od->ddev.device_terminate_all = bcm2835_dma_terminate_all;
+	od->ddev.device_synchronize = bcm2835_dma_synchronize;
 	od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
 	od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
 	od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
diff --git a/drivers/dma/dma-jz4780.c b/drivers/dma/dma-jz4780.c
index 7373b7a555ec..85820a2d69d4 100644
--- a/drivers/dma/dma-jz4780.c
+++ b/drivers/dma/dma-jz4780.c
@@ -511,7 +511,7 @@ static int jz4780_dma_terminate_all(struct dma_chan *chan)
 	/* Clear the DMA status and stop the transfer. */
 	jz4780_dma_writel(jzdma, JZ_DMA_REG_DCS(jzchan->id), 0);
 	if (jzchan->desc) {
-		jz4780_dma_desc_free(&jzchan->desc->vdesc);
+		vchan_terminate_vdesc(&jzchan->desc->vdesc);
 		jzchan->desc = NULL;
 	}
 
@@ -523,6 +523,13 @@ static int jz4780_dma_terminate_all(struct dma_chan *chan)
 	return 0;
 }
 
+static void jz4780_dma_synchronize(struct dma_chan *chan)
+{
+	struct jz4780_dma_chan *jzchan = to_jz4780_dma_chan(chan);
+
+	vchan_synchronize(&jzchan->vchan);
+}
+
 static int jz4780_dma_config(struct dma_chan *chan,
 	struct dma_slave_config *config)
 {
@@ -813,6 +820,7 @@ static int jz4780_dma_probe(struct platform_device *pdev)
 	dd->device_prep_dma_memcpy = jz4780_dma_prep_dma_memcpy;
 	dd->device_config = jz4780_dma_config;
 	dd->device_terminate_all = jz4780_dma_terminate_all;
+	dd->device_synchronize = jz4780_dma_synchronize;
 	dd->device_tx_status = jz4780_dma_tx_status;
 	dd->device_issue_pending = jz4780_dma_issue_pending;
 	dd->src_addr_widths = JZ_DMA_BUSWIDTHS;
diff --git a/drivers/dma/edma.c b/drivers/dma/edma.c
index 9364a3ed345a..948df1ab5f1a 100644
--- a/drivers/dma/edma.c
+++ b/drivers/dma/edma.c
@@ -860,11 +860,8 @@ static int edma_terminate_all(struct dma_chan *chan)
 		/* Move the cyclic channel back to default queue */
 		if (!echan->tc && echan->edesc->cyclic)
 			edma_assign_channel_eventq(echan, EVENTQ_DEFAULT);
-		/*
-		 * free the running request descriptor
-		 * since it is not in any of the vdesc lists
-		 */
-		edma_desc_free(&echan->edesc->vdesc);
+
+		vchan_terminate_vdesc(&echan->edesc->vdesc);
 		echan->edesc = NULL;
 	}
 
diff --git a/drivers/dma/img-mdc-dma.c b/drivers/dma/img-mdc-dma.c
index 0391f930aecc..25cec9c243e1 100644
--- a/drivers/dma/img-mdc-dma.c
+++ b/drivers/dma/img-mdc-dma.c
@@ -694,7 +694,6 @@ static unsigned int mdc_get_new_events(struct mdc_chan *mchan)
 static int mdc_terminate_all(struct dma_chan *chan)
 {
 	struct mdc_chan *mchan = to_mdc_chan(chan);
-	struct mdc_tx_desc *mdesc;
 	unsigned long flags;
 	LIST_HEAD(head);
 
@@ -703,21 +702,28 @@ static int mdc_terminate_all(struct dma_chan *chan)
 	mdc_chan_writel(mchan, MDC_CONTROL_AND_STATUS_CANCEL,
 			MDC_CONTROL_AND_STATUS);
 
-	mdesc = mchan->desc;
-	mchan->desc = NULL;
+	if (mchan->desc) {
+		vchan_terminate_vdesc(&mchan->desc->vd);
+		mchan->desc = NULL;
+	}
 	vchan_get_all_descriptors(&mchan->vc, &head);
 
 	mdc_get_new_events(mchan);
 
 	spin_unlock_irqrestore(&mchan->vc.lock, flags);
 
-	if (mdesc)
-		mdc_desc_free(&mdesc->vd);
 	vchan_dma_desc_free_list(&mchan->vc, &head);
 
 	return 0;
 }
 
+static void mdc_synchronize(struct dma_chan *chan)
+{
+	struct mdc_chan *mchan = to_mdc_chan(chan);
+
+	vchan_synchronize(&mchan->vc);
+}
+
 static int mdc_slave_config(struct dma_chan *chan,
 			    struct dma_slave_config *config)
 {
@@ -952,6 +958,7 @@ static int mdc_dma_probe(struct platform_device *pdev)
 	mdma->dma_dev.device_tx_status = mdc_tx_status;
 	mdma->dma_dev.device_issue_pending = mdc_issue_pending;
 	mdma->dma_dev.device_terminate_all = mdc_terminate_all;
+	mdma->dma_dev.device_synchronize = mdc_synchronize;
 	mdma->dma_dev.device_config = mdc_slave_config;
 
 	mdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
diff --git a/drivers/dma/k3dma.c b/drivers/dma/k3dma.c
index 01d2a750a621..26b67455208f 100644
--- a/drivers/dma/k3dma.c
+++ b/drivers/dma/k3dma.c
@@ -719,7 +719,7 @@ static int k3_dma_terminate_all(struct dma_chan *chan)
 		c->phy = NULL;
 		p->vchan = NULL;
 		if (p->ds_run) {
-			k3_dma_free_desc(&p->ds_run->vd);
+			vchan_terminate_vdesc(&p->ds_run->vd);
 			p->ds_run = NULL;
 		}
 		p->ds_done = NULL;
@@ -730,6 +730,13 @@ static int k3_dma_terminate_all(struct dma_chan *chan)
 	return 0;
 }
 
+static void k3_dma_synchronize(struct dma_chan *chan)
+{
+	struct k3_dma_chan *c = to_k3_chan(chan);
+
+	vchan_synchronize(&c->vc);
+}
+
 static int k3_dma_transfer_pause(struct dma_chan *chan)
 {
 	struct k3_dma_chan *c = to_k3_chan(chan);
@@ -868,6 +875,7 @@ static int k3_dma_probe(struct platform_device *op)
 	d->slave.device_pause = k3_dma_transfer_pause;
 	d->slave.device_resume = k3_dma_transfer_resume;
 	d->slave.device_terminate_all = k3_dma_terminate_all;
+	d->slave.device_synchronize = k3_dma_synchronize;
 	d->slave.copy_align = DMAENGINE_ALIGN_8_BYTES;
 
 	/* init virtual channel */
diff --git a/drivers/dma/omap-dma.c b/drivers/dma/omap-dma.c
index f6dd849159d8..d21c19822feb 100644
--- a/drivers/dma/omap-dma.c
+++ b/drivers/dma/omap-dma.c
@@ -1311,7 +1311,7 @@ static int omap_dma_terminate_all(struct dma_chan *chan)
 	 * c->desc is NULL and exit.)
 	 */
 	if (c->desc) {
-		omap_dma_desc_free(&c->desc->vd);
+		vchan_terminate_vdesc(&c->desc->vd);
 		c->desc = NULL;
 		/* Avoid stopping the dma twice */
 		if (!c->paused)
diff --git a/drivers/dma/s3c24xx-dma.c b/drivers/dma/s3c24xx-dma.c
index f04c4702d98b..cd92d696bcf9 100644
--- a/drivers/dma/s3c24xx-dma.c
+++ b/drivers/dma/s3c24xx-dma.c
@@ -732,7 +732,7 @@ static int s3c24xx_dma_terminate_all(struct dma_chan *chan)
 
 	/* Dequeue current job */
 	if (s3cchan->at) {
-		s3c24xx_dma_desc_free(&s3cchan->at->vd);
+		vchan_terminate_vdesc(&s3cchan->at->vd);
 		s3cchan->at = NULL;
 	}
 
@@ -744,6 +744,13 @@ unlock:
 	return ret;
 }
 
+static void s3c24xx_dma_synchronize(struct dma_chan *chan)
+{
+	struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
+
+	vchan_synchronize(&s3cchan->vc);
+}
+
 static void s3c24xx_dma_free_chan_resources(struct dma_chan *chan)
 {
 	/* Ensure all queued descriptors are freed */
@@ -1282,6 +1289,7 @@ static int s3c24xx_dma_probe(struct platform_device *pdev)
 	s3cdma->memcpy.device_issue_pending = s3c24xx_dma_issue_pending;
 	s3cdma->memcpy.device_config = s3c24xx_dma_set_runtime_config;
 	s3cdma->memcpy.device_terminate_all = s3c24xx_dma_terminate_all;
+	s3cdma->memcpy.device_synchronize = s3c24xx_dma_synchronize;
 
 	/* Initialize slave engine for SoC internal dedicated peripherals */
 	dma_cap_set(DMA_SLAVE, s3cdma->slave.cap_mask);
@@ -1296,6 +1304,7 @@ static int s3c24xx_dma_probe(struct platform_device *pdev)
 	s3cdma->slave.device_prep_dma_cyclic = s3c24xx_dma_prep_dma_cyclic;
 	s3cdma->slave.device_config = s3c24xx_dma_set_runtime_config;
 	s3cdma->slave.device_terminate_all = s3c24xx_dma_terminate_all;
+	s3cdma->slave.device_synchronize = s3c24xx_dma_synchronize;
 	s3cdma->slave.filter.map = pdata->slave_map;
 	s3cdma->slave.filter.mapcnt = pdata->slavecnt;
 	s3cdma->slave.filter.fn = s3c24xx_dma_filter;
diff --git a/drivers/dma/virt-dma.c b/drivers/dma/virt-dma.c
index 545e97279083..88ad8ed2a8d6 100644
--- a/drivers/dma/virt-dma.c
+++ b/drivers/dma/virt-dma.c
@@ -107,10 +107,7 @@ static void vchan_complete(unsigned long arg)
 		dmaengine_desc_get_callback(&vd->tx, &cb);
 
 		list_del(&vd->node);
-		if (dmaengine_desc_test_reuse(&vd->tx))
-			list_add(&vd->node, &vc->desc_allocated);
-		else
-			vc->desc_free(vd);
+		vchan_vdesc_fini(vd);
 
 		dmaengine_desc_callback_invoke(&cb, NULL);
 	}
diff --git a/drivers/dma/virt-dma.h b/drivers/dma/virt-dma.h
index 3f776a46a29c..b09b75ab0751 100644
--- a/drivers/dma/virt-dma.h
+++ b/drivers/dma/virt-dma.h
@@ -35,6 +35,7 @@ struct virt_dma_chan {
 	struct list_head desc_completed;
 
 	struct virt_dma_desc *cyclic;
+	struct virt_dma_desc *vd_terminated;
 };
 
 static inline struct virt_dma_chan *to_virt_chan(struct dma_chan *chan)
@@ -104,6 +105,20 @@ static inline void vchan_cookie_complete(struct virt_dma_desc *vd)
 }
 
 /**
+ * vchan_vdesc_fini - Free or reuse a descriptor
+ * @vd: virtual descriptor to free/reuse
+ */
+static inline void vchan_vdesc_fini(struct virt_dma_desc *vd)
+{
+	struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
+
+	if (dmaengine_desc_test_reuse(&vd->tx))
+		list_add(&vd->node, &vc->desc_allocated);
+	else
+		vc->desc_free(vd);
+}
+
+/**
  * vchan_cyclic_callback - report the completion of a period
  * @vd: virtual descriptor
  */
@@ -116,6 +131,25 @@ static inline void vchan_cyclic_callback(struct virt_dma_desc *vd)
 }
 
 /**
+ * vchan_terminate_vdesc - Disable pending cyclic callback
+ * @vd: virtual descriptor to be terminated
+ *
+ * vc.lock must be held by caller
+ */
+static inline void vchan_terminate_vdesc(struct virt_dma_desc *vd)
+{
+	struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
+
+	/* free up stuck descriptor */
+	if (vc->vd_terminated)
+		vchan_vdesc_fini(vc->vd_terminated);
+
+	vc->vd_terminated = vd;
+	if (vc->cyclic == vd)
+		vc->cyclic = NULL;
+}
+
+/**
  * vchan_next_desc - peek at the next descriptor to be processed
  * @vc: virtual channel to obtain descriptor from
  *
@@ -168,10 +202,20 @@ static inline void vchan_free_chan_resources(struct virt_dma_chan *vc)
  * Makes sure that all scheduled or active callbacks have finished running. For
  * proper operation the caller has to ensure that no new callbacks are scheduled
  * after the invocation of this function started.
+ * Free up the terminated cyclic descriptor to prevent memory leakage.
  */
 static inline void vchan_synchronize(struct virt_dma_chan *vc)
 {
+	unsigned long flags;
+
 	tasklet_kill(&vc->task);
+
+	spin_lock_irqsave(&vc->lock, flags);
+	if (vc->vd_terminated) {
+		vchan_vdesc_fini(vc->vd_terminated);
+		vc->vd_terminated = NULL;
+	}
+	spin_unlock_irqrestore(&vc->lock, flags);
 }
 
 #endif
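
For completeness, the descriptor parked on vd_terminated is only released once the DMA consumer synchronizes after terminating. A hedged client-side sketch of that contract (the dmaengine_* calls are the existing consumer API; "chan" is a placeholder channel):

/* Sketch only: consumer-side view of terminate + synchronize. */
#include <linux/dmaengine.h>

static void stop_and_release(struct dma_chan *chan)
{
	/* safe in atomic context; the driver only parks the running descriptor */
	dmaengine_terminate_async(chan);

	/*
	 * May sleep.  Waits for callbacks and invokes device_synchronize(),
	 * i.e. vchan_synchronize() above, which frees vd_terminated.
	 */
	dmaengine_synchronize(chan);

	/* dmaengine_terminate_sync(chan) is the combined shorthand */
}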