aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorPeter Ujfalusi <peter.ujfalusi@ti.com>2019-07-16 04:26:55 -0400
committerVinod Koul <vkoul@kernel.org>2019-07-29 02:41:38 -0400
commitaa3c6ce4eab8fb0e967954be1ba1cad3b715f63b (patch)
treebd01cc1b347fa53e617ea734ad5497c46c41c0cb
parent097ffdc75259139ba157b7f924cfeb0d6b00559e (diff)
dmaengine: ti: edma: Support for polled (memcpy) completion
When a DMA client driver does not set the DMA_PREP_INTERRUPT flag — because it does not want to use interrupts for DMA completion, or because it cannot rely on DMA interrupts due to executing the memcpy while interrupts are disabled — it will poll the status of the transfer. Since we cannot tell from any EDMA register that the transfer is completed (we can only know that the paRAM set has been sent to the TPTC for processing), we need to check the residue of the transfer: if it is 0, then the transfer is completed. Signed-off-by: Peter Ujfalusi <peter.ujfalusi@ti.com> Link: https://lore.kernel.org/r/20190716082655.1620-4-peter.ujfalusi@ti.com Signed-off-by: Vinod Koul <vkoul@kernel.org>
-rw-r--r--drivers/dma/ti/edma.c37
1 file changed, 33 insertions, 4 deletions
diff --git a/drivers/dma/ti/edma.c b/drivers/dma/ti/edma.c
index 201b838ec808..fe468e2f7e67 100644
--- a/drivers/dma/ti/edma.c
+++ b/drivers/dma/ti/edma.c
@@ -179,6 +179,7 @@ struct edma_desc {
179 struct list_head node; 179 struct list_head node;
180 enum dma_transfer_direction direction; 180 enum dma_transfer_direction direction;
181 int cyclic; 181 int cyclic;
182 bool polled;
182 int absync; 183 int absync;
183 int pset_nr; 184 int pset_nr;
184 struct edma_chan *echan; 185 struct edma_chan *echan;
@@ -1226,8 +1227,9 @@ static struct dma_async_tx_descriptor *edma_prep_dma_memcpy(
1226 1227
1227 edesc->pset[0].param.opt |= ITCCHEN; 1228 edesc->pset[0].param.opt |= ITCCHEN;
1228 if (nslots == 1) { 1229 if (nslots == 1) {
1229 /* Enable transfer complete interrupt */ 1230 /* Enable transfer complete interrupt if requested */
1230 edesc->pset[0].param.opt |= TCINTEN; 1231 if (tx_flags & DMA_PREP_INTERRUPT)
1232 edesc->pset[0].param.opt |= TCINTEN;
1231 } else { 1233 } else {
1232 /* Enable transfer complete chaining for the first slot */ 1234 /* Enable transfer complete chaining for the first slot */
1233 edesc->pset[0].param.opt |= TCCHEN; 1235 edesc->pset[0].param.opt |= TCCHEN;
@@ -1254,9 +1256,14 @@ static struct dma_async_tx_descriptor *edma_prep_dma_memcpy(
1254 } 1256 }
1255 1257
1256 edesc->pset[1].param.opt |= ITCCHEN; 1258 edesc->pset[1].param.opt |= ITCCHEN;
1257 edesc->pset[1].param.opt |= TCINTEN; 1259 /* Enable transfer complete interrupt if requested */
1260 if (tx_flags & DMA_PREP_INTERRUPT)
1261 edesc->pset[1].param.opt |= TCINTEN;
1258 } 1262 }
1259 1263
1264 if (!(tx_flags & DMA_PREP_INTERRUPT))
1265 edesc->polled = true;
1266
1260 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags); 1267 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
1261} 1268}
1262 1269
@@ -1826,18 +1833,40 @@ static enum dma_status edma_tx_status(struct dma_chan *chan,
1826{ 1833{
1827 struct edma_chan *echan = to_edma_chan(chan); 1834 struct edma_chan *echan = to_edma_chan(chan);
1828 struct virt_dma_desc *vdesc; 1835 struct virt_dma_desc *vdesc;
1836 struct dma_tx_state txstate_tmp;
1829 enum dma_status ret; 1837 enum dma_status ret;
1830 unsigned long flags; 1838 unsigned long flags;
1831 1839
1832 ret = dma_cookie_status(chan, cookie, txstate); 1840 ret = dma_cookie_status(chan, cookie, txstate);
1833 if (ret == DMA_COMPLETE || !txstate) 1841
1842 if (ret == DMA_COMPLETE)
1834 return ret; 1843 return ret;
1835 1844
1845 /* Provide a dummy dma_tx_state for completion checking */
1846 if (!txstate)
1847 txstate = &txstate_tmp;
1848
1849 txstate->residue = 0;
1836 spin_lock_irqsave(&echan->vchan.lock, flags); 1850 spin_lock_irqsave(&echan->vchan.lock, flags);
1837 if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie) 1851 if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie)
1838 txstate->residue = edma_residue(echan->edesc); 1852 txstate->residue = edma_residue(echan->edesc);
1839 else if ((vdesc = vchan_find_desc(&echan->vchan, cookie))) 1853 else if ((vdesc = vchan_find_desc(&echan->vchan, cookie)))
1840 txstate->residue = to_edma_desc(&vdesc->tx)->residue; 1854 txstate->residue = to_edma_desc(&vdesc->tx)->residue;
1855
1856 /*
1857 * Mark the cookie completed if the residue is 0 for non cyclic
1858 * transfers
1859 */
1860 if (ret != DMA_COMPLETE && !txstate->residue &&
1861 echan->edesc && echan->edesc->polled &&
1862 echan->edesc->vdesc.tx.cookie == cookie) {
1863 edma_stop(echan);
1864 vchan_cookie_complete(&echan->edesc->vdesc);
1865 echan->edesc = NULL;
1866 edma_execute(echan);
1867 ret = DMA_COMPLETE;
1868 }
1869
1841 spin_unlock_irqrestore(&echan->vchan.lock, flags); 1870 spin_unlock_irqrestore(&echan->vchan.lock, flags);
1842 1871
1843 return ret; 1872 return ret;