aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/dma
diff options
context:
space:
mode:
authorAlban Bedel <alban.bedel@avionic-design.de>2013-08-11 13:59:20 -0400
committerVinod Koul <vinod.koul@intel.com>2013-09-02 02:19:56 -0400
commit3b24c20b208aa493c488835f5b9e722c88756845 (patch)
tree743fb9d20b01c2b03726bdd2933b32ca9f20a179 /drivers/dma
parentf3287a5206cae1244601d50a4d2a9a96a521c1ee (diff)
dmaengine: PL08x: Add cyclic transfer support
Many audio interface drivers require support of cyclic transfers to work correctly, for example the Samsung ASoC DMA driver. This patch adds support for cyclic transfers to the amba-pl08x driver.

Signed-off-by: Alban Bedel <alban.bedel@avionic-design.de>
[tfiga: Rebase and slightly beautify the original patch.]
Signed-off-by: Tomasz Figa <tomasz.figa@gmail.com>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
Diffstat (limited to 'drivers/dma')
-rw-r--r--drivers/dma/amba-pl08x.c147
1 file changed, 118 insertions, 29 deletions
diff --git a/drivers/dma/amba-pl08x.c b/drivers/dma/amba-pl08x.c
index 6b9cba2fd7f1..cd294340c851 100644
--- a/drivers/dma/amba-pl08x.c
+++ b/drivers/dma/amba-pl08x.c
@@ -173,6 +173,7 @@ struct pl08x_sg {
173 * @ccfg: config reg values for current txd 173 * @ccfg: config reg values for current txd
174 * @done: this marks completed descriptors, which should not have their 174 * @done: this marks completed descriptors, which should not have their
175 * mux released. 175 * mux released.
176 * @cyclic: indicate cyclic transfers
176 */ 177 */
177struct pl08x_txd { 178struct pl08x_txd {
178 struct virt_dma_desc vd; 179 struct virt_dma_desc vd;
@@ -187,6 +188,7 @@ struct pl08x_txd {
187 */ 188 */
188 u32 ccfg; 189 u32 ccfg;
189 bool done; 190 bool done;
191 bool cyclic;
190}; 192};
191 193
192/** 194/**
@@ -574,9 +576,9 @@ static u32 pl08x_getbytes_chan(struct pl08x_dma_chan *plchan)
574 bytes += get_bytes_in_cctl(llis_va[PL080_LLI_CCTL]); 576 bytes += get_bytes_in_cctl(llis_va[PL080_LLI_CCTL]);
575 577
576 /* 578 /*
577 * A LLI pointer of 0 terminates the LLI list 579 * A LLI pointer going backward terminates the LLI list
578 */ 580 */
579 if (!llis_va[PL080_LLI_LLI]) 581 if (llis_va[PL080_LLI_LLI] <= clli)
580 break; 582 break;
581 } 583 }
582 584
@@ -1125,10 +1127,16 @@ static int pl08x_fill_llis_for_desc(struct pl08x_driver_data *pl08x,
1125 1127
1126 llis_va = txd->llis_va; 1128 llis_va = txd->llis_va;
1127 last_lli = llis_va + (num_llis - 1) * pl08x->lli_words; 1129 last_lli = llis_va + (num_llis - 1) * pl08x->lli_words;
1128 /* The final LLI terminates the LLI. */ 1130
1129 last_lli[PL080_LLI_LLI] = 0; 1131 if (txd->cyclic) {
1130 /* The final LLI element shall also fire an interrupt. */ 1132 /* Link back to the first LLI. */
1131 last_lli[PL080_LLI_CCTL] |= PL080_CONTROL_TC_IRQ_EN; 1133 last_lli[PL080_LLI_LLI] = txd->llis_bus | bd.lli_bus;
1134 } else {
1135 /* The final LLI terminates the LLI. */
1136 last_lli[PL080_LLI_LLI] = 0;
1137 /* The final LLI element shall also fire an interrupt. */
1138 last_lli[PL080_LLI_CCTL] |= PL080_CONTROL_TC_IRQ_EN;
1139 }
1132 1140
1133 pl08x_dump_lli(pl08x, llis_va, num_llis); 1141 pl08x_dump_lli(pl08x, llis_va, num_llis);
1134 1142
@@ -1513,25 +1521,19 @@ static struct dma_async_tx_descriptor *pl08x_prep_dma_memcpy(
1513 return vchan_tx_prep(&plchan->vc, &txd->vd, flags); 1521 return vchan_tx_prep(&plchan->vc, &txd->vd, flags);
1514} 1522}
1515 1523
1516static struct dma_async_tx_descriptor *pl08x_prep_slave_sg( 1524static struct pl08x_txd *pl08x_init_txd(
1517 struct dma_chan *chan, struct scatterlist *sgl, 1525 struct dma_chan *chan,
1518 unsigned int sg_len, enum dma_transfer_direction direction, 1526 enum dma_transfer_direction direction,
1519 unsigned long flags, void *context) 1527 dma_addr_t *slave_addr)
1520{ 1528{
1521 struct pl08x_dma_chan *plchan = to_pl08x_chan(chan); 1529 struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
1522 struct pl08x_driver_data *pl08x = plchan->host; 1530 struct pl08x_driver_data *pl08x = plchan->host;
1523 struct pl08x_txd *txd; 1531 struct pl08x_txd *txd;
1524 struct pl08x_sg *dsg;
1525 struct scatterlist *sg;
1526 enum dma_slave_buswidth addr_width; 1532 enum dma_slave_buswidth addr_width;
1527 dma_addr_t slave_addr;
1528 int ret, tmp; 1533 int ret, tmp;
1529 u8 src_buses, dst_buses; 1534 u8 src_buses, dst_buses;
1530 u32 maxburst, cctl; 1535 u32 maxburst, cctl;
1531 1536
1532 dev_dbg(&pl08x->adev->dev, "%s prepare transaction of %d bytes from %s\n",
1533 __func__, sg_dma_len(sgl), plchan->name);
1534
1535 txd = pl08x_get_txd(plchan); 1537 txd = pl08x_get_txd(plchan);
1536 if (!txd) { 1538 if (!txd) {
1537 dev_err(&pl08x->adev->dev, "%s no txd\n", __func__); 1539 dev_err(&pl08x->adev->dev, "%s no txd\n", __func__);
@@ -1545,14 +1547,14 @@ static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
1545 */ 1547 */
1546 if (direction == DMA_MEM_TO_DEV) { 1548 if (direction == DMA_MEM_TO_DEV) {
1547 cctl = PL080_CONTROL_SRC_INCR; 1549 cctl = PL080_CONTROL_SRC_INCR;
1548 slave_addr = plchan->cfg.dst_addr; 1550 *slave_addr = plchan->cfg.dst_addr;
1549 addr_width = plchan->cfg.dst_addr_width; 1551 addr_width = plchan->cfg.dst_addr_width;
1550 maxburst = plchan->cfg.dst_maxburst; 1552 maxburst = plchan->cfg.dst_maxburst;
1551 src_buses = pl08x->mem_buses; 1553 src_buses = pl08x->mem_buses;
1552 dst_buses = plchan->cd->periph_buses; 1554 dst_buses = plchan->cd->periph_buses;
1553 } else if (direction == DMA_DEV_TO_MEM) { 1555 } else if (direction == DMA_DEV_TO_MEM) {
1554 cctl = PL080_CONTROL_DST_INCR; 1556 cctl = PL080_CONTROL_DST_INCR;
1555 slave_addr = plchan->cfg.src_addr; 1557 *slave_addr = plchan->cfg.src_addr;
1556 addr_width = plchan->cfg.src_addr_width; 1558 addr_width = plchan->cfg.src_addr_width;
1557 maxburst = plchan->cfg.src_maxburst; 1559 maxburst = plchan->cfg.src_maxburst;
1558 src_buses = plchan->cd->periph_buses; 1560 src_buses = plchan->cd->periph_buses;
@@ -1601,24 +1603,107 @@ static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
1601 else 1603 else
1602 txd->ccfg |= plchan->signal << PL080_CONFIG_SRC_SEL_SHIFT; 1604 txd->ccfg |= plchan->signal << PL080_CONFIG_SRC_SEL_SHIFT;
1603 1605
1606 return txd;
1607}
1608
1609static int pl08x_tx_add_sg(struct pl08x_txd *txd,
1610 enum dma_transfer_direction direction,
1611 dma_addr_t slave_addr,
1612 dma_addr_t buf_addr,
1613 unsigned int len)
1614{
1615 struct pl08x_sg *dsg;
1616
1617 dsg = kzalloc(sizeof(struct pl08x_sg), GFP_NOWAIT);
1618 if (!dsg)
1619 return -ENOMEM;
1620
1621 list_add_tail(&dsg->node, &txd->dsg_list);
1622
1623 dsg->len = len;
1624 if (direction == DMA_MEM_TO_DEV) {
1625 dsg->src_addr = buf_addr;
1626 dsg->dst_addr = slave_addr;
1627 } else {
1628 dsg->src_addr = slave_addr;
1629 dsg->dst_addr = buf_addr;
1630 }
1631
1632 return 0;
1633}
1634
1635static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
1636 struct dma_chan *chan, struct scatterlist *sgl,
1637 unsigned int sg_len, enum dma_transfer_direction direction,
1638 unsigned long flags, void *context)
1639{
1640 struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
1641 struct pl08x_driver_data *pl08x = plchan->host;
1642 struct pl08x_txd *txd;
1643 struct scatterlist *sg;
1644 int ret, tmp;
1645 dma_addr_t slave_addr;
1646
1647 dev_dbg(&pl08x->adev->dev, "%s prepare transaction of %d bytes from %s\n",
1648 __func__, sg_dma_len(sgl), plchan->name);
1649
1650 txd = pl08x_init_txd(chan, direction, &slave_addr);
1651 if (!txd)
1652 return NULL;
1653
1604 for_each_sg(sgl, sg, sg_len, tmp) { 1654 for_each_sg(sgl, sg, sg_len, tmp) {
1605 dsg = kzalloc(sizeof(struct pl08x_sg), GFP_NOWAIT); 1655 ret = pl08x_tx_add_sg(txd, direction, slave_addr,
1606 if (!dsg) { 1656 sg_dma_address(sg),
1657 sg_dma_len(sg));
1658 if (ret) {
1607 pl08x_release_mux(plchan); 1659 pl08x_release_mux(plchan);
1608 pl08x_free_txd(pl08x, txd); 1660 pl08x_free_txd(pl08x, txd);
1609 dev_err(&pl08x->adev->dev, "%s no mem for pl080 sg\n", 1661 dev_err(&pl08x->adev->dev, "%s no mem for pl080 sg\n",
1610 __func__); 1662 __func__);
1611 return NULL; 1663 return NULL;
1612 } 1664 }
1613 list_add_tail(&dsg->node, &txd->dsg_list); 1665 }
1614 1666
1615 dsg->len = sg_dma_len(sg); 1667 ret = pl08x_fill_llis_for_desc(plchan->host, txd);
1616 if (direction == DMA_MEM_TO_DEV) { 1668 if (!ret) {
1617 dsg->src_addr = sg_dma_address(sg); 1669 pl08x_release_mux(plchan);
1618 dsg->dst_addr = slave_addr; 1670 pl08x_free_txd(pl08x, txd);
1619 } else { 1671 return NULL;
1620 dsg->src_addr = slave_addr; 1672 }
1621 dsg->dst_addr = sg_dma_address(sg); 1673
1674 return vchan_tx_prep(&plchan->vc, &txd->vd, flags);
1675}
1676
1677static struct dma_async_tx_descriptor *pl08x_prep_dma_cyclic(
1678 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
1679 size_t period_len, enum dma_transfer_direction direction,
1680 unsigned long flags, void *context)
1681{
1682 struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
1683 struct pl08x_driver_data *pl08x = plchan->host;
1684 struct pl08x_txd *txd;
1685 int ret, tmp;
1686 dma_addr_t slave_addr;
1687
1688 dev_dbg(&pl08x->adev->dev,
1689 "%s prepare cyclic transaction of %d/%d bytes %s %s\n",
1690 __func__, period_len, buf_len,
1691 direction == DMA_MEM_TO_DEV ? "to" : "from",
1692 plchan->name);
1693
1694 txd = pl08x_init_txd(chan, direction, &slave_addr);
1695 if (!txd)
1696 return NULL;
1697
1698 txd->cyclic = true;
1699 txd->cctl |= PL080_CONTROL_TC_IRQ_EN;
1700 for (tmp = 0; tmp < buf_len; tmp += period_len) {
1701 ret = pl08x_tx_add_sg(txd, direction, slave_addr,
1702 buf_addr + tmp, period_len);
1703 if (ret) {
1704 pl08x_release_mux(plchan);
1705 pl08x_free_txd(pl08x, txd);
1706 return NULL;
1622 } 1707 }
1623 } 1708 }
1624 1709
@@ -1761,7 +1846,9 @@ static irqreturn_t pl08x_irq(int irq, void *dev)
1761 1846
1762 spin_lock(&plchan->vc.lock); 1847 spin_lock(&plchan->vc.lock);
1763 tx = plchan->at; 1848 tx = plchan->at;
1764 if (tx) { 1849 if (tx && tx->cyclic) {
1850 vchan_cyclic_callback(&tx->vd);
1851 } else if (tx) {
1765 plchan->at = NULL; 1852 plchan->at = NULL;
1766 /* 1853 /*
1767 * This descriptor is done, release its mux 1854 * This descriptor is done, release its mux
@@ -1983,6 +2070,7 @@ static int pl08x_probe(struct amba_device *adev, const struct amba_id *id)
1983 2070
1984 /* Initialize slave engine */ 2071 /* Initialize slave engine */
1985 dma_cap_set(DMA_SLAVE, pl08x->slave.cap_mask); 2072 dma_cap_set(DMA_SLAVE, pl08x->slave.cap_mask);
2073 dma_cap_set(DMA_CYCLIC, pl08x->slave.cap_mask);
1986 pl08x->slave.dev = &adev->dev; 2074 pl08x->slave.dev = &adev->dev;
1987 pl08x->slave.device_alloc_chan_resources = pl08x_alloc_chan_resources; 2075 pl08x->slave.device_alloc_chan_resources = pl08x_alloc_chan_resources;
1988 pl08x->slave.device_free_chan_resources = pl08x_free_chan_resources; 2076 pl08x->slave.device_free_chan_resources = pl08x_free_chan_resources;
@@ -1990,6 +2078,7 @@ static int pl08x_probe(struct amba_device *adev, const struct amba_id *id)
1990 pl08x->slave.device_tx_status = pl08x_dma_tx_status; 2078 pl08x->slave.device_tx_status = pl08x_dma_tx_status;
1991 pl08x->slave.device_issue_pending = pl08x_issue_pending; 2079 pl08x->slave.device_issue_pending = pl08x_issue_pending;
1992 pl08x->slave.device_prep_slave_sg = pl08x_prep_slave_sg; 2080 pl08x->slave.device_prep_slave_sg = pl08x_prep_slave_sg;
2081 pl08x->slave.device_prep_dma_cyclic = pl08x_prep_dma_cyclic;
1993 pl08x->slave.device_control = pl08x_control; 2082 pl08x->slave.device_control = pl08x_control;
1994 2083
1995 /* Get the platform data */ 2084 /* Get the platform data */