 drivers/dma/ste_dma40.c | 194 ++++++++++++++++++----------------------
 1 file changed, 84 insertions(+), 110 deletions(-)
diff --git a/drivers/dma/ste_dma40.c b/drivers/dma/ste_dma40.c
index 65b5aad1fc4c..8c6abc23db80 100644
--- a/drivers/dma/ste_dma40.c
+++ b/drivers/dma/ste_dma40.c
@@ -1618,6 +1618,81 @@ static u32 stedma40_residue(struct dma_chan *chan)
 	return bytes_left;
 }
 
+static int
+d40_prep_sg_log(struct d40_chan *chan, struct d40_desc *desc,
+		struct scatterlist *sg_src, struct scatterlist *sg_dst,
+		unsigned int sg_len, enum dma_data_direction direction,
+		dma_addr_t dev_addr)
+{
+	struct stedma40_chan_cfg *cfg = &chan->dma_cfg;
+	struct stedma40_half_channel_info *src_info = &cfg->src_info;
+	struct stedma40_half_channel_info *dst_info = &cfg->dst_info;
+
+	if (direction == DMA_NONE) {
+		/* memcpy */
+		(void) d40_log_sg_to_lli(sg_src, sg_len,
+					 desc->lli_log.src,
+					 chan->log_def.lcsp1,
+					 src_info->data_width,
+					 dst_info->data_width);
+
+		(void) d40_log_sg_to_lli(sg_dst, sg_len,
+					 desc->lli_log.dst,
+					 chan->log_def.lcsp3,
+					 dst_info->data_width,
+					 src_info->data_width);
+	} else {
+		int total_size;
+
+		total_size = d40_log_sg_to_dev(sg_src, sg_len,
+					       &desc->lli_log,
+					       &chan->log_def,
+					       src_info->data_width,
+					       dst_info->data_width,
+					       direction, dev_addr);
+		if (total_size < 0)
+			return -EINVAL;
+	}
+
+	return 0;
+}
+
+static int
+d40_prep_sg_phy(struct d40_chan *chan, struct d40_desc *desc,
+		struct scatterlist *sg_src, struct scatterlist *sg_dst,
+		unsigned int sg_len, enum dma_data_direction direction,
+		dma_addr_t dev_addr)
+{
+	dma_addr_t src_dev_addr = direction == DMA_FROM_DEVICE ? dev_addr : 0;
+	dma_addr_t dst_dev_addr = direction == DMA_TO_DEVICE ? dev_addr : 0;
+	struct stedma40_chan_cfg *cfg = &chan->dma_cfg;
+	struct stedma40_half_channel_info *src_info = &cfg->src_info;
+	struct stedma40_half_channel_info *dst_info = &cfg->dst_info;
+	int ret;
+
+	ret = d40_phy_sg_to_lli(sg_src, sg_len, src_dev_addr,
+				desc->lli_phy.src,
+				virt_to_phys(desc->lli_phy.src),
+				chan->src_def_cfg,
+				src_info->data_width,
+				dst_info->data_width,
+				src_info->psize);
+
+	ret = d40_phy_sg_to_lli(sg_dst, sg_len, dst_dev_addr,
+				desc->lli_phy.dst,
+				virt_to_phys(desc->lli_phy.dst),
+				chan->dst_def_cfg,
+				dst_info->data_width,
+				src_info->data_width,
+				dst_info->psize);
+
+	dma_sync_single_for_device(chan->base->dev, desc->lli_pool.dma_addr,
+				   desc->lli_pool.size, DMA_TO_DEVICE);
+
+	return ret < 0 ? ret : 0;
+}
+
+
 static struct d40_desc *
 d40_prep_desc(struct d40_chan *chan, struct scatterlist *sg,
 	      unsigned int sg_len, unsigned long dma_flags)
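
For orientation, here is a minimal sketch, not part of the patch, of how the two consolidated helpers above are meant to be driven: DMA_NONE selects their mem-to-mem branch, while DMA_TO_DEVICE / DMA_FROM_DEVICE select the slave branch and decide whether dev_addr is treated as the destination or the source. The wrapper name d40_prep_sg_lli is invented for illustration; chan_is_logical(), d40_prep_sg_log() and d40_prep_sg_phy() are the driver's own, and the real callers appear in the hunks below.

/* Illustrative wrapper only -- not part of this patch. */
static int d40_prep_sg_lli(struct d40_chan *chan, struct d40_desc *desc,
			   struct scatterlist *sg_src, struct scatterlist *sg_dst,
			   unsigned int sg_len, enum dma_data_direction dir,
			   dma_addr_t dev_addr)
{
	/* Logical channels build log LLIs, physical channels build phy LLIs. */
	if (chan_is_logical(chan))
		return d40_prep_sg_log(chan, desc, sg_src, sg_dst,
				       sg_len, dir, dev_addr);

	return d40_prep_sg_phy(chan, desc, sg_src, sg_dst,
			       sg_len, dir, dev_addr);
}
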
@@ -1663,7 +1738,6 @@ struct dma_async_tx_descriptor *stedma40_memcpy_sg(struct dma_chan *chan,
 						    unsigned int sgl_len,
 						    unsigned long dma_flags)
 {
-	int res;
 	struct d40_desc *d40d;
 	struct d40_chan *d40c = container_of(chan, struct d40_chan,
 					     chan);
@@ -1681,49 +1755,11 @@ struct dma_async_tx_descriptor *stedma40_memcpy_sg(struct dma_chan *chan,
 		goto err;
 
 	if (chan_is_logical(d40c)) {
-		(void) d40_log_sg_to_lli(sgl_src,
-					 sgl_len,
-					 d40d->lli_log.src,
-					 d40c->log_def.lcsp1,
-					 d40c->dma_cfg.src_info.data_width,
-					 d40c->dma_cfg.dst_info.data_width);
-
-		(void) d40_log_sg_to_lli(sgl_dst,
-					 sgl_len,
-					 d40d->lli_log.dst,
-					 d40c->log_def.lcsp3,
-					 d40c->dma_cfg.dst_info.data_width,
-					 d40c->dma_cfg.src_info.data_width);
+		d40_prep_sg_log(d40c, d40d, sgl_src, sgl_dst,
+				sgl_len, DMA_NONE, 0);
 	} else {
-		res = d40_phy_sg_to_lli(sgl_src,
-					sgl_len,
-					0,
-					d40d->lli_phy.src,
-					virt_to_phys(d40d->lli_phy.src),
-					d40c->src_def_cfg,
-					d40c->dma_cfg.src_info.data_width,
-					d40c->dma_cfg.dst_info.data_width,
-					d40c->dma_cfg.src_info.psize);
-
-		if (res < 0)
-			goto err;
-
-		res = d40_phy_sg_to_lli(sgl_dst,
-					sgl_len,
-					0,
-					d40d->lli_phy.dst,
-					virt_to_phys(d40d->lli_phy.dst),
-					d40c->dst_def_cfg,
-					d40c->dma_cfg.dst_info.data_width,
-					d40c->dma_cfg.src_info.data_width,
-					d40c->dma_cfg.dst_info.psize);
-
-		if (res < 0)
-			goto err;
-
-		dma_sync_single_for_device(d40c->base->dev,
-					   d40d->lli_pool.dma_addr,
-					   d40d->lli_pool.size, DMA_TO_DEVICE);
+		d40_prep_sg_phy(d40c, d40d, sgl_src, sgl_dst,
+				sgl_len, DMA_NONE, 0);
 	}
 
 	spin_unlock_irqrestore(&d40c->lock, flags);
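
stedma40_memcpy_sg() above is reached from d40_prep_sg(), the driver's device_prep_dma_sg callback visible in the next hunk's context. A hedged, illustrative sketch of a dmaengine client hitting this mem-to-mem path follows; chan, dst_sg, src_sg and nents stand for assumed caller state and are not taken from the patch.

/* Sketch only: scatterlist-to-scatterlist copy via the DMA_SG capability. */
static struct dma_async_tx_descriptor *
example_sg_copy(struct dma_chan *chan, struct scatterlist *dst_sg,
		struct scatterlist *src_sg, unsigned int nents)
{
	/* Ends up in stedma40_memcpy_sg(), which now calls
	 * d40_prep_sg_log()/d40_prep_sg_phy() with DMA_NONE. */
	return chan->device->device_prep_dma_sg(chan, dst_sg, nents,
						src_sg, nents,
						DMA_PREP_INTERRUPT);
}
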
@@ -1904,69 +1940,6 @@ d40_prep_sg(struct dma_chan *chan,
 	return stedma40_memcpy_sg(chan, dst_sg, src_sg, dst_nents, dma_flags);
 }
 
-static int d40_prep_slave_sg_log(struct d40_desc *d40d,
-				 struct d40_chan *d40c,
-				 struct scatterlist *sgl,
-				 unsigned int sg_len,
-				 enum dma_data_direction direction,
-				 dma_addr_t dev_addr)
-{
-	int total_size;
-
-	total_size = d40_log_sg_to_dev(sgl, sg_len,
-				       &d40d->lli_log,
-				       &d40c->log_def,
-				       d40c->dma_cfg.src_info.data_width,
-				       d40c->dma_cfg.dst_info.data_width,
-				       direction,
-				       dev_addr);
-
-	if (total_size < 0)
-		return -EINVAL;
-
-	return 0;
-}
-
-static int d40_prep_slave_sg_phy(struct d40_desc *d40d,
-				 struct d40_chan *d40c,
-				 struct scatterlist *sgl,
-				 unsigned int sgl_len,
-				 enum dma_data_direction direction,
-				 dma_addr_t dev_addr)
-{
-	dma_addr_t src_dev_addr = direction == DMA_FROM_DEVICE ? dev_addr : 0;
-	dma_addr_t dst_dev_addr = direction == DMA_TO_DEVICE ? dev_addr : 0;
-	int res;
-
-	res = d40_phy_sg_to_lli(sgl,
-				sgl_len,
-				src_dev_addr,
-				d40d->lli_phy.src,
-				virt_to_phys(d40d->lli_phy.src),
-				d40c->src_def_cfg,
-				d40c->dma_cfg.src_info.data_width,
-				d40c->dma_cfg.dst_info.data_width,
-				d40c->dma_cfg.src_info.psize);
-	if (res < 0)
-		return res;
-
-	res = d40_phy_sg_to_lli(sgl,
-				sgl_len,
-				dst_dev_addr,
-				d40d->lli_phy.dst,
-				virt_to_phys(d40d->lli_phy.dst),
-				d40c->dst_def_cfg,
-				d40c->dma_cfg.dst_info.data_width,
-				d40c->dma_cfg.src_info.data_width,
-				d40c->dma_cfg.dst_info.psize);
-	if (res < 0)
-		return res;
-
-	dma_sync_single_for_device(d40c->base->dev, d40d->lli_pool.dma_addr,
-				   d40d->lli_pool.size, DMA_TO_DEVICE);
-	return 0;
-}
-
 static dma_addr_t
 d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction)
 {
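
To make the interface change concrete, the deleted direction-specific helpers map onto the consolidated ones as follows (sketch only, mirroring the final hunk below): the channel and descriptor arguments swap order, the single slave scatterlist is passed as the source list with NULL in the destination slot, and sg_len moves after the scatterlist pointers.

/* Before this patch (helper removed above): */
err = d40_prep_slave_sg_log(d40d, d40c, sgl, sg_len, direction, dev_addr);

/* After this patch (see the final hunk): */
err = d40_prep_sg_log(d40c, d40d, sgl, NULL, sg_len, direction, dev_addr);
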
@@ -2015,11 +1988,12 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
 	dev_addr = d40_get_dev_addr(d40c, direction);
 
 	if (chan_is_logical(d40c))
-		err = d40_prep_slave_sg_log(d40d, d40c, sgl, sg_len,
-					    direction, dev_addr);
+		err = d40_prep_sg_log(d40c, d40d, sgl, NULL,
+				      sg_len, direction, dev_addr);
 	else
-		err = d40_prep_slave_sg_phy(d40d, d40c, sgl, sg_len,
-					    direction, dev_addr);
+		err = d40_prep_sg_phy(d40c, d40d, sgl, NULL,
+				      sg_len, direction, dev_addr);
+
 	if (err) {
 		chan_err(d40c, "Failed to prepare %s slave sg job: %d\n",
 			 chan_is_logical(d40c) ? "log" : "phy", err);
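
Finally, a hedged sketch of how a peripheral driver of this kernel generation might reach d40_prep_slave_sg() and hence the consolidated helpers. The function name, channel, scatterlist and length are assumed caller state; the dmaengine calls shown reflect the API of this era (enum dma_data_direction, no dmaengine_prep_slave_sg() wrapper yet) and are illustrative, not part of the patch.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch only: prepare and queue a mem-to-device slave transfer. */
static int example_start_tx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_len)
{
	struct dma_async_tx_descriptor *txd;
	dma_cookie_t cookie;

	txd = chan->device->device_prep_slave_sg(chan, sgl, sg_len,
						 DMA_TO_DEVICE,
						 DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;

	/* Queue the descriptor; the transfer starts on issue_pending. */
	cookie = txd->tx_submit(txd);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}
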