diff options
author | Rabin Vincent <rabin.vincent@stericsson.com> | 2011-01-25 05:18:23 -0500 |
---|---|---|
committer | Dan Williams <dan.j.williams@intel.com> | 2011-01-31 01:27:19 -0500 |
commit | cade1d30b2e071a687011c2a38c03ed7187ec501 (patch) | |
tree | 3ce0a1178095e3c4d54d60b596f52ce41f814f36 | |
parent | 10a946b3a4e1ad665a81981cbe33c3d3903cd7da (diff) |
dma40: combine mem and slave prep_sg functions
Acked-by: Per Forlin <per.forlin@stericsson.com>
Acked-by: Jonas Aaberg <jonas.aberg@stericsson.com>
Signed-off-by: Rabin Vincent <rabin.vincent@stericsson.com>
Signed-off-by: Linus Walleij <linus.walleij@stericsson.com>
Signed-off-by: Dan Williams <dan.j.williams@intel.com>
-rw-r--r-- | drivers/dma/ste_dma40.c | 156 |
1 files changed, 62 insertions, 94 deletions
diff --git a/drivers/dma/ste_dma40.c b/drivers/dma/ste_dma40.c index 0f5d61720ab8..4e9d6c5a7134 100644 --- a/drivers/dma/ste_dma40.c +++ b/drivers/dma/ste_dma40.c | |||
@@ -1732,44 +1732,70 @@ err: | |||
1732 | return NULL; | 1732 | return NULL; |
1733 | } | 1733 | } |
1734 | 1734 | ||
1735 | static struct dma_async_tx_descriptor * | 1735 | static dma_addr_t |
1736 | stedma40_memcpy_sg(struct dma_chan *chan, | 1736 | d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction) |
1737 | struct scatterlist *sgl_dst, | ||
1738 | struct scatterlist *sgl_src, | ||
1739 | unsigned int sgl_len, | ||
1740 | unsigned long dma_flags) | ||
1741 | { | 1737 | { |
1742 | struct d40_desc *d40d; | 1738 | struct stedma40_platform_data *plat = chan->base->plat_data; |
1743 | struct d40_chan *d40c = container_of(chan, struct d40_chan, | 1739 | struct stedma40_chan_cfg *cfg = &chan->dma_cfg; |
1744 | chan); | 1740 | dma_addr_t addr; |
1741 | |||
1742 | if (chan->runtime_addr) | ||
1743 | return chan->runtime_addr; | ||
1744 | |||
1745 | if (direction == DMA_FROM_DEVICE) | ||
1746 | addr = plat->dev_rx[cfg->src_dev_type]; | ||
1747 | else if (direction == DMA_TO_DEVICE) | ||
1748 | addr = plat->dev_tx[cfg->dst_dev_type]; | ||
1749 | |||
1750 | return addr; | ||
1751 | } | ||
1752 | |||
1753 | static struct dma_async_tx_descriptor * | ||
1754 | d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src, | ||
1755 | struct scatterlist *sg_dst, unsigned int sg_len, | ||
1756 | enum dma_data_direction direction, unsigned long dma_flags) | ||
1757 | { | ||
1758 | struct d40_chan *chan = container_of(dchan, struct d40_chan, chan); | ||
1759 | dma_addr_t dev_addr = 0; | ||
1760 | struct d40_desc *desc; | ||
1745 | unsigned long flags; | 1761 | unsigned long flags; |
1762 | int ret; | ||
1746 | 1763 | ||
1747 | if (d40c->phy_chan == NULL) { | 1764 | if (!chan->phy_chan) { |
1748 | chan_err(d40c, "Unallocated channel.\n"); | 1765 | chan_err(chan, "Cannot prepare unallocated channel\n"); |
1749 | return ERR_PTR(-EINVAL); | 1766 | return NULL; |
1750 | } | 1767 | } |
1751 | 1768 | ||
1752 | spin_lock_irqsave(&d40c->lock, flags); | 1769 | spin_lock_irqsave(&chan->lock, flags); |
1753 | 1770 | ||
1754 | d40d = d40_prep_desc(d40c, sgl_dst, sgl_len, dma_flags); | 1771 | desc = d40_prep_desc(chan, sg_src, sg_len, dma_flags); |
1755 | if (!d40d) | 1772 | if (desc == NULL) |
1756 | goto err; | 1773 | goto err; |
1757 | 1774 | ||
1758 | if (chan_is_logical(d40c)) { | 1775 | if (direction != DMA_NONE) |
1759 | d40_prep_sg_log(d40c, d40d, sgl_src, sgl_dst, | 1776 | dev_addr = d40_get_dev_addr(chan, direction); |
1760 | sgl_len, DMA_NONE, 0); | 1777 | |
1761 | } else { | 1778 | if (chan_is_logical(chan)) |
1762 | d40_prep_sg_phy(d40c, d40d, sgl_src, sgl_dst, | 1779 | ret = d40_prep_sg_log(chan, desc, sg_src, sg_dst, |
1763 | sgl_len, DMA_NONE, 0); | 1780 | sg_len, direction, dev_addr); |
1781 | else | ||
1782 | ret = d40_prep_sg_phy(chan, desc, sg_src, sg_dst, | ||
1783 | sg_len, direction, dev_addr); | ||
1784 | |||
1785 | if (ret) { | ||
1786 | chan_err(chan, "Failed to prepare %s sg job: %d\n", | ||
1787 | chan_is_logical(chan) ? "log" : "phy", ret); | ||
1788 | goto err; | ||
1764 | } | 1789 | } |
1765 | 1790 | ||
1766 | spin_unlock_irqrestore(&d40c->lock, flags); | 1791 | spin_unlock_irqrestore(&chan->lock, flags); |
1792 | |||
1793 | return &desc->txd; | ||
1767 | 1794 | ||
1768 | return &d40d->txd; | ||
1769 | err: | 1795 | err: |
1770 | if (d40d) | 1796 | if (desc) |
1771 | d40_desc_free(d40c, d40d); | 1797 | d40_desc_free(chan, desc); |
1772 | spin_unlock_irqrestore(&d40c->lock, flags); | 1798 | spin_unlock_irqrestore(&chan->lock, flags); |
1773 | return NULL; | 1799 | return NULL; |
1774 | } | 1800 | } |
1775 | 1801 | ||
@@ -1925,37 +1951,19 @@ static struct dma_async_tx_descriptor *d40_prep_memcpy(struct dma_chan *chan, | |||
1925 | sg_dma_len(&dst_sg) = size; | 1951 | sg_dma_len(&dst_sg) = size; |
1926 | sg_dma_len(&src_sg) = size; | 1952 | sg_dma_len(&src_sg) = size; |
1927 | 1953 | ||
1928 | return stedma40_memcpy_sg(chan, &dst_sg, &src_sg, 1, dma_flags); | 1954 | return d40_prep_sg(chan, &src_sg, &dst_sg, 1, DMA_NONE, dma_flags); |
1929 | } | 1955 | } |
1930 | 1956 | ||
1931 | static struct dma_async_tx_descriptor * | 1957 | static struct dma_async_tx_descriptor * |
1932 | d40_prep_sg(struct dma_chan *chan, | 1958 | d40_prep_memcpy_sg(struct dma_chan *chan, |
1933 | struct scatterlist *dst_sg, unsigned int dst_nents, | 1959 | struct scatterlist *dst_sg, unsigned int dst_nents, |
1934 | struct scatterlist *src_sg, unsigned int src_nents, | 1960 | struct scatterlist *src_sg, unsigned int src_nents, |
1935 | unsigned long dma_flags) | 1961 | unsigned long dma_flags) |
1936 | { | 1962 | { |
1937 | if (dst_nents != src_nents) | 1963 | if (dst_nents != src_nents) |
1938 | return NULL; | 1964 | return NULL; |
1939 | 1965 | ||
1940 | return stedma40_memcpy_sg(chan, dst_sg, src_sg, dst_nents, dma_flags); | 1966 | return d40_prep_sg(chan, src_sg, dst_sg, src_nents, DMA_NONE, dma_flags); |
1941 | } | ||
1942 | |||
1943 | static dma_addr_t | ||
1944 | d40_get_dev_addr(struct d40_chan *chan, enum dma_data_direction direction) | ||
1945 | { | ||
1946 | struct stedma40_platform_data *plat = chan->base->plat_data; | ||
1947 | struct stedma40_chan_cfg *cfg = &chan->dma_cfg; | ||
1948 | dma_addr_t addr; | ||
1949 | |||
1950 | if (chan->runtime_addr) | ||
1951 | return chan->runtime_addr; | ||
1952 | |||
1953 | if (direction == DMA_FROM_DEVICE) | ||
1954 | addr = plat->dev_rx[cfg->src_dev_type]; | ||
1955 | else if (direction == DMA_TO_DEVICE) | ||
1956 | addr = plat->dev_tx[cfg->dst_dev_type]; | ||
1957 | |||
1958 | return addr; | ||
1959 | } | 1967 | } |
1960 | 1968 | ||
1961 | static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan, | 1969 | static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan, |
@@ -1964,50 +1972,10 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan, | |||
1964 | enum dma_data_direction direction, | 1972 | enum dma_data_direction direction, |
1965 | unsigned long dma_flags) | 1973 | unsigned long dma_flags) |
1966 | { | 1974 | { |
1967 | struct d40_desc *d40d; | ||
1968 | struct d40_chan *d40c = container_of(chan, struct d40_chan, | ||
1969 | chan); | ||
1970 | dma_addr_t dev_addr; | ||
1971 | unsigned long flags; | ||
1972 | int err; | ||
1973 | |||
1974 | if (d40c->phy_chan == NULL) { | ||
1975 | chan_err(d40c, "Cannot prepare unallocated channel\n"); | ||
1976 | return ERR_PTR(-EINVAL); | ||
1977 | } | ||
1978 | |||
1979 | if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) | 1975 | if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) |
1980 | return NULL; | 1976 | return NULL; |
1981 | 1977 | ||
1982 | spin_lock_irqsave(&d40c->lock, flags); | 1978 | return d40_prep_sg(chan, sgl, sgl, sg_len, direction, dma_flags); |
1983 | |||
1984 | d40d = d40_prep_desc(d40c, sgl, sg_len, dma_flags); | ||
1985 | if (d40d == NULL) | ||
1986 | goto err; | ||
1987 | |||
1988 | dev_addr = d40_get_dev_addr(d40c, direction); | ||
1989 | |||
1990 | if (chan_is_logical(d40c)) | ||
1991 | err = d40_prep_sg_log(d40c, d40d, sgl, NULL, | ||
1992 | sg_len, direction, dev_addr); | ||
1993 | else | ||
1994 | err = d40_prep_sg_phy(d40c, d40d, sgl, NULL, | ||
1995 | sg_len, direction, dev_addr); | ||
1996 | |||
1997 | if (err) { | ||
1998 | chan_err(d40c, "Failed to prepare %s slave sg job: %d\n", | ||
1999 | chan_is_logical(d40c) ? "log" : "phy", err); | ||
2000 | goto err; | ||
2001 | } | ||
2002 | |||
2003 | spin_unlock_irqrestore(&d40c->lock, flags); | ||
2004 | return &d40d->txd; | ||
2005 | |||
2006 | err: | ||
2007 | if (d40d) | ||
2008 | d40_desc_free(d40c, d40d); | ||
2009 | spin_unlock_irqrestore(&d40c->lock, flags); | ||
2010 | return NULL; | ||
2011 | } | 1979 | } |
2012 | 1980 | ||
2013 | static enum dma_status d40_tx_status(struct dma_chan *chan, | 1981 | static enum dma_status d40_tx_status(struct dma_chan *chan, |
@@ -2267,7 +2235,7 @@ static int __init d40_dmaengine_init(struct d40_base *base, | |||
2267 | base->dma_slave.device_alloc_chan_resources = d40_alloc_chan_resources; | 2235 | base->dma_slave.device_alloc_chan_resources = d40_alloc_chan_resources; |
2268 | base->dma_slave.device_free_chan_resources = d40_free_chan_resources; | 2236 | base->dma_slave.device_free_chan_resources = d40_free_chan_resources; |
2269 | base->dma_slave.device_prep_dma_memcpy = d40_prep_memcpy; | 2237 | base->dma_slave.device_prep_dma_memcpy = d40_prep_memcpy; |
2270 | base->dma_slave.device_prep_dma_sg = d40_prep_sg; | 2238 | base->dma_slave.device_prep_dma_sg = d40_prep_memcpy_sg; |
2271 | base->dma_slave.device_prep_slave_sg = d40_prep_slave_sg; | 2239 | base->dma_slave.device_prep_slave_sg = d40_prep_slave_sg; |
2272 | base->dma_slave.device_tx_status = d40_tx_status; | 2240 | base->dma_slave.device_tx_status = d40_tx_status; |
2273 | base->dma_slave.device_issue_pending = d40_issue_pending; | 2241 | base->dma_slave.device_issue_pending = d40_issue_pending; |
@@ -2291,7 +2259,7 @@ static int __init d40_dmaengine_init(struct d40_base *base, | |||
2291 | base->dma_memcpy.device_alloc_chan_resources = d40_alloc_chan_resources; | 2259 | base->dma_memcpy.device_alloc_chan_resources = d40_alloc_chan_resources; |
2292 | base->dma_memcpy.device_free_chan_resources = d40_free_chan_resources; | 2260 | base->dma_memcpy.device_free_chan_resources = d40_free_chan_resources; |
2293 | base->dma_memcpy.device_prep_dma_memcpy = d40_prep_memcpy; | 2261 | base->dma_memcpy.device_prep_dma_memcpy = d40_prep_memcpy; |
2294 | base->dma_slave.device_prep_dma_sg = d40_prep_sg; | 2262 | base->dma_slave.device_prep_dma_sg = d40_prep_memcpy_sg; |
2295 | base->dma_memcpy.device_prep_slave_sg = d40_prep_slave_sg; | 2263 | base->dma_memcpy.device_prep_slave_sg = d40_prep_slave_sg; |
2296 | base->dma_memcpy.device_tx_status = d40_tx_status; | 2264 | base->dma_memcpy.device_tx_status = d40_tx_status; |
2297 | base->dma_memcpy.device_issue_pending = d40_issue_pending; | 2265 | base->dma_memcpy.device_issue_pending = d40_issue_pending; |
@@ -2322,7 +2290,7 @@ static int __init d40_dmaengine_init(struct d40_base *base, | |||
2322 | base->dma_both.device_alloc_chan_resources = d40_alloc_chan_resources; | 2290 | base->dma_both.device_alloc_chan_resources = d40_alloc_chan_resources; |
2323 | base->dma_both.device_free_chan_resources = d40_free_chan_resources; | 2291 | base->dma_both.device_free_chan_resources = d40_free_chan_resources; |
2324 | base->dma_both.device_prep_dma_memcpy = d40_prep_memcpy; | 2292 | base->dma_both.device_prep_dma_memcpy = d40_prep_memcpy; |
2325 | base->dma_slave.device_prep_dma_sg = d40_prep_sg; | 2293 | base->dma_slave.device_prep_dma_sg = d40_prep_memcpy_sg; |
2326 | base->dma_both.device_prep_slave_sg = d40_prep_slave_sg; | 2294 | base->dma_both.device_prep_slave_sg = d40_prep_slave_sg; |
2327 | base->dma_both.device_tx_status = d40_tx_status; | 2295 | base->dma_both.device_tx_status = d40_tx_status; |
2328 | base->dma_both.device_issue_pending = d40_issue_pending; | 2296 | base->dma_both.device_issue_pending = d40_issue_pending; |