author     Maxime Ripard <maxime.ripard@free-electrons.com>   2015-10-22 05:41:00 -0400
committer  Vinod Koul <vinod.koul@intel.com>                  2015-10-28 21:41:16 -0400
commit     67d25f0d4e24775418aae403610cae99e27cdc3c (patch)
tree       41aa45a73436dfc2b5fdb78d636672ff29605396
parent     ce2a673d66b2cab4b459981be1a28bbb6c071555 (diff)
dmaengine: hdmac: Add scatter-gathered memset support
Just like the plain memset support, the HDMAC might be used to perform a memset over a discontiguous memory area.

In such a case, we simply build up a chain of memset descriptors, one per contiguous chunk of memory to set, in order to support this.

Signed-off-by: Maxime Ripard <maxime.ripard@free-electrons.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
-rw-r--r--  drivers/dma/at_hdmac.c | 79
1 file changed, 79 insertions(+), 0 deletions(-)
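For illustration only (not part of the patch): a minimal client-side sketch of how a dmaengine consumer might exercise the new scatter-gathered memset operation on a channel whose device advertises DMA_MEMSET_SG. The function name example_memset_sg and the dev/chan/buf_a/buf_b parameters are hypothetical, and calling chan->device->device_prep_dma_memset_sg() directly is a simplification of what the framework does; error handling and completion waiting are abbreviated.

/*
 * Illustrative sketch only -- all names below (example_memset_sg, dev,
 * chan, buf_a, buf_b) are hypothetical, not part of this patch.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int example_memset_sg(struct device *dev, struct dma_chan *chan,
                             void *buf_a, size_t len_a,
                             void *buf_b, size_t len_b, int value)
{
        struct dma_async_tx_descriptor *txd;
        struct scatterlist sgl[2];
        dma_cookie_t cookie;
        int nents;

        /* Describe the two discontiguous chunks to be filled. */
        sg_init_table(sgl, 2);
        sg_set_buf(&sgl[0], buf_a, len_a);
        sg_set_buf(&sgl[1], buf_b, len_b);

        /* The controller writes into memory, so map in the FROM_DEVICE direction. */
        nents = dma_map_sg(dev, sgl, 2, DMA_FROM_DEVICE);
        if (!nents)
                return -ENOMEM;

        /*
         * Callback wired up by this patch.  Each chunk must satisfy the
         * driver's fill_align (4 bytes for at_hdmac), or NULL is returned.
         */
        txd = chan->device->device_prep_dma_memset_sg(chan, sgl, nents, value,
                                                      DMA_PREP_INTERRUPT |
                                                      DMA_CTRL_ACK);
        if (!txd) {
                dma_unmap_sg(dev, sgl, 2, DMA_FROM_DEVICE);
                return -EIO;
        }

        cookie = dmaengine_submit(txd);
        dma_async_issue_pending(chan);

        /* ... wait for completion, then dma_unmap_sg() the buffers ... */
        return dma_submit_error(cookie) ? -EIO : 0;
}

In practice a consumer would first check dma_has_cap(DMA_MEMSET_SG, chan->device->cap_mask) before relying on the callback being present.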
diff --git a/drivers/dma/at_hdmac.c b/drivers/dma/at_hdmac.c
index cad18f3660ae..4e55239c7a30 100644
--- a/drivers/dma/at_hdmac.c
+++ b/drivers/dma/at_hdmac.c
@@ -986,6 +986,83 @@ err_free_buffer:
         return NULL;
 }
 
+static struct dma_async_tx_descriptor *
+atc_prep_dma_memset_sg(struct dma_chan *chan,
+                       struct scatterlist *sgl,
+                       unsigned int sg_len, int value,
+                       unsigned long flags)
+{
+        struct at_dma_chan *atchan = to_at_dma_chan(chan);
+        struct at_dma *atdma = to_at_dma(chan->device);
+        struct at_desc *desc = NULL, *first = NULL, *prev = NULL;
+        struct scatterlist *sg;
+        void __iomem *vaddr;
+        dma_addr_t paddr;
+        size_t total_len = 0;
+        int i;
+
+        dev_vdbg(chan2dev(chan), "%s: v0x%x l0x%zx f0x%lx\n", __func__,
+                 value, sg_len, flags);
+
+        if (unlikely(!sgl || !sg_len)) {
+                dev_dbg(chan2dev(chan), "%s: scatterlist is empty!\n",
+                        __func__);
+                return NULL;
+        }
+
+        vaddr = dma_pool_alloc(atdma->memset_pool, GFP_ATOMIC, &paddr);
+        if (!vaddr) {
+                dev_err(chan2dev(chan), "%s: couldn't allocate buffer\n",
+                        __func__);
+                return NULL;
+        }
+        *(u32*)vaddr = value;
+
+        for_each_sg(sgl, sg, sg_len, i) {
+                dma_addr_t dest = sg_dma_address(sg);
+                size_t len = sg_dma_len(sg);
+
+                dev_vdbg(chan2dev(chan), "%s: d0x%08x, l0x%zx\n",
+                         __func__, dest, len);
+
+                if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
+                        dev_err(chan2dev(chan), "%s: buffer is not aligned\n",
+                                __func__);
+                        goto err_put_desc;
+                }
+
+                desc = atc_create_memset_desc(chan, paddr, dest, len);
+                if (!desc)
+                        goto err_put_desc;
+
+                atc_desc_chain(&first, &prev, desc);
+
+                total_len += len;
+        }
+
+        /*
+         * Only set the buffer pointers on the last descriptor to
+         * avoid free'ing while we have our transfer still going
+         */
+        desc->memset_paddr = paddr;
+        desc->memset_vaddr = vaddr;
+        desc->memset_buffer = true;
+
+        first->txd.cookie = -EBUSY;
+        first->total_len = total_len;
+
+        /* set end-of-link on the descriptor */
+        set_desc_eol(desc);
+
+        first->txd.flags = flags;
+
+        return &first->txd;
+
+err_put_desc:
+        atc_desc_put(atchan, first);
+        return NULL;
+}
+
 /**
  * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
  * @chan: DMA channel
@@ -1868,6 +1945,7 @@ static int __init at_dma_probe(struct platform_device *pdev)
         dma_cap_set(DMA_INTERLEAVE, at91sam9g45_config.cap_mask);
         dma_cap_set(DMA_MEMCPY, at91sam9g45_config.cap_mask);
         dma_cap_set(DMA_MEMSET, at91sam9g45_config.cap_mask);
+        dma_cap_set(DMA_MEMSET_SG, at91sam9g45_config.cap_mask);
         dma_cap_set(DMA_PRIVATE, at91sam9g45_config.cap_mask);
         dma_cap_set(DMA_SLAVE, at91sam9g45_config.cap_mask);
         dma_cap_set(DMA_SG, at91sam9g45_config.cap_mask);
@@ -1989,6 +2067,7 @@ static int __init at_dma_probe(struct platform_device *pdev)
 
         if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) {
                 atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset;
+                atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
                 atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES;
         }
 