diff options
author | Pierre-Yves MORDRET <pierre-yves.mordret@st.com> | 2018-04-13 09:52:13 -0400 |
---|---|---|
committer | Vinod Koul <vkoul@kernel.org> | 2018-04-25 05:09:53 -0400 |
commit | bbb5a4e1e772c878783c2d59b97ac3b358d4a0a6 (patch) | |
tree | 947273b4e7a449e410430e63301e3923ed82fbe4 | |
parent | ee6de9ac527f0630d496303df3f270678128e6c6 (diff) |
dmaengine: stm32-mdma: Fix incomplete Hw descriptors allocator
Only one hardware descriptor is allocated, regardless of how many are
required. Loop over the requested count so that every hardware
descriptor is properly allocated.
Signed-off-by: Pierre-Yves MORDRET <pierre-yves.mordret@st.com>
Signed-off-by: Vinod Koul <vkoul@kernel.org>
-rw-r--r-- | drivers/dma/stm32-mdma.c | 89 |
1 files changed, 55 insertions, 34 deletions
diff --git a/drivers/dma/stm32-mdma.c b/drivers/dma/stm32-mdma.c index 4c7634ca62a7..1ac775f93d9e 100644 --- a/drivers/dma/stm32-mdma.c +++ b/drivers/dma/stm32-mdma.c | |||
@@ -252,13 +252,17 @@ struct stm32_mdma_hwdesc { | |||
252 | u32 cmdr; | 252 | u32 cmdr; |
253 | } __aligned(64); | 253 | } __aligned(64); |
254 | 254 | ||
255 | struct stm32_mdma_desc_node { | ||
256 | struct stm32_mdma_hwdesc *hwdesc; | ||
257 | dma_addr_t hwdesc_phys; | ||
258 | }; | ||
259 | |||
255 | struct stm32_mdma_desc { | 260 | struct stm32_mdma_desc { |
256 | struct virt_dma_desc vdesc; | 261 | struct virt_dma_desc vdesc; |
257 | u32 ccr; | 262 | u32 ccr; |
258 | struct stm32_mdma_hwdesc *hwdesc; | ||
259 | dma_addr_t hwdesc_phys; | ||
260 | bool cyclic; | 263 | bool cyclic; |
261 | u32 count; | 264 | u32 count; |
265 | struct stm32_mdma_desc_node node[]; | ||
262 | }; | 266 | }; |
263 | 267 | ||
264 | struct stm32_mdma_chan { | 268 | struct stm32_mdma_chan { |
@@ -344,30 +348,42 @@ static struct stm32_mdma_desc *stm32_mdma_alloc_desc( | |||
344 | struct stm32_mdma_chan *chan, u32 count) | 348 | struct stm32_mdma_chan *chan, u32 count) |
345 | { | 349 | { |
346 | struct stm32_mdma_desc *desc; | 350 | struct stm32_mdma_desc *desc; |
351 | int i; | ||
347 | 352 | ||
348 | desc = kzalloc(sizeof(*desc), GFP_NOWAIT); | 353 | desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT); |
349 | if (!desc) | 354 | if (!desc) |
350 | return NULL; | 355 | return NULL; |
351 | 356 | ||
352 | desc->hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, | 357 | for (i = 0; i < count; i++) { |
353 | &desc->hwdesc_phys); | 358 | desc->node[i].hwdesc = |
354 | if (!desc->hwdesc) { | 359 | dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, |
355 | dev_err(chan2dev(chan), "Failed to allocate descriptor\n"); | 360 | &desc->node[i].hwdesc_phys); |
356 | kfree(desc); | 361 | if (!desc->node[i].hwdesc) |
357 | return NULL; | 362 | goto err; |
358 | } | 363 | } |
359 | 364 | ||
360 | desc->count = count; | 365 | desc->count = count; |
361 | 366 | ||
362 | return desc; | 367 | return desc; |
368 | |||
369 | err: | ||
370 | dev_err(chan2dev(chan), "Failed to allocate descriptor\n"); | ||
371 | while (--i >= 0) | ||
372 | dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, | ||
373 | desc->node[i].hwdesc_phys); | ||
374 | kfree(desc); | ||
375 | return NULL; | ||
363 | } | 376 | } |
364 | 377 | ||
365 | static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc) | 378 | static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc) |
366 | { | 379 | { |
367 | struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc); | 380 | struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc); |
368 | struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan); | 381 | struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan); |
382 | int i; | ||
369 | 383 | ||
370 | dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys); | 384 | for (i = 0; i < desc->count; i++) |
385 | dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, | ||
386 | desc->node[i].hwdesc_phys); | ||
371 | kfree(desc); | 387 | kfree(desc); |
372 | } | 388 | } |
373 | 389 | ||
@@ -666,18 +682,18 @@ static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan, | |||
666 | } | 682 | } |
667 | 683 | ||
668 | static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan, | 684 | static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan, |
669 | struct stm32_mdma_hwdesc *hwdesc) | 685 | struct stm32_mdma_desc_node *node) |
670 | { | 686 | { |
671 | dev_dbg(chan2dev(chan), "hwdesc: 0x%p\n", hwdesc); | 687 | dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys); |
672 | dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", hwdesc->ctcr); | 688 | dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", node->hwdesc->ctcr); |
673 | dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", hwdesc->cbndtr); | 689 | dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr); |
674 | dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", hwdesc->csar); | 690 | dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", node->hwdesc->csar); |
675 | dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", hwdesc->cdar); | 691 | dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", node->hwdesc->cdar); |
676 | dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", hwdesc->cbrur); | 692 | dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", node->hwdesc->cbrur); |
677 | dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", hwdesc->clar); | 693 | dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", node->hwdesc->clar); |
678 | dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", hwdesc->ctbr); | 694 | dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", node->hwdesc->ctbr); |
679 | dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", hwdesc->cmar); | 695 | dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", node->hwdesc->cmar); |
680 | dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", hwdesc->cmdr); | 696 | dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", node->hwdesc->cmdr); |
681 | } | 697 | } |
682 | 698 | ||
683 | static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan, | 699 | static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan, |
@@ -691,7 +707,7 @@ static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan, | |||
691 | struct stm32_mdma_hwdesc *hwdesc; | 707 | struct stm32_mdma_hwdesc *hwdesc; |
692 | u32 next = count + 1; | 708 | u32 next = count + 1; |
693 | 709 | ||
694 | hwdesc = &desc->hwdesc[count]; | 710 | hwdesc = desc->node[count].hwdesc; |
695 | hwdesc->ctcr = ctcr; | 711 | hwdesc->ctcr = ctcr; |
696 | hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | | 712 | hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | |
697 | STM32_MDMA_CBNDTR_BRDUM | | 713 | STM32_MDMA_CBNDTR_BRDUM | |
@@ -701,19 +717,20 @@ static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan, | |||
701 | hwdesc->csar = src_addr; | 717 | hwdesc->csar = src_addr; |
702 | hwdesc->cdar = dst_addr; | 718 | hwdesc->cdar = dst_addr; |
703 | hwdesc->cbrur = 0; | 719 | hwdesc->cbrur = 0; |
704 | hwdesc->clar = desc->hwdesc_phys + next * sizeof(*hwdesc); | ||
705 | hwdesc->ctbr = ctbr; | 720 | hwdesc->ctbr = ctbr; |
706 | hwdesc->cmar = config->mask_addr; | 721 | hwdesc->cmar = config->mask_addr; |
707 | hwdesc->cmdr = config->mask_data; | 722 | hwdesc->cmdr = config->mask_data; |
708 | 723 | ||
709 | if (is_last) { | 724 | if (is_last) { |
710 | if (is_cyclic) | 725 | if (is_cyclic) |
711 | hwdesc->clar = desc->hwdesc_phys; | 726 | hwdesc->clar = desc->node[0].hwdesc_phys; |
712 | else | 727 | else |
713 | hwdesc->clar = 0; | 728 | hwdesc->clar = 0; |
729 | } else { | ||
730 | hwdesc->clar = desc->node[next].hwdesc_phys; | ||
714 | } | 731 | } |
715 | 732 | ||
716 | stm32_mdma_dump_hwdesc(chan, hwdesc); | 733 | stm32_mdma_dump_hwdesc(chan, &desc->node[count]); |
717 | } | 734 | } |
718 | 735 | ||
719 | static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan, | 736 | static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan, |
@@ -777,7 +794,7 @@ stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl, | |||
777 | { | 794 | { |
778 | struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c); | 795 | struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c); |
779 | struct stm32_mdma_desc *desc; | 796 | struct stm32_mdma_desc *desc; |
780 | int ret; | 797 | int i, ret; |
781 | 798 | ||
782 | /* | 799 | /* |
783 | * Once DMA is in setup cyclic mode the channel we cannot assign this | 800 | * Once DMA is in setup cyclic mode the channel we cannot assign this |
@@ -803,7 +820,9 @@ stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl, | |||
803 | return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); | 820 | return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); |
804 | 821 | ||
805 | xfer_setup_err: | 822 | xfer_setup_err: |
806 | dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys); | 823 | for (i = 0; i < desc->count; i++) |
824 | dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, | ||
825 | desc->node[i].hwdesc_phys); | ||
807 | kfree(desc); | 826 | kfree(desc); |
808 | return NULL; | 827 | return NULL; |
809 | } | 828 | } |
@@ -892,7 +911,9 @@ stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr, | |||
892 | return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); | 911 | return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); |
893 | 912 | ||
894 | xfer_setup_err: | 913 | xfer_setup_err: |
895 | dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys); | 914 | for (i = 0; i < desc->count; i++) |
915 | dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, | ||
916 | desc->node[i].hwdesc_phys); | ||
896 | kfree(desc); | 917 | kfree(desc); |
897 | return NULL; | 918 | return NULL; |
898 | } | 919 | } |
@@ -1006,7 +1027,7 @@ stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src, | |||
1006 | ctcr |= STM32_MDMA_CTCR_PKE; | 1027 | ctcr |= STM32_MDMA_CTCR_PKE; |
1007 | 1028 | ||
1008 | /* Prepare hardware descriptor */ | 1029 | /* Prepare hardware descriptor */ |
1009 | hwdesc = desc->hwdesc; | 1030 | hwdesc = desc->node[0].hwdesc; |
1010 | hwdesc->ctcr = ctcr; | 1031 | hwdesc->ctcr = ctcr; |
1011 | hwdesc->cbndtr = cbndtr; | 1032 | hwdesc->cbndtr = cbndtr; |
1012 | hwdesc->csar = src; | 1033 | hwdesc->csar = src; |
@@ -1017,7 +1038,7 @@ stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src, | |||
1017 | hwdesc->cmar = 0; | 1038 | hwdesc->cmar = 0; |
1018 | hwdesc->cmdr = 0; | 1039 | hwdesc->cmdr = 0; |
1019 | 1040 | ||
1020 | stm32_mdma_dump_hwdesc(chan, hwdesc); | 1041 | stm32_mdma_dump_hwdesc(chan, &desc->node[0]); |
1021 | } else { | 1042 | } else { |
1022 | /* Setup a LLI transfer */ | 1043 | /* Setup a LLI transfer */ |
1023 | ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) | | 1044 | ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) | |
@@ -1117,7 +1138,7 @@ static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan) | |||
1117 | } | 1138 | } |
1118 | 1139 | ||
1119 | chan->desc = to_stm32_mdma_desc(vdesc); | 1140 | chan->desc = to_stm32_mdma_desc(vdesc); |
1120 | hwdesc = chan->desc->hwdesc; | 1141 | hwdesc = chan->desc->node[0].hwdesc; |
1121 | chan->curr_hwdesc = 0; | 1142 | chan->curr_hwdesc = 0; |
1122 | 1143 | ||
1123 | stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr); | 1144 | stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr); |
@@ -1195,7 +1216,7 @@ static int stm32_mdma_resume(struct dma_chan *c) | |||
1195 | unsigned long flags; | 1216 | unsigned long flags; |
1196 | u32 status, reg; | 1217 | u32 status, reg; |
1197 | 1218 | ||
1198 | hwdesc = &chan->desc->hwdesc[chan->curr_hwdesc]; | 1219 | hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc; |
1199 | 1220 | ||
1200 | spin_lock_irqsave(&chan->vchan.lock, flags); | 1221 | spin_lock_irqsave(&chan->vchan.lock, flags); |
1201 | 1222 | ||
@@ -1265,13 +1286,13 @@ static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan, | |||
1265 | u32 curr_hwdesc) | 1286 | u32 curr_hwdesc) |
1266 | { | 1287 | { |
1267 | struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan); | 1288 | struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan); |
1289 | struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc; | ||
1268 | u32 cbndtr, residue, modulo, burst_size; | 1290 | u32 cbndtr, residue, modulo, burst_size; |
1269 | int i; | 1291 | int i; |
1270 | 1292 | ||
1271 | residue = 0; | 1293 | residue = 0; |
1272 | for (i = curr_hwdesc + 1; i < desc->count; i++) { | 1294 | for (i = curr_hwdesc + 1; i < desc->count; i++) { |
1273 | struct stm32_mdma_hwdesc *hwdesc = &desc->hwdesc[i]; | 1295 | hwdesc = desc->node[i].hwdesc; |
1274 | |||
1275 | residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr); | 1296 | residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr); |
1276 | } | 1297 | } |
1277 | cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)); | 1298 | cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)); |