diff options
author | Alex Smith <alex.smith@imgtec.com> | 2015-07-24 12:24:22 -0400 |
---|---|---|
committer | Vinod Koul <vinod.koul@intel.com> | 2015-08-18 12:58:49 -0400 |
commit | 839896ef3fdacf3a27460b6f6dabc6ac1475a00c (patch) | |
tree | a8e2c9dbdf9187c39253b9b1822415f47095c8a9 /drivers/dma/dma-jz4780.c | |
parent | dc578f314e2471ca93a4c1f80988ecc781836f72 (diff) |
dmaengine: jz4780: Fix error handling/signedness issues
There are some signedness bugs such as testing for < 0 on unsigned
return values. Additionally there are some cases where functions which
should return NULL on error actually return a PTR_ERR value which can
result in oopses on error. Fix these issues.
Signed-off-by: Alex Smith <alex.smith@imgtec.com>
Cc: Vinod Koul <vinod.koul@intel.com>
Cc: Zubair Lutfullah Kakakhel <Zubair.Kakakhel@imgtec.com>
Cc: dmaengine@vger.kernel.org
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
Diffstat (limited to 'drivers/dma/dma-jz4780.c')
-rw-r--r-- | drivers/dma/dma-jz4780.c | 33 |
1 file changed, 17 insertions, 16 deletions
diff --git a/drivers/dma/dma-jz4780.c b/drivers/dma/dma-jz4780.c index 7af886fc47af..e4a291c67e2a 100644 --- a/drivers/dma/dma-jz4780.c +++ b/drivers/dma/dma-jz4780.c | |||
@@ -250,7 +250,7 @@ static uint32_t jz4780_dma_transfer_size(unsigned long val, uint32_t *shift) | |||
250 | } | 250 | } |
251 | } | 251 | } |
252 | 252 | ||
253 | static uint32_t jz4780_dma_setup_hwdesc(struct jz4780_dma_chan *jzchan, | 253 | static int jz4780_dma_setup_hwdesc(struct jz4780_dma_chan *jzchan, |
254 | struct jz4780_dma_hwdesc *desc, dma_addr_t addr, size_t len, | 254 | struct jz4780_dma_hwdesc *desc, dma_addr_t addr, size_t len, |
255 | enum dma_transfer_direction direction) | 255 | enum dma_transfer_direction direction) |
256 | { | 256 | { |
@@ -301,6 +301,7 @@ static uint32_t jz4780_dma_setup_hwdesc(struct jz4780_dma_chan *jzchan, | |||
301 | desc->dcm |= width << JZ_DMA_DCM_DP_SHIFT; | 301 | desc->dcm |= width << JZ_DMA_DCM_DP_SHIFT; |
302 | 302 | ||
303 | desc->dtc = len >> jzchan->transfer_shift; | 303 | desc->dtc = len >> jzchan->transfer_shift; |
304 | return 0; | ||
304 | } | 305 | } |
305 | 306 | ||
306 | static struct dma_async_tx_descriptor *jz4780_dma_prep_slave_sg( | 307 | static struct dma_async_tx_descriptor *jz4780_dma_prep_slave_sg( |
@@ -319,12 +320,11 @@ static struct dma_async_tx_descriptor *jz4780_dma_prep_slave_sg( | |||
319 | 320 | ||
320 | for (i = 0; i < sg_len; i++) { | 321 | for (i = 0; i < sg_len; i++) { |
321 | err = jz4780_dma_setup_hwdesc(jzchan, &desc->desc[i], | 322 | err = jz4780_dma_setup_hwdesc(jzchan, &desc->desc[i], |
322 | sg_dma_address(&sgl[i]), | 323 | sg_dma_address(&sgl[i]), |
323 | sg_dma_len(&sgl[i]), | 324 | sg_dma_len(&sgl[i]), |
324 | direction); | 325 | direction); |
325 | if (err < 0) | 326 | if (err < 0) |
326 | return ERR_PTR(err); | 327 | return NULL; |
327 | |||
328 | 328 | ||
329 | desc->desc[i].dcm |= JZ_DMA_DCM_TIE; | 329 | desc->desc[i].dcm |= JZ_DMA_DCM_TIE; |
330 | 330 | ||
@@ -366,9 +366,9 @@ static struct dma_async_tx_descriptor *jz4780_dma_prep_dma_cyclic( | |||
366 | 366 | ||
367 | for (i = 0; i < periods; i++) { | 367 | for (i = 0; i < periods; i++) { |
368 | err = jz4780_dma_setup_hwdesc(jzchan, &desc->desc[i], buf_addr, | 368 | err = jz4780_dma_setup_hwdesc(jzchan, &desc->desc[i], buf_addr, |
369 | period_len, direction); | 369 | period_len, direction); |
370 | if (err < 0) | 370 | if (err < 0) |
371 | return ERR_PTR(err); | 371 | return NULL; |
372 | 372 | ||
373 | buf_addr += period_len; | 373 | buf_addr += period_len; |
374 | 374 | ||
@@ -417,7 +417,7 @@ struct dma_async_tx_descriptor *jz4780_dma_prep_dma_memcpy( | |||
417 | tsz << JZ_DMA_DCM_TSZ_SHIFT | | 417 | tsz << JZ_DMA_DCM_TSZ_SHIFT | |
418 | JZ_DMA_WIDTH_32_BIT << JZ_DMA_DCM_SP_SHIFT | | 418 | JZ_DMA_WIDTH_32_BIT << JZ_DMA_DCM_SP_SHIFT | |
419 | JZ_DMA_WIDTH_32_BIT << JZ_DMA_DCM_DP_SHIFT; | 419 | JZ_DMA_WIDTH_32_BIT << JZ_DMA_DCM_DP_SHIFT; |
420 | desc->desc[0].dtc = len >> ord; | 420 | desc->desc[0].dtc = len >> jzchan->transfer_shift; |
421 | 421 | ||
422 | return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); | 422 | return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); |
423 | } | 423 | } |
@@ -580,8 +580,8 @@ static enum dma_status jz4780_dma_tx_status(struct dma_chan *chan, | |||
580 | txstate->residue = 0; | 580 | txstate->residue = 0; |
581 | 581 | ||
582 | if (vdesc && jzchan->desc && vdesc == &jzchan->desc->vdesc | 582 | if (vdesc && jzchan->desc && vdesc == &jzchan->desc->vdesc |
583 | && jzchan->desc->status & (JZ_DMA_DCS_AR | JZ_DMA_DCS_HLT)) | 583 | && jzchan->desc->status & (JZ_DMA_DCS_AR | JZ_DMA_DCS_HLT)) |
584 | status = DMA_ERROR; | 584 | status = DMA_ERROR; |
585 | 585 | ||
586 | spin_unlock_irqrestore(&jzchan->vchan.lock, flags); | 586 | spin_unlock_irqrestore(&jzchan->vchan.lock, flags); |
587 | return status; | 587 | return status; |
@@ -756,17 +756,19 @@ static int jz4780_dma_probe(struct platform_device *pdev) | |||
756 | if (IS_ERR(jzdma->base)) | 756 | if (IS_ERR(jzdma->base)) |
757 | return PTR_ERR(jzdma->base); | 757 | return PTR_ERR(jzdma->base); |
758 | 758 | ||
759 | jzdma->irq = platform_get_irq(pdev, 0); | 759 | ret = platform_get_irq(pdev, 0); |
760 | if (jzdma->irq < 0) { | 760 | if (ret < 0) { |
761 | dev_err(dev, "failed to get IRQ: %d\n", ret); | 761 | dev_err(dev, "failed to get IRQ: %d\n", ret); |
762 | return jzdma->irq; | 762 | return ret; |
763 | } | 763 | } |
764 | 764 | ||
765 | jzdma->irq = ret; | ||
766 | |||
765 | ret = devm_request_irq(dev, jzdma->irq, jz4780_dma_irq_handler, 0, | 767 | ret = devm_request_irq(dev, jzdma->irq, jz4780_dma_irq_handler, 0, |
766 | dev_name(dev), jzdma); | 768 | dev_name(dev), jzdma); |
767 | if (ret) { | 769 | if (ret) { |
768 | dev_err(dev, "failed to request IRQ %u!\n", jzdma->irq); | 770 | dev_err(dev, "failed to request IRQ %u!\n", jzdma->irq); |
769 | return -EINVAL; | 771 | return ret; |
770 | } | 772 | } |
771 | 773 | ||
772 | jzdma->clk = devm_clk_get(dev, NULL); | 774 | jzdma->clk = devm_clk_get(dev, NULL); |
@@ -803,7 +805,6 @@ static int jz4780_dma_probe(struct platform_device *pdev) | |||
803 | dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); | 805 | dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); |
804 | dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; | 806 | dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; |
805 | 807 | ||
806 | |||
807 | /* | 808 | /* |
808 | * Enable DMA controller, mark all channels as not programmable. | 809 | * Enable DMA controller, mark all channels as not programmable. |
809 | * Also set the FMSC bit - it increases MSC performance, so it makes | 810 | * Also set the FMSC bit - it increases MSC performance, so it makes |