aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/spi/amba-pl022.c
diff options
context:
space:
mode:
authorLinus Walleij <linus.walleij@stericsson.com>2010-12-22 17:13:37 -0500
committerGrant Likely <grant.likely@secretlab.ca>2010-12-23 23:06:46 -0500
commit082086f2ce53c69260396e977d29972128def1d7 (patch)
treef06554960ae266151e300193608f543863318783 /drivers/spi/amba-pl022.c
parentb729889686afb7d4366e07fe9c2f7a2737166462 (diff)
spi/pl022: pass the returned sglen to the DMA engine
The sglen returned by dma_map_sg() should be passed to the DMA engine, not the one passed in. If we one day have a DMA mapper that can coalesce entries, this would be a bug due to too large a number of entries being passed in. Reported-by: Russell King <linux@arm.linux.org.uk> Signed-off-by: Linus Walleij <linus.walleij@stericsson.com> Signed-off-by: Grant Likely <grant.likely@secretlab.ca>
Diffstat (limited to 'drivers/spi/amba-pl022.c')
-rw-r--r--drivers/spi/amba-pl022.c14
1 file changed, 7 insertions, 7 deletions
diff --git a/drivers/spi/amba-pl022.c b/drivers/spi/amba-pl022.c
index e29751af5c7b..36ec1327a19b 100644
--- a/drivers/spi/amba-pl022.c
+++ b/drivers/spi/amba-pl022.c
@@ -917,7 +917,7 @@ static int configure_dma(struct pl022 *pl022)
917 }; 917 };
918 unsigned int pages; 918 unsigned int pages;
919 int ret; 919 int ret;
920 int sglen; 920 int rx_sglen, tx_sglen;
921 struct dma_chan *rxchan = pl022->dma_rx_channel; 921 struct dma_chan *rxchan = pl022->dma_rx_channel;
922 struct dma_chan *txchan = pl022->dma_tx_channel; 922 struct dma_chan *txchan = pl022->dma_tx_channel;
923 struct dma_async_tx_descriptor *rxdesc; 923 struct dma_async_tx_descriptor *rxdesc;
@@ -991,20 +991,20 @@ static int configure_dma(struct pl022 *pl022)
991 pl022->cur_transfer->len, &pl022->sgt_tx); 991 pl022->cur_transfer->len, &pl022->sgt_tx);
992 992
993 /* Map DMA buffers */ 993 /* Map DMA buffers */
994 sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl, 994 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl,
995 pl022->sgt_rx.nents, DMA_FROM_DEVICE); 995 pl022->sgt_rx.nents, DMA_FROM_DEVICE);
996 if (!sglen) 996 if (!rx_sglen)
997 goto err_rx_sgmap; 997 goto err_rx_sgmap;
998 998
999 sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl, 999 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl,
1000 pl022->sgt_tx.nents, DMA_TO_DEVICE); 1000 pl022->sgt_tx.nents, DMA_TO_DEVICE);
1001 if (!sglen) 1001 if (!tx_sglen)
1002 goto err_tx_sgmap; 1002 goto err_tx_sgmap;
1003 1003
1004 /* Send both scatterlists */ 1004 /* Send both scatterlists */
1005 rxdesc = rxchan->device->device_prep_slave_sg(rxchan, 1005 rxdesc = rxchan->device->device_prep_slave_sg(rxchan,
1006 pl022->sgt_rx.sgl, 1006 pl022->sgt_rx.sgl,
1007 pl022->sgt_rx.nents, 1007 rx_sglen,
1008 DMA_FROM_DEVICE, 1008 DMA_FROM_DEVICE,
1009 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1009 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1010 if (!rxdesc) 1010 if (!rxdesc)
@@ -1012,7 +1012,7 @@ static int configure_dma(struct pl022 *pl022)
1012 1012
1013 txdesc = txchan->device->device_prep_slave_sg(txchan, 1013 txdesc = txchan->device->device_prep_slave_sg(txchan,
1014 pl022->sgt_tx.sgl, 1014 pl022->sgt_tx.sgl,
1015 pl022->sgt_tx.nents, 1015 tx_sglen,
1016 DMA_TO_DEVICE, 1016 DMA_TO_DEVICE,
1017 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 1017 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1018 if (!txdesc) 1018 if (!txdesc)