author		Andy Shevchenko <andriy.shevchenko@linux.intel.com>	2014-10-28 12:25:02 -0400
committer	Mark Brown <broonie@kernel.org>	2014-10-28 18:40:38 -0400
commit		30c8eb52cc4a7bed59d85243e769ce420f179140 (patch)
tree		24f623b6778fcebfdaf8b5c8e30abbedaa600fb4 /drivers/spi
parent		a5c2db964d3eb26b41bd7abc1b13486f732b3aa2 (diff)
spi: dw-mid: split rx and tx callbacks when DMA
Currently the driver wouldn't work properly if the user asked for a simplex transfer. The patch separates the DMA rx and tx callbacks and finishes the transfer correctly in any case.

Signed-off-by: Andy Shevchenko <andriy.shevchenko@linux.intel.com>
Signed-off-by: Mark Brown <broonie@kernel.org>
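The sketch below is a minimal, self-contained userspace illustration of the bookkeeping this patch introduces; it is not the driver code. Each channel sets its own BUSY bit before its descriptor is submitted, each completion callback clears its own bit, and the transfer is reported finished only once the other channel's bit is also clear. The helper names (chan_done, xfer_done) and the plain bitmask standing in for the kernel's atomic set_bit()/test_and_clear_bit() on dws->dma_chan_busy are assumptions made for the example.

/*
 * Minimal userspace sketch of the busy-bit bookkeeping (illustration only,
 * not the driver code): a plain bitmask stands in for the kernel's atomic
 * set_bit()/test_and_clear_bit() on dws->dma_chan_busy.
 */
#include <stdio.h>

#define RX_BUSY	0
#define TX_BUSY	1
#define BIT(n)	(1UL << (n))

static unsigned long dma_chan_busy;

static void xfer_done(void)
{
	puts("whole SPI transfer finished");
}

/* Clear this channel's bit; finish only if the other channel is idle too. */
static void chan_done(int my_bit, int other_bit)
{
	dma_chan_busy &= ~BIT(my_bit);
	if (dma_chan_busy & BIT(other_bit))
		return;			/* other channel still running */
	xfer_done();
}

static void tx_done(void) { chan_done(TX_BUSY, RX_BUSY); }
static void rx_done(void) { chan_done(RX_BUSY, TX_BUSY); }

int main(void)
{
	/* Full-duplex: both bits set before submit; second callback finishes. */
	dma_chan_busy = BIT(RX_BUSY) | BIT(TX_BUSY);
	rx_done();			/* tx still busy: nothing happens */
	tx_done();			/* prints once */

	/* Simplex (tx only): the rx descriptor was never prepared or submitted. */
	dma_chan_busy = BIT(TX_BUSY);
	tx_done();			/* finishes immediately */

	return 0;
}

With the old single counter (dw_spi_xfer_done() called only when dma_chan_done reached 2), a simplex transfer only ever saw one callback, so the transfer never completed; tracking the two channels as independent bits avoids that.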
Diffstat (limited to 'drivers/spi')
-rw-r--r--	drivers/spi/spi-dw-mid.c	53
-rw-r--r--	drivers/spi/spi-dw.h	2
2 files changed, 40 insertions(+), 15 deletions(-)
diff --git a/drivers/spi/spi-dw-mid.c b/drivers/spi/spi-dw-mid.c
index c8319ab0bbdf..7281316a5ecb 100644
--- a/drivers/spi/spi-dw-mid.c
+++ b/drivers/spi/spi-dw-mid.c
@@ -26,6 +26,9 @@
 #include <linux/intel_mid_dma.h>
 #include <linux/pci.h>
 
+#define RX_BUSY		0
+#define TX_BUSY		1
+
 struct mid_dma {
 	struct intel_mid_dma_slave dmas_tx;
 	struct intel_mid_dma_slave dmas_rx;
@@ -98,15 +101,14 @@ static void mid_spi_dma_exit(struct dw_spi *dws)
 }
 
 /*
- * dws->dma_chan_done is cleared before the dma transfer starts,
- * callback for rx/tx channel will each increment it by 1.
- * Reaching 2 means the whole spi transaction is done.
+ * dws->dma_chan_busy is set before the dma transfer starts, callback for tx
+ * channel will clear a corresponding bit.
  */
-static void dw_spi_dma_done(void *arg)
+static void dw_spi_dma_tx_done(void *arg)
 {
 	struct dw_spi *dws = arg;
 
-	if (++dws->dma_chan_done != 2)
+	if (test_and_clear_bit(TX_BUSY, &dws->dma_chan_busy) & BIT(RX_BUSY))
 		return;
 	dw_spi_xfer_done(dws);
 }
@@ -116,6 +118,9 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
 	struct dma_slave_config txconf;
 	struct dma_async_tx_descriptor *txdesc;
 
+	if (!dws->tx_dma)
+		return NULL;
+
 	txconf.direction = DMA_MEM_TO_DEV;
 	txconf.dst_addr = dws->dma_addr;
 	txconf.dst_maxburst = LNW_DMA_MSIZE_16;
@@ -134,17 +139,33 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
 			1,
 			DMA_MEM_TO_DEV,
 			DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	txdesc->callback = dw_spi_dma_done;
+	txdesc->callback = dw_spi_dma_tx_done;
 	txdesc->callback_param = dws;
 
 	return txdesc;
 }
 
+/*
+ * dws->dma_chan_busy is set before the dma transfer starts, callback for rx
+ * channel will clear a corresponding bit.
+ */
+static void dw_spi_dma_rx_done(void *arg)
+{
+	struct dw_spi *dws = arg;
+
+	if (test_and_clear_bit(RX_BUSY, &dws->dma_chan_busy) & BIT(TX_BUSY))
+		return;
+	dw_spi_xfer_done(dws);
+}
+
 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
 {
 	struct dma_slave_config rxconf;
 	struct dma_async_tx_descriptor *rxdesc;
 
+	if (!dws->rx_dma)
+		return NULL;
+
 	rxconf.direction = DMA_DEV_TO_MEM;
 	rxconf.src_addr = dws->dma_addr;
 	rxconf.src_maxburst = LNW_DMA_MSIZE_16;
@@ -163,7 +184,7 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
 			1,
 			DMA_DEV_TO_MEM,
 			DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	rxdesc->callback = dw_spi_dma_done;
+	rxdesc->callback = dw_spi_dma_rx_done;
 	rxdesc->callback_param = dws;
 
 	return rxdesc;
@@ -195,8 +216,6 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
 	if (cs_change)
 		dw_spi_dma_setup(dws);
 
-	dws->dma_chan_done = 0;
-
 	/* 2. Prepare the TX dma transfer */
 	txdesc = dw_spi_dma_prepare_tx(dws);
 
@@ -204,11 +223,17 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
 	rxdesc = dw_spi_dma_prepare_rx(dws);
 
 	/* rx must be started before tx due to spi instinct */
-	dmaengine_submit(rxdesc);
-	dma_async_issue_pending(dws->rxchan);
-
-	dmaengine_submit(txdesc);
-	dma_async_issue_pending(dws->txchan);
+	if (rxdesc) {
+		set_bit(RX_BUSY, &dws->dma_chan_busy);
+		dmaengine_submit(rxdesc);
+		dma_async_issue_pending(dws->rxchan);
+	}
+
+	if (txdesc) {
+		set_bit(TX_BUSY, &dws->dma_chan_busy);
+		dmaengine_submit(txdesc);
+		dma_async_issue_pending(dws->txchan);
+	}
 
 	return 0;
 }
diff --git a/drivers/spi/spi-dw.h b/drivers/spi/spi-dw.h
index 83a103a76481..3d32be68c142 100644
--- a/drivers/spi/spi-dw.h
+++ b/drivers/spi/spi-dw.h
@@ -139,7 +139,7 @@ struct dw_spi {
 	struct scatterlist	tx_sgl;
 	struct dma_chan		*rxchan;
 	struct scatterlist	rx_sgl;
-	int			dma_chan_done;
+	unsigned long		dma_chan_busy;
 	struct device		*dma_dev;
 	dma_addr_t		dma_addr; /* phy address of the Data register */
 	struct dw_spi_dma_ops	*dma_ops;