author     Andy Shevchenko <andriy.shevchenko@linux.intel.com>  2015-03-09 10:48:49 -0400
committer  Mark Brown <broonie@kernel.org>                      2015-03-09 14:11:13 -0400
commit     f89a6d8f43ebe9508bb5492c846ad997ad50eafe (patch)
tree       9efc2db1c5c20e114e65638bedf7f0eb880ec595 /drivers
parent     4d5ac1edfdd79aea31983333cb53dd5db29559f9 (diff)
spi: dw-mid: move to use core SPI DMA mappings
SPI core has a comprehensive set of functions to map and unmap a message when
needed. This patch converts the driver to take advantage of them.
Signed-off-by: Andy Shevchenko <andriy.shevchenko@linux.intel.com>
Signed-off-by: Mark Brown <broonie@kernel.org>
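For context, the core-side behaviour this conversion relies on is roughly the
following (an abridged sketch of the mapping step, not verbatim code from
drivers/spi/spi.c; spi_map_buf() is a core-internal helper): when a master
provides a can_dma() callback, the core DMA-maps each accepted transfer into
xfer->tx_sg/xfer->rx_sg before transfer_one() runs, sets cur_msg_mapped, and
unmaps everything again when the message is finalized.

/* Sketch only: how the core populates the per-transfer sg tables. */
static int spi_map_msg_sketch(struct spi_master *master,
                              struct spi_message *msg)
{
        struct device *dev = master->dev.parent;
        struct spi_transfer *xfer;
        int ret;

        list_for_each_entry(xfer, &msg->transfers, transfer_list) {
                /* The driver may veto DMA per transfer, e.g. short lengths */
                if (!master->can_dma(master, msg->spi, xfer))
                        continue;

                if (xfer->tx_buf) {
                        ret = spi_map_buf(master, dev, &xfer->tx_sg,
                                          (void *)xfer->tx_buf, xfer->len,
                                          DMA_TO_DEVICE);
                        if (ret)
                                return ret;
                }

                if (xfer->rx_buf) {
                        ret = spi_map_buf(master, dev, &xfer->rx_sg,
                                          xfer->rx_buf, xfer->len,
                                          DMA_FROM_DEVICE);
                        if (ret)
                                return ret;
                }
        }

        master->cur_msg_mapped = true;

        return 0;
}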
Diffstat (limited to 'drivers')
-rw-r--r--  drivers/spi/spi-dw-mid.c | 52
-rw-r--r--  drivers/spi/spi-dw.c     | 40
-rw-r--r--  drivers/spi/spi-dw.h     | 17
3 files changed, 43 insertions(+), 66 deletions(-)
diff --git a/drivers/spi/spi-dw-mid.c b/drivers/spi/spi-dw-mid.c
index e614190daef6..599dad40a3ec 100644
--- a/drivers/spi/spi-dw-mid.c
+++ b/drivers/spi/spi-dw-mid.c
@@ -69,6 +69,7 @@ static int mid_spi_dma_init(struct dw_spi *dws)
         rxs->hs_mode = LNW_DMA_HW_HS;
         rxs->cfg_mode = LNW_DMA_PER_TO_MEM;
         dws->rxchan->private = rxs;
+        dws->master->dma_rx = dws->rxchan;
 
         /* 2. Init tx channel */
         dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, dws);
@@ -78,6 +79,7 @@ static int mid_spi_dma_init(struct dw_spi *dws)
         txs->hs_mode = LNW_DMA_HW_HS;
         txs->cfg_mode = LNW_DMA_MEM_TO_PER;
         dws->txchan->private = txs;
+        dws->master->dma_tx = dws->txchan;
 
         dws->dma_inited = 1;
         return 0;
@@ -116,6 +118,17 @@ static irqreturn_t dma_transfer(struct dw_spi *dws)
         return IRQ_HANDLED;
 }
 
+static bool mid_spi_can_dma(struct spi_master *master, struct spi_device *spi,
+                struct spi_transfer *xfer)
+{
+        struct dw_spi *dws = spi_master_get_devdata(master);
+
+        if (!dws->dma_inited)
+                return false;
+
+        return xfer->len > dws->fifo_len;
+}
+
 static enum dma_slave_buswidth convert_dma_width(u32 dma_width) {
         if (dma_width == 1)
                 return DMA_SLAVE_BUSWIDTH_1_BYTE;
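Note: can_dma() doubles as the DMA-vs-PIO policy for the core. Transfers that
fit entirely within the controller FIFO (fifo_len bytes) stay on the
interrupt/poll path, where the cost of mapping and descriptor setup would
outweigh any gain; only larger transfers are DMA-mapped and handed to the
dmaengine channels.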
@@ -139,12 +152,13 @@ static void dw_spi_dma_tx_done(void *arg)
         spi_finalize_current_transfer(dws->master);
 }
 
-static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
+static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws,
+                struct spi_transfer *xfer)
 {
         struct dma_slave_config txconf;
         struct dma_async_tx_descriptor *txdesc;
 
-        if (!dws->tx_dma)
+        if (!xfer->tx_buf)
                 return NULL;
 
         txconf.direction = DMA_MEM_TO_DEV;
@@ -156,13 +170,9 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
 
         dmaengine_slave_config(dws->txchan, &txconf);
 
-        memset(&dws->tx_sgl, 0, sizeof(dws->tx_sgl));
-        dws->tx_sgl.dma_address = dws->tx_dma;
-        dws->tx_sgl.length = dws->len;
-
         txdesc = dmaengine_prep_slave_sg(dws->txchan,
-                                &dws->tx_sgl,
-                                1,
+                                xfer->tx_sg.sgl,
+                                xfer->tx_sg.nents,
                                 DMA_MEM_TO_DEV,
                                 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
         if (!txdesc)
@@ -188,12 +198,13 @@ static void dw_spi_dma_rx_done(void *arg)
         spi_finalize_current_transfer(dws->master);
 }
 
-static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
+static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws,
+                struct spi_transfer *xfer)
 {
         struct dma_slave_config rxconf;
         struct dma_async_tx_descriptor *rxdesc;
 
-        if (!dws->rx_dma)
+        if (!xfer->rx_buf)
                 return NULL;
 
         rxconf.direction = DMA_DEV_TO_MEM;
@@ -205,13 +216,9 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
 
         dmaengine_slave_config(dws->rxchan, &rxconf);
 
-        memset(&dws->rx_sgl, 0, sizeof(dws->rx_sgl));
-        dws->rx_sgl.dma_address = dws->rx_dma;
-        dws->rx_sgl.length = dws->len;
-
         rxdesc = dmaengine_prep_slave_sg(dws->rxchan,
-                                &dws->rx_sgl,
-                                1,
+                                xfer->rx_sg.sgl,
+                                xfer->rx_sg.nents,
                                 DMA_DEV_TO_MEM,
                                 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
         if (!rxdesc)
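Both prepare functions now follow the standard dmaengine slave-sg pattern fed
by the core-built sg tables, instead of hand-rolling a single-entry
scatterlist around a caller-supplied dma_addr_t. A generic sketch of that
pattern (the helper name and signature are illustrative, not from this
driver):

/* Illustrative only: prepare a descriptor over an sg table, attach the
 * completion callback, then submit and kick the channel.
 */
static int dw_spi_dma_submit_sketch(struct dma_chan *chan, struct sg_table *sg,
                                    enum dma_transfer_direction dir,
                                    dma_async_tx_callback done, void *arg)
{
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        desc = dmaengine_prep_slave_sg(chan, sg->sgl, sg->nents, dir,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -ENOMEM;

        desc->callback = done;
        desc->callback_param = arg;

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie))
                return -EIO;

        dma_async_issue_pending(chan);

        return 0;
}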
@@ -223,16 +230,16 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
         return rxdesc;
 }
 
-static int mid_spi_dma_setup(struct dw_spi *dws)
+static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
 {
         u16 dma_ctrl = 0;
 
         dw_writew(dws, DW_SPI_DMARDLR, 0xf);
         dw_writew(dws, DW_SPI_DMATDLR, 0x10);
 
-        if (dws->tx_dma)
+        if (xfer->tx_buf)
                 dma_ctrl |= SPI_DMA_TDMAE;
-        if (dws->rx_dma)
+        if (xfer->rx_buf)
                 dma_ctrl |= SPI_DMA_RDMAE;
         dw_writew(dws, DW_SPI_DMACR, dma_ctrl);
 
@@ -244,15 +251,15 @@ static int mid_spi_dma_setup(struct dw_spi *dws)
         return 0;
 }
 
-static int mid_spi_dma_transfer(struct dw_spi *dws)
+static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
 {
         struct dma_async_tx_descriptor *txdesc, *rxdesc;
 
         /* Prepare the TX dma transfer */
-        txdesc = dw_spi_dma_prepare_tx(dws);
+        txdesc = dw_spi_dma_prepare_tx(dws, xfer);
 
         /* Prepare the RX dma transfer */
-        rxdesc = dw_spi_dma_prepare_rx(dws);
+        rxdesc = dw_spi_dma_prepare_rx(dws, xfer);
 
         /* rx must be started before tx due to spi instinct */
         if (rxdesc) {
@@ -286,6 +293,7 @@ static struct dw_spi_dma_ops mid_dma_ops = {
         .dma_init = mid_spi_dma_init,
         .dma_exit = mid_spi_dma_exit,
         .dma_setup = mid_spi_dma_setup,
+        .can_dma = mid_spi_can_dma,
         .dma_transfer = mid_spi_dma_transfer,
         .dma_stop = mid_spi_dma_stop,
 };
diff --git a/drivers/spi/spi-dw.c b/drivers/spi/spi-dw.c
index d53cffe7ff22..2437bfcbf2f8 100644
--- a/drivers/spi/spi-dw.c
+++ b/drivers/spi/spi-dw.c
@@ -217,32 +217,6 @@ static void dw_reader(struct dw_spi *dws)
         }
 }
 
-/*
- * Note: first step is the protocol driver prepares
- * a dma-capable memory, and this func just need translate
- * the virt addr to physical
- */
-static int map_dma_buffers(struct spi_master *master,
-                struct spi_device *spi, struct spi_transfer *transfer)
-{
-        struct dw_spi *dws = spi_master_get_devdata(master);
-        struct chip_data *chip = spi_get_ctldata(spi);
-
-        if (!master->cur_msg->is_dma_mapped
-                || !dws->dma_inited
-                || !chip->enable_dma
-                || !dws->dma_ops)
-                return 0;
-
-        if (transfer->tx_dma)
-                dws->tx_dma = transfer->tx_dma;
-
-        if (transfer->rx_dma)
-                dws->rx_dma = transfer->rx_dma;
-
-        return 1;
-}
-
 static void int_error_stop(struct dw_spi *dws, const char *msg)
 {
         spi_reset_chip(dws);
@@ -322,11 +296,10 @@ static int dw_spi_transfer_one(struct spi_master *master,
         u32 cr0 = 0;
         int ret;
 
+        dws->dma_mapped = 0;
         dws->n_bytes = chip->n_bytes;
         dws->dma_width = chip->dma_width;
 
-        dws->rx_dma = transfer->rx_dma;
-        dws->tx_dma = transfer->tx_dma;
         dws->tx = (void *)transfer->tx_buf;
         dws->tx_end = dws->tx + transfer->len;
         dws->rx = transfer->rx_buf;
@@ -386,7 +359,8 @@ static int dw_spi_transfer_one(struct spi_master *master,
         dw_writew(dws, DW_SPI_CTRL0, cr0);
 
         /* Check if current transfer is a DMA transaction */
-        dws->dma_mapped = map_dma_buffers(master, spi, transfer);
+        if (master->can_dma && master->can_dma(master, spi, transfer))
+                dws->dma_mapped = master->cur_msg_mapped;
 
         /* For poll mode just disable all interrupts */
         spi_mask_intr(dws, 0xff);
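Note: cur_msg_mapped is set by the core only once it has actually mapped the
current message (see the sketch above), so mirroring it into dws->dma_mapped
replaces the old is_dma_mapped/enable_dma checks that map_dma_buffers() used
to perform.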
@@ -396,7 +370,7 @@ static int dw_spi_transfer_one(struct spi_master *master,
          * we only need set the TXEI IRQ, as TX/RX always happen syncronizely
          */
         if (dws->dma_mapped) {
-                ret = dws->dma_ops->dma_setup(dws);
+                ret = dws->dma_ops->dma_setup(dws, transfer);
                 if (ret < 0) {
                         spi_enable_chip(dws, 1);
                         return ret;
@@ -416,7 +390,7 @@ static int dw_spi_transfer_one(struct spi_master *master,
         spi_enable_chip(dws, 1);
 
         if (dws->dma_mapped) {
-                ret = dws->dma_ops->dma_transfer(dws);
+                ret = dws->dma_ops->dma_transfer(dws, transfer);
                 if (ret < 0)
                         return ret;
         }
@@ -470,8 +444,6 @@ static int dw_spi_setup(struct spi_device *spi)
 
                 chip->rx_threshold = 0;
                 chip->tx_threshold = 0;
-
-                chip->enable_dma = chip_info->enable_dma;
         }
 
         if (spi->bits_per_word == 8) {
@@ -584,6 +556,8 @@ int dw_spi_add_host(struct device *dev, struct dw_spi *dws)
                 if (ret) {
                         dev_warn(dev, "DMA init failed\n");
                         dws->dma_inited = 0;
+                } else {
+                        master->can_dma = dws->dma_ops->can_dma;
                 }
         }
 
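Note: master->can_dma is assigned only when channel initialization succeeded,
so on DMA-less configurations the core never attempts to map messages and the
driver falls back to PIO transparently.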
diff --git a/drivers/spi/spi-dw.h b/drivers/spi/spi-dw.h
index 7f130bd8f37a..f298df59381b 100644
--- a/drivers/spi/spi-dw.h
+++ b/drivers/spi/spi-dw.h
@@ -91,8 +91,10 @@ struct dw_spi;
 struct dw_spi_dma_ops {
         int (*dma_init)(struct dw_spi *dws);
         void (*dma_exit)(struct dw_spi *dws);
-        int (*dma_setup)(struct dw_spi *dws);
-        int (*dma_transfer)(struct dw_spi *dws);
+        int (*dma_setup)(struct dw_spi *dws, struct spi_transfer *xfer);
+        bool (*can_dma)(struct spi_master *master, struct spi_device *spi,
+                        struct spi_transfer *xfer);
+        int (*dma_transfer)(struct dw_spi *dws, struct spi_transfer *xfer);
         void (*dma_stop)(struct dw_spi *dws);
 };
 
@@ -117,20 +119,14 @@ struct dw_spi {
         void *rx;
         void *rx_end;
         int dma_mapped;
-        dma_addr_t rx_dma;
-        dma_addr_t tx_dma;
-        size_t rx_map_len;
-        size_t tx_map_len;
         u8 n_bytes;        /* current is a 1/2 bytes op */
         u32 dma_width;
         irqreturn_t (*transfer_handler)(struct dw_spi *dws);
 
-        /* Dma info */
+        /* DMA info */
         int dma_inited;
         struct dma_chan *txchan;
-        struct scatterlist tx_sgl;
         struct dma_chan *rxchan;
-        struct scatterlist rx_sgl;
         unsigned long dma_chan_busy;
         struct device *dma_dev;
         dma_addr_t dma_addr; /* phy address of the Data register */
@@ -206,14 +202,13 @@ static inline void spi_reset_chip(struct dw_spi *dws)
 
 /*
  * Each SPI slave device to work with dw_api controller should
- * has such a structure claiming its working mode (PIO/DMA etc),
+ * has such a structure claiming its working mode (poll or PIO/DMA),
  * which can be save in the "controller_data" member of the
  * struct spi_device.
  */
 struct dw_spi_chip {
         u8 poll_mode;        /* 1 for controller polling mode */
         u8 type;        /* SPI/SSP/MicroWire */
-        u8 enable_dma;
         void (*cs_control)(u32 command);
 };
 