diff options
| author | Mark Brown <broonie@linaro.org> | 2014-02-07 07:34:36 -0500 |
|---|---|---|
| committer | Mark Brown <broonie@linaro.org> | 2014-02-07 08:27:57 -0500 |
| commit | 90e73973d3e48b3f6b37ea944fdef8e1543d36fc (patch) | |
| tree | a71a8b0882276885641b8958a18a8ada72168c91 | |
| parent | 3f29588795bdd0d48f0ecc94e3ee06c128ca3300 (diff) | |
spi/s3c64xx: Remove S3C_DMA support
All the platforms which use the old S3C_DMA API have now been converted to
dmaengine so we can remove the legacy code from the driver, simplifying
maintenance.
Signed-off-by: Mark Brown <broonie@linaro.org>
| -rw-r--r-- | drivers/spi/spi-s3c64xx.c | 191 |
1 file changed, 0 insertions, 191 deletions
diff --git a/drivers/spi/spi-s3c64xx.c b/drivers/spi/spi-s3c64xx.c index 4490e8c499c0..18ec7e65a319 100644 --- a/drivers/spi/spi-s3c64xx.c +++ b/drivers/spi/spi-s3c64xx.c | |||
| @@ -34,10 +34,6 @@ | |||
| 34 | 34 | ||
| 35 | #include <linux/platform_data/spi-s3c64xx.h> | 35 | #include <linux/platform_data/spi-s3c64xx.h> |
| 36 | 36 | ||
| 37 | #ifdef CONFIG_S3C_DMA | ||
| 38 | #include <mach/dma.h> | ||
| 39 | #endif | ||
| 40 | |||
| 41 | #define MAX_SPI_PORTS 3 | 37 | #define MAX_SPI_PORTS 3 |
| 42 | #define S3C64XX_SPI_QUIRK_POLL (1 << 0) | 38 | #define S3C64XX_SPI_QUIRK_POLL (1 << 0) |
| 43 | 39 | ||
| @@ -200,9 +196,6 @@ struct s3c64xx_spi_driver_data { | |||
| 200 | unsigned cur_speed; | 196 | unsigned cur_speed; |
| 201 | struct s3c64xx_spi_dma_data rx_dma; | 197 | struct s3c64xx_spi_dma_data rx_dma; |
| 202 | struct s3c64xx_spi_dma_data tx_dma; | 198 | struct s3c64xx_spi_dma_data tx_dma; |
| 203 | #ifdef CONFIG_S3C_DMA | ||
| 204 | struct samsung_dma_ops *ops; | ||
| 205 | #endif | ||
| 206 | struct s3c64xx_spi_port_config *port_conf; | 199 | struct s3c64xx_spi_port_config *port_conf; |
| 207 | unsigned int port_id; | 200 | unsigned int port_id; |
| 208 | bool cs_gpio; | 201 | bool cs_gpio; |
| @@ -284,180 +277,6 @@ static void s3c64xx_spi_dmacb(void *data) | |||
| 284 | spin_unlock_irqrestore(&sdd->lock, flags); | 277 | spin_unlock_irqrestore(&sdd->lock, flags); |
| 285 | } | 278 | } |
| 286 | 279 | ||
| 287 | #ifdef CONFIG_S3C_DMA | ||
| 288 | /* FIXME: remove this section once arch/arm/mach-s3c64xx uses dmaengine */ | ||
| 289 | |||
| 290 | static struct s3c2410_dma_client s3c64xx_spi_dma_client = { | ||
| 291 | .name = "samsung-spi-dma", | ||
| 292 | }; | ||
| 293 | |||
| 294 | static int s3c64xx_spi_map_mssg(struct s3c64xx_spi_driver_data *sdd, | ||
| 295 | struct spi_message *msg) | ||
| 296 | { | ||
| 297 | struct device *dev = &sdd->pdev->dev; | ||
| 298 | struct spi_transfer *xfer; | ||
| 299 | |||
| 300 | if (is_polling(sdd) || msg->is_dma_mapped) | ||
| 301 | return 0; | ||
| 302 | |||
| 303 | /* First mark all xfer unmapped */ | ||
| 304 | list_for_each_entry(xfer, &msg->transfers, transfer_list) { | ||
| 305 | xfer->rx_dma = XFER_DMAADDR_INVALID; | ||
| 306 | xfer->tx_dma = XFER_DMAADDR_INVALID; | ||
| 307 | } | ||
| 308 | |||
| 309 | /* Map until end or first fail */ | ||
| 310 | list_for_each_entry(xfer, &msg->transfers, transfer_list) { | ||
| 311 | |||
| 312 | if (xfer->len <= ((FIFO_LVL_MASK(sdd) >> 1) + 1)) | ||
| 313 | continue; | ||
| 314 | |||
| 315 | if (xfer->tx_buf != NULL) { | ||
| 316 | xfer->tx_dma = dma_map_single(dev, | ||
| 317 | (void *)xfer->tx_buf, xfer->len, | ||
| 318 | DMA_TO_DEVICE); | ||
| 319 | if (dma_mapping_error(dev, xfer->tx_dma)) { | ||
| 320 | dev_err(dev, "dma_map_single Tx failed\n"); | ||
| 321 | xfer->tx_dma = XFER_DMAADDR_INVALID; | ||
| 322 | return -ENOMEM; | ||
| 323 | } | ||
| 324 | } | ||
| 325 | |||
| 326 | if (xfer->rx_buf != NULL) { | ||
| 327 | xfer->rx_dma = dma_map_single(dev, xfer->rx_buf, | ||
| 328 | xfer->len, DMA_FROM_DEVICE); | ||
| 329 | if (dma_mapping_error(dev, xfer->rx_dma)) { | ||
| 330 | dev_err(dev, "dma_map_single Rx failed\n"); | ||
| 331 | dma_unmap_single(dev, xfer->tx_dma, | ||
| 332 | xfer->len, DMA_TO_DEVICE); | ||
| 333 | xfer->tx_dma = XFER_DMAADDR_INVALID; | ||
| 334 | xfer->rx_dma = XFER_DMAADDR_INVALID; | ||
| 335 | return -ENOMEM; | ||
| 336 | } | ||
| 337 | } | ||
| 338 | } | ||
| 339 | |||
| 340 | return 0; | ||
| 341 | } | ||
| 342 | |||
| 343 | static void s3c64xx_spi_unmap_mssg(struct s3c64xx_spi_driver_data *sdd, | ||
| 344 | struct spi_message *msg) | ||
| 345 | { | ||
| 346 | struct device *dev = &sdd->pdev->dev; | ||
| 347 | struct spi_transfer *xfer; | ||
| 348 | |||
| 349 | if (is_polling(sdd) || msg->is_dma_mapped) | ||
| 350 | return; | ||
| 351 | |||
| 352 | list_for_each_entry(xfer, &msg->transfers, transfer_list) { | ||
| 353 | |||
| 354 | if (xfer->len <= ((FIFO_LVL_MASK(sdd) >> 1) + 1)) | ||
| 355 | continue; | ||
| 356 | |||
| 357 | if (xfer->rx_buf != NULL | ||
| 358 | && xfer->rx_dma != XFER_DMAADDR_INVALID) | ||
| 359 | dma_unmap_single(dev, xfer->rx_dma, | ||
| 360 | xfer->len, DMA_FROM_DEVICE); | ||
| 361 | |||
| 362 | if (xfer->tx_buf != NULL | ||
| 363 | && xfer->tx_dma != XFER_DMAADDR_INVALID) | ||
| 364 | dma_unmap_single(dev, xfer->tx_dma, | ||
| 365 | xfer->len, DMA_TO_DEVICE); | ||
| 366 | } | ||
| 367 | } | ||
| 368 | |||
| 369 | static void prepare_dma(struct s3c64xx_spi_dma_data *dma, | ||
| 370 | unsigned len, dma_addr_t buf) | ||
| 371 | { | ||
| 372 | struct s3c64xx_spi_driver_data *sdd; | ||
| 373 | struct samsung_dma_prep info; | ||
| 374 | struct samsung_dma_config config; | ||
| 375 | |||
| 376 | if (dma->direction == DMA_DEV_TO_MEM) { | ||
| 377 | sdd = container_of((void *)dma, | ||
| 378 | struct s3c64xx_spi_driver_data, rx_dma); | ||
| 379 | config.direction = sdd->rx_dma.direction; | ||
| 380 | config.fifo = sdd->sfr_start + S3C64XX_SPI_RX_DATA; | ||
| 381 | config.width = sdd->cur_bpw / 8; | ||
| 382 | sdd->ops->config((enum dma_ch)sdd->rx_dma.ch, &config); | ||
| 383 | } else { | ||
| 384 | sdd = container_of((void *)dma, | ||
| 385 | struct s3c64xx_spi_driver_data, tx_dma); | ||
| 386 | config.direction = sdd->tx_dma.direction; | ||
| 387 | config.fifo = sdd->sfr_start + S3C64XX_SPI_TX_DATA; | ||
| 388 | config.width = sdd->cur_bpw / 8; | ||
| 389 | sdd->ops->config((enum dma_ch)sdd->tx_dma.ch, &config); | ||
| 390 | } | ||
| 391 | |||
| 392 | info.cap = DMA_SLAVE; | ||
| 393 | info.len = len; | ||
| 394 | info.fp = s3c64xx_spi_dmacb; | ||
| 395 | info.fp_param = dma; | ||
| 396 | info.direction = dma->direction; | ||
| 397 | info.buf = buf; | ||
| 398 | |||
| 399 | sdd->ops->prepare((enum dma_ch)dma->ch, &info); | ||
| 400 | sdd->ops->trigger((enum dma_ch)dma->ch); | ||
| 401 | } | ||
| 402 | |||
| 403 | static int acquire_dma(struct s3c64xx_spi_driver_data *sdd) | ||
| 404 | { | ||
| 405 | struct samsung_dma_req req; | ||
| 406 | struct device *dev = &sdd->pdev->dev; | ||
| 407 | |||
| 408 | sdd->ops = samsung_dma_get_ops(); | ||
| 409 | |||
| 410 | req.cap = DMA_SLAVE; | ||
| 411 | req.client = &s3c64xx_spi_dma_client; | ||
| 412 | |||
| 413 | sdd->rx_dma.ch = (struct dma_chan *)(unsigned long)sdd->ops->request( | ||
| 414 | sdd->rx_dma.dmach, &req, dev, "rx"); | ||
| 415 | sdd->tx_dma.ch = (struct dma_chan *)(unsigned long)sdd->ops->request( | ||
| 416 | sdd->tx_dma.dmach, &req, dev, "tx"); | ||
| 417 | |||
| 418 | return 1; | ||
| 419 | } | ||
| 420 | |||
| 421 | static int s3c64xx_spi_prepare_transfer(struct spi_master *spi) | ||
| 422 | { | ||
| 423 | struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); | ||
| 424 | |||
| 425 | /* | ||
| 426 | * If DMA resource was not available during | ||
| 427 | * probe, no need to continue with dma requests | ||
| 428 | * else Acquire DMA channels | ||
| 429 | */ | ||
| 430 | while (!is_polling(sdd) && !acquire_dma(sdd)) | ||
| 431 | usleep_range(10000, 11000); | ||
| 432 | |||
| 433 | return 0; | ||
| 434 | } | ||
| 435 | |||
| 436 | static int s3c64xx_spi_unprepare_transfer(struct spi_master *spi) | ||
| 437 | { | ||
| 438 | struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); | ||
| 439 | |||
| 440 | /* Free DMA channels */ | ||
| 441 | if (!is_polling(sdd)) { | ||
| 442 | sdd->ops->release((enum dma_ch)sdd->rx_dma.ch, | ||
| 443 | &s3c64xx_spi_dma_client); | ||
| 444 | sdd->ops->release((enum dma_ch)sdd->tx_dma.ch, | ||
| 445 | &s3c64xx_spi_dma_client); | ||
| 446 | } | ||
| 447 | |||
| 448 | return 0; | ||
| 449 | } | ||
| 450 | |||
| 451 | static void s3c64xx_spi_dma_stop(struct s3c64xx_spi_driver_data *sdd, | ||
| 452 | struct s3c64xx_spi_dma_data *dma) | ||
| 453 | { | ||
| 454 | sdd->ops->stop((enum dma_ch)dma->ch); | ||
| 455 | } | ||
| 456 | |||
| 457 | #define s3c64xx_spi_can_dma NULL | ||
| 458 | |||
| 459 | #else | ||
| 460 | |||
| 461 | static int s3c64xx_spi_map_mssg(struct s3c64xx_spi_driver_data *sdd, | 280 | static int s3c64xx_spi_map_mssg(struct s3c64xx_spi_driver_data *sdd, |
| 462 | struct spi_message *msg) | 281 | struct spi_message *msg) |
| 463 | { | 282 | { |
| @@ -583,8 +402,6 @@ static bool s3c64xx_spi_can_dma(struct spi_master *master, | |||
| 583 | return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; | 402 | return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; |
| 584 | } | 403 | } |
| 585 | 404 | ||
| 586 | #endif | ||
| 587 | |||
| 588 | static void enable_datapath(struct s3c64xx_spi_driver_data *sdd, | 405 | static void enable_datapath(struct s3c64xx_spi_driver_data *sdd, |
| 589 | struct spi_device *spi, | 406 | struct spi_device *spi, |
| 590 | struct spi_transfer *xfer, int dma_mode) | 407 | struct spi_transfer *xfer, int dma_mode) |
| @@ -616,11 +433,7 @@ static void enable_datapath(struct s3c64xx_spi_driver_data *sdd, | |||
| 616 | chcfg |= S3C64XX_SPI_CH_TXCH_ON; | 433 | chcfg |= S3C64XX_SPI_CH_TXCH_ON; |
| 617 | if (dma_mode) { | 434 | if (dma_mode) { |
| 618 | modecfg |= S3C64XX_SPI_MODE_TXDMA_ON; | 435 | modecfg |= S3C64XX_SPI_MODE_TXDMA_ON; |
| 619 | #ifndef CONFIG_S3C_DMA | ||
| 620 | prepare_dma(&sdd->tx_dma, &xfer->tx_sg); | 436 | prepare_dma(&sdd->tx_dma, &xfer->tx_sg); |
| 621 | #else | ||
| 622 | prepare_dma(&sdd->tx_dma, xfer->len, xfer->tx_dma); | ||
| 623 | #endif | ||
| 624 | } else { | 437 | } else { |
| 625 | switch (sdd->cur_bpw) { | 438 | switch (sdd->cur_bpw) { |
| 626 | case 32: | 439 | case 32: |
| @@ -652,11 +465,7 @@ static void enable_datapath(struct s3c64xx_spi_driver_data *sdd, | |||
| 652 | writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) | 465 | writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) |
| 653 | | S3C64XX_SPI_PACKET_CNT_EN, | 466 | | S3C64XX_SPI_PACKET_CNT_EN, |
| 654 | regs + S3C64XX_SPI_PACKET_CNT); | 467 | regs + S3C64XX_SPI_PACKET_CNT); |
| 655 | #ifndef CONFIG_S3C_DMA | ||
| 656 | prepare_dma(&sdd->rx_dma, &xfer->rx_sg); | 468 | prepare_dma(&sdd->rx_dma, &xfer->rx_sg); |
| 657 | #else | ||
| 658 | prepare_dma(&sdd->rx_dma, xfer->len, xfer->rx_dma); | ||
| 659 | #endif | ||
| 660 | } | 469 | } |
| 661 | } | 470 | } |
| 662 | 471 | ||
