author	Vinod Koul <vinod.koul@intel.com>	2017-08-22 12:33:43 -0400
committer	Vinod Koul <vinod.koul@intel.com>	2017-08-22 12:33:43 -0400
commit	491bea00a13600eaa146923b910b70e9a326e950 (patch)
tree	06963b79f67db3f739be43efe8149a3574442da6
parent	a85c6f1b2921cbd2f54666a52804f407c4a064fe (diff)
dmaengine: altera: remove DMA_SG
Commit c678fa66341c ("dmaengine: remove DMA_SG as it is dead code in kernel") removed DMA_SG from the dmaengine subsystem but missed this newly added driver, so remove it from here as well.

Reported-by: Stephen Rothwell <sfr@canb.auug.org.au>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
-rw-r--r--	drivers/dma/altera-msgdma.c	94
1 file changed, 0 insertions(+), 94 deletions(-)
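
Note (not part of this patch): a client that previously relied on the removed DMA_SG capability can fall back to the DMA_MEMCPY capability this driver still advertises, by issuing one memcpy descriptor per scatterlist segment. The sketch below is a minimal illustration only, not code from this commit; it assumes the caller has already DMA-mapped both scatterlists, that source and destination segments pair up one-to-one with equal lengths, and the helper name sg_copy_via_memcpy is made up for the example.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: copy nents paired segments using DMA_MEMCPY descriptors */
static int sg_copy_via_memcpy(struct dma_chan *chan,
			      struct scatterlist *dst_sg,
			      struct scatterlist *src_sg,
			      unsigned int nents)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie = 0;
	unsigned int i;

	for (i = 0; i < nents; i++) {
		/* One hardware descriptor per (dst, src) segment pair */
		tx = dmaengine_prep_dma_memcpy(chan,
					       sg_dma_address(dst_sg),
					       sg_dma_address(src_sg),
					       sg_dma_len(src_sg),
					       DMA_CTRL_ACK);
		if (!tx)
			return -ENOMEM;

		cookie = dmaengine_submit(tx);
		if (dma_submit_error(cookie))
			return -EIO;

		dst_sg = sg_next(dst_sg);
		src_sg = sg_next(src_sg);
	}

	/* Kick the engine and wait for the last descriptor to complete */
	dma_async_issue_pending(chan);
	return dma_sync_wait(chan, cookie) == DMA_COMPLETE ? 0 : -ETIMEDOUT;
}
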
diff --git a/drivers/dma/altera-msgdma.c b/drivers/dma/altera-msgdma.c
index 33b87b413793..75208b43d137 100644
--- a/drivers/dma/altera-msgdma.c
+++ b/drivers/dma/altera-msgdma.c
@@ -386,98 +386,6 @@ msgdma_prep_memcpy(struct dma_chan *dchan, dma_addr_t dma_dst,
 }
 
 /**
- * msgdma_prep_sg - prepare descriptors for a memory sg transaction
- * @dchan: DMA channel
- * @dst_sg: Destination scatter list
- * @dst_sg_len: Number of entries in destination scatter list
- * @src_sg: Source scatter list
- * @src_sg_len: Number of entries in source scatter list
- * @flags: transfer ack flags
- *
- * Return: Async transaction descriptor on success and NULL on failure
- */
-static struct dma_async_tx_descriptor *
-msgdma_prep_sg(struct dma_chan *dchan, struct scatterlist *dst_sg,
-	       unsigned int dst_sg_len, struct scatterlist *src_sg,
-	       unsigned int src_sg_len, unsigned long flags)
-{
-	struct msgdma_device *mdev = to_mdev(dchan);
-	struct msgdma_sw_desc *new, *first = NULL;
-	void *desc = NULL;
-	size_t len, dst_avail, src_avail;
-	dma_addr_t dma_dst, dma_src;
-	u32 desc_cnt = 0, i;
-	struct scatterlist *sg;
-
-	for_each_sg(src_sg, sg, src_sg_len, i)
-		desc_cnt += DIV_ROUND_UP(sg_dma_len(sg), MSGDMA_MAX_TRANS_LEN);
-
-	spin_lock_bh(&mdev->lock);
-	if (desc_cnt > mdev->desc_free_cnt) {
-		spin_unlock_bh(&mdev->lock);
-		dev_dbg(mdev->dev, "mdev %p descs are not available\n", mdev);
-		return NULL;
-	}
-	mdev->desc_free_cnt -= desc_cnt;
-	spin_unlock_bh(&mdev->lock);
-
-	dst_avail = sg_dma_len(dst_sg);
-	src_avail = sg_dma_len(src_sg);
-
-	/* Run until we are out of scatterlist entries */
-	while (true) {
-		/* Allocate and populate the descriptor */
-		new = msgdma_get_descriptor(mdev);
-
-		desc = &new->hw_desc;
-		len = min_t(size_t, src_avail, dst_avail);
-		len = min_t(size_t, len, MSGDMA_MAX_TRANS_LEN);
-		if (len == 0)
-			goto fetch;
-		dma_dst = sg_dma_address(dst_sg) + sg_dma_len(dst_sg) -
-			dst_avail;
-		dma_src = sg_dma_address(src_sg) + sg_dma_len(src_sg) -
-			src_avail;
-
-		msgdma_desc_config(desc, dma_dst, dma_src, len,
-				   MSGDMA_DESC_STRIDE_RW);
-		dst_avail -= len;
-		src_avail -= len;
-
-		if (!first)
-			first = new;
-		else
-			list_add_tail(&new->node, &first->tx_list);
-fetch:
-		/* Fetch the next dst scatterlist entry */
-		if (dst_avail == 0) {
-			if (dst_sg_len == 0)
-				break;
-			dst_sg = sg_next(dst_sg);
-			if (dst_sg == NULL)
-				break;
-			dst_sg_len--;
-			dst_avail = sg_dma_len(dst_sg);
-		}
-		/* Fetch the next src scatterlist entry */
-		if (src_avail == 0) {
-			if (src_sg_len == 0)
-				break;
-			src_sg = sg_next(src_sg);
-			if (src_sg == NULL)
-				break;
-			src_sg_len--;
-			src_avail = sg_dma_len(src_sg);
-		}
-	}
-
-	msgdma_desc_config_eod(desc);
-	first->async_tx.flags = flags;
-
-	return &first->async_tx;
-}
-
-/**
  * msgdma_prep_slave_sg - prepare descriptors for a slave sg transaction
  *
  * @dchan: DMA channel
@@ -943,7 +851,6 @@ static int msgdma_probe(struct platform_device *pdev)
 	/* Set DMA capabilities */
 	dma_cap_zero(dma_dev->cap_mask);
 	dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);
-	dma_cap_set(DMA_SG, dma_dev->cap_mask);
 	dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);
 
 	dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
@@ -962,7 +869,6 @@ static int msgdma_probe(struct platform_device *pdev)
 
 	dma_dev->copy_align = DMAENGINE_ALIGN_4_BYTES;
 	dma_dev->device_prep_dma_memcpy = msgdma_prep_memcpy;
-	dma_dev->device_prep_dma_sg = msgdma_prep_sg;
 	dma_dev->device_prep_slave_sg = msgdma_prep_slave_sg;
 	dma_dev->device_config = msgdma_dma_config;
 