 drivers/dma/mv_xor.c | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/drivers/dma/mv_xor.c b/drivers/dma/mv_xor.c
index 53fb0c8365b0..766b68ed505c 100644
--- a/drivers/dma/mv_xor.c
+++ b/drivers/dma/mv_xor.c
@@ -497,8 +497,8 @@ mv_xor_tx_submit(struct dma_async_tx_descriptor *tx)
 	if (!mv_can_chain(grp_start))
 		goto submit_done;
 
-	dev_dbg(mv_chan_to_devp(mv_chan), "Append to last desc %x\n",
-		old_chain_tail->async_tx.phys);
+	dev_dbg(mv_chan_to_devp(mv_chan), "Append to last desc %pa\n",
+		&old_chain_tail->async_tx.phys);
 
 	/* fix up the hardware chain */
 	mv_desc_set_next_desc(old_chain_tail, grp_start->async_tx.phys);
@@ -527,7 +527,8 @@ submit_done:
 /* returns the number of allocated descriptors */
 static int mv_xor_alloc_chan_resources(struct dma_chan *chan)
 {
-	char *hw_desc;
+	void *virt_desc;
+	dma_addr_t dma_desc;
 	int idx;
 	struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
 	struct mv_xor_desc_slot *slot = NULL;
@@ -542,17 +543,16 @@ static int mv_xor_alloc_chan_resources(struct dma_chan *chan)
542 " %d descriptor slots", idx); 543 " %d descriptor slots", idx);
543 break; 544 break;
544 } 545 }
545 hw_desc = (char *) mv_chan->dma_desc_pool_virt; 546 virt_desc = mv_chan->dma_desc_pool_virt;
546 slot->hw_desc = (void *) &hw_desc[idx * MV_XOR_SLOT_SIZE]; 547 slot->hw_desc = virt_desc + idx * MV_XOR_SLOT_SIZE;
547 548
548 dma_async_tx_descriptor_init(&slot->async_tx, chan); 549 dma_async_tx_descriptor_init(&slot->async_tx, chan);
549 slot->async_tx.tx_submit = mv_xor_tx_submit; 550 slot->async_tx.tx_submit = mv_xor_tx_submit;
550 INIT_LIST_HEAD(&slot->chain_node); 551 INIT_LIST_HEAD(&slot->chain_node);
551 INIT_LIST_HEAD(&slot->slot_node); 552 INIT_LIST_HEAD(&slot->slot_node);
552 INIT_LIST_HEAD(&slot->tx_list); 553 INIT_LIST_HEAD(&slot->tx_list);
553 hw_desc = (char *) mv_chan->dma_desc_pool; 554 dma_desc = mv_chan->dma_desc_pool;
554 slot->async_tx.phys = 555 slot->async_tx.phys = dma_desc + idx * MV_XOR_SLOT_SIZE;
555 (dma_addr_t) &hw_desc[idx * MV_XOR_SLOT_SIZE];
556 slot->idx = idx++; 556 slot->idx = idx++;
557 557
558 spin_lock_bh(&mv_chan->lock); 558 spin_lock_bh(&mv_chan->lock);
@@ -582,8 +582,8 @@ mv_xor_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 	int slot_cnt;
 
 	dev_dbg(mv_chan_to_devp(mv_chan),
-		"%s dest: %x src %x len: %u flags: %ld\n",
-		__func__, dest, src, len, flags);
+		"%s dest: %pad src %pad len: %u flags: %ld\n",
+		__func__, &dest, &src, len, flags);
 	if (unlikely(len < MV_XOR_MIN_BYTE_COUNT))
 		return NULL;
 
@@ -626,8 +626,8 @@ mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src,
 	BUG_ON(len > MV_XOR_MAX_BYTE_COUNT);
 
 	dev_dbg(mv_chan_to_devp(mv_chan),
-		"%s src_cnt: %d len: dest %x %u flags: %ld\n",
-		__func__, src_cnt, len, dest, flags);
+		"%s src_cnt: %d len: %u dest %pad flags: %ld\n",
+		__func__, src_cnt, len, &dest, flags);
 
 	spin_lock_bh(&mv_chan->lock);
 	slot_cnt = mv_chan_xor_slot_count(len, src_cnt);
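
The patch makes two related changes: descriptor-pool address arithmetic is done directly on a void * (CPU view) and on a dma_addr_t (bus view) instead of round-tripping through char * casts, and dma_addr_t values are printed with the kernel's %pa/%pad specifiers, which take the address by reference and so work whether dma_addr_t is 32 or 64 bits wide. The following is a minimal sketch of the same idiom, not code from mv_xor.c: the show_slot() helper, its device pointer and pool/slot-size arguments are hypothetical stand-ins for mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool and MV_XOR_SLOT_SIZE.

#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Sketch only; names below are placeholders, not part of the mv_xor driver. */
static void show_slot(struct device *dev, void *pool_virt,
		      dma_addr_t pool_dma, size_t slot_size, int idx)
{
	/* CPU-visible address of the slot: void * arithmetic, no casts. */
	void *slot_virt = pool_virt + idx * slot_size;

	/* Bus/DMA address of the slot: dma_addr_t is a plain integer type. */
	dma_addr_t slot_dma = pool_dma + idx * slot_size;

	/*
	 * %pad prints a dma_addr_t passed by reference, so the format string
	 * is correct regardless of the width of dma_addr_t; printing the
	 * value through %x would truncate it when dma_addr_t is 64-bit.
	 */
	dev_dbg(dev, "slot %d: virt %p dma %pad\n", idx, slot_virt, &slot_dma);
}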