-rw-r--r--	drivers/net/mv643xx_eth.c	41
1 file changed, 22 insertions(+), 19 deletions(-)
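The patch below replaces the NULL device argument in the driver's DMA API calls with mp->dev->dev.parent, i.e. the struct device of the platform device sitting behind the registered net_device. As a hedged illustration only (the helper name is invented, not part of the driver), the pointer chain being used looks like this:

#include <linux/netdevice.h>

/*
 * Illustrative sketch: given the net_device the driver registers,
 * dev.parent is the platform device that actually performs DMA,
 * which is what the DMA API expects instead of NULL.
 */
static inline struct device *example_dma_dev(struct net_device *ndev)
{
	return ndev->dev.parent;
}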
diff --git a/drivers/net/mv643xx_eth.c b/drivers/net/mv643xx_eth.c
index a400d7115f78..6bb5af35eda6 100644
--- a/drivers/net/mv643xx_eth.c
+++ b/drivers/net/mv643xx_eth.c
@@ -569,7 +569,7 @@ static int rxq_process(struct rx_queue *rxq, int budget)
 		if (rxq->rx_curr_desc == rxq->rx_ring_size)
 			rxq->rx_curr_desc = 0;
 
-		dma_unmap_single(NULL, rx_desc->buf_ptr,
+		dma_unmap_single(mp->dev->dev.parent, rx_desc->buf_ptr,
 				 rx_desc->buf_size, DMA_FROM_DEVICE);
 		rxq->rx_desc_count--;
 		rx++;
@@ -678,8 +678,9 @@ static int rxq_refill(struct rx_queue *rxq, int budget)
 
 		rx_desc = rxq->rx_desc_area + rx;
 
-		rx_desc->buf_ptr = dma_map_single(NULL, skb->data,
-						  mp->skb_size, DMA_FROM_DEVICE);
+		rx_desc->buf_ptr = dma_map_single(mp->dev->dev.parent,
+						  skb->data, mp->skb_size,
+						  DMA_FROM_DEVICE);
 		rx_desc->buf_size = mp->skb_size;
 		rxq->rx_skb[rx] = skb;
 		wmb();
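For context, a minimal sketch of the streaming-DMA pattern the RX path follows once a real device is passed, with an explicit dma_mapping_error() check that this patch itself does not add (the helpers and their names are illustrative, not taken from the driver):

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Map an RX buffer for device writes; 'dev' must be the device doing DMA. */
static int example_map_rx(struct device *dev, struct sk_buff *skb,
			  size_t size, dma_addr_t *mapping)
{
	*mapping = dma_map_single(dev, skb->data, size, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, *mapping))
		return -ENOMEM;
	return 0;
}

/* Unmap with the same device, size and direction once the packet arrives. */
static void example_unmap_rx(struct device *dev, dma_addr_t mapping, size_t size)
{
	dma_unmap_single(dev, mapping, size, DMA_FROM_DEVICE);
}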
@@ -718,6 +719,7 @@ static inline unsigned int has_tiny_unaligned_frags(struct sk_buff *skb)
 
 static void txq_submit_frag_skb(struct tx_queue *txq, struct sk_buff *skb)
 {
+	struct mv643xx_eth_private *mp = txq_to_mp(txq);
 	int nr_frags = skb_shinfo(skb)->nr_frags;
 	int frag;
 
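txq_to_mp() is an existing helper in this driver; the hunk above only starts using it so txq_submit_frag_skb() can reach the DMA-capable device. As an assumption-labelled sketch (struct and field names invented), such a back-pointer helper is typically built on container_of():

#include <linux/kernel.h>

struct example_txq {
	int index;
	/* ... descriptor ring state ... */
};

struct example_priv {
	struct example_txq txq[8];	/* queues embedded in the private struct */
};

/* Recover the enclosing private struct from a pointer to one of its queues. */
static struct example_priv *example_txq_to_priv(struct example_txq *txq)
{
	return container_of(txq, struct example_priv, txq[txq->index]);
}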
@@ -746,10 +748,10 @@ static void txq_submit_frag_skb(struct tx_queue *txq, struct sk_buff *skb)
 
 		desc->l4i_chk = 0;
 		desc->byte_cnt = this_frag->size;
-		desc->buf_ptr = dma_map_page(NULL, this_frag->page,
-					     this_frag->page_offset,
-					     this_frag->size,
-					     DMA_TO_DEVICE);
+		desc->buf_ptr = dma_map_page(mp->dev->dev.parent,
+					     this_frag->page,
+					     this_frag->page_offset,
+					     this_frag->size, DMA_TO_DEVICE);
 	}
 }
 
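Paged fragments are mapped with dma_map_page() rather than dma_map_single(). A small sketch under the same assumption that 'dev' is the DMA-capable parent device (helper name invented; kernels of this era expose page/page_offset/size directly in skb_frag_t, as the hunk above does):

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Map one skb fragment for transmit. */
static dma_addr_t example_map_frag(struct device *dev, skb_frag_t *frag)
{
	return dma_map_page(dev, frag->page, frag->page_offset,
			    frag->size, DMA_TO_DEVICE);
}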
@@ -826,7 +828,8 @@ no_csum:
 
 	desc->l4i_chk = l4i_chk;
 	desc->byte_cnt = length;
-	desc->buf_ptr = dma_map_single(NULL, skb->data, length, DMA_TO_DEVICE);
+	desc->buf_ptr = dma_map_single(mp->dev->dev.parent, skb->data,
+				       length, DMA_TO_DEVICE);
 
 	__skb_queue_tail(&txq->tx_skb, skb);
 
@@ -956,10 +959,10 @@ static int txq_reclaim(struct tx_queue *txq, int budget, int force)
 		}
 
 		if (cmd_sts & TX_FIRST_DESC) {
-			dma_unmap_single(NULL, desc->buf_ptr,
+			dma_unmap_single(mp->dev->dev.parent, desc->buf_ptr,
 					 desc->byte_cnt, DMA_TO_DEVICE);
 		} else {
-			dma_unmap_page(NULL, desc->buf_ptr,
+			dma_unmap_page(mp->dev->dev.parent, desc->buf_ptr,
 				       desc->byte_cnt, DMA_TO_DEVICE);
 		}
 
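On reclaim, each descriptor must be unmapped with the routine that matches how it was mapped (single for the head buffer, page for fragments), using the same device pointer, size and direction. A compressed sketch of that pairing (the flag mirrors the driver's TX_FIRST_DESC test; the helper itself is invented):

#include <linux/types.h>
#include <linux/dma-mapping.h>

/*
 * Undo the TX mapping recorded in a descriptor; 'is_first' distinguishes
 * the dma_map_single()ed head buffer from dma_map_page()ed fragments.
 */
static void example_unmap_tx(struct device *dev, dma_addr_t buf,
			     size_t len, bool is_first)
{
	if (is_first)
		dma_unmap_single(dev, buf, len, DMA_TO_DEVICE);
	else
		dma_unmap_page(dev, buf, len, DMA_TO_DEVICE);
}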
@@ -1894,9 +1897,9 @@ static int rxq_init(struct mv643xx_eth_private *mp, int index)
 						    mp->rx_desc_sram_size);
 		rxq->rx_desc_dma = mp->rx_desc_sram_addr;
 	} else {
-		rxq->rx_desc_area = dma_alloc_coherent(NULL, size,
-							&rxq->rx_desc_dma,
-							GFP_KERNEL);
+		rxq->rx_desc_area = dma_alloc_coherent(mp->dev->dev.parent,
+						       size, &rxq->rx_desc_dma,
+						       GFP_KERNEL);
 	}
 
 	if (rxq->rx_desc_area == NULL) {
@@ -1947,7 +1950,7 @@ out_free:
 	if (index == 0 && size <= mp->rx_desc_sram_size)
 		iounmap(rxq->rx_desc_area);
 	else
-		dma_free_coherent(NULL, size,
+		dma_free_coherent(mp->dev->dev.parent, size,
 				  rxq->rx_desc_area,
 				  rxq->rx_desc_dma);
 
@@ -1979,7 +1982,7 @@ static void rxq_deinit(struct rx_queue *rxq)
 	    rxq->rx_desc_area_size <= mp->rx_desc_sram_size)
 		iounmap(rxq->rx_desc_area);
 	else
-		dma_free_coherent(NULL, rxq->rx_desc_area_size,
+		dma_free_coherent(mp->dev->dev.parent, rxq->rx_desc_area_size,
 				  rxq->rx_desc_area, rxq->rx_desc_dma);
 
 	kfree(rxq->rx_skb);
@@ -2007,9 +2010,9 @@ static int txq_init(struct mv643xx_eth_private *mp, int index)
 						    mp->tx_desc_sram_size);
 		txq->tx_desc_dma = mp->tx_desc_sram_addr;
 	} else {
-		txq->tx_desc_area = dma_alloc_coherent(NULL, size,
-							&txq->tx_desc_dma,
-							GFP_KERNEL);
+		txq->tx_desc_area = dma_alloc_coherent(mp->dev->dev.parent,
+						       size, &txq->tx_desc_dma,
+						       GFP_KERNEL);
 	}
 
 	if (txq->tx_desc_area == NULL) {
@@ -2053,7 +2056,7 @@ static void txq_deinit(struct tx_queue *txq)
 	    txq->tx_desc_area_size <= mp->tx_desc_sram_size)
 		iounmap(txq->tx_desc_area);
 	else
-		dma_free_coherent(NULL, txq->tx_desc_area_size,
+		dma_free_coherent(mp->dev->dev.parent, txq->tx_desc_area_size,
 				  txq->tx_desc_area, txq->tx_desc_dma);
 }
 
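The descriptor rings, unlike the packet buffers, live in coherent DMA memory, so the same device pointer has to reach dma_alloc_coherent()/dma_free_coherent() as well. A minimal allocate/free sketch for a ring of 'size' bytes (struct and function names are illustrative, error handling trimmed):

#include <linux/dma-mapping.h>
#include <linux/gfp.h>

struct example_ring {
	void *descs;		/* CPU view of the descriptor array */
	dma_addr_t descs_dma;	/* bus address programmed into the hardware */
	size_t size;
};

static int example_ring_alloc(struct device *dev, struct example_ring *ring,
			      size_t size)
{
	ring->descs = dma_alloc_coherent(dev, size, &ring->descs_dma, GFP_KERNEL);
	if (ring->descs == NULL)
		return -ENOMEM;
	ring->size = size;
	return 0;
}

static void example_ring_free(struct device *dev, struct example_ring *ring)
{
	dma_free_coherent(dev, ring->size, ring->descs, ring->descs_dma);
}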