aboutsummaryrefslogtreecommitdiffstats
path: root/drivers
diff options
context:
space:
mode:
Diffstat (limited to 'drivers')
 drivers/net/forcedeth.c | 46 +++++++++++++++++++++++---------------------
 1 file changed, 27 insertions(+), 19 deletions(-)
diff --git a/drivers/net/forcedeth.c b/drivers/net/forcedeth.c
index b60a3041b64c..1094d292630f 100644
--- a/drivers/net/forcedeth.c
+++ b/drivers/net/forcedeth.c
@@ -719,7 +719,8 @@ static const struct register_test nv_registers_test[] = {
 struct nv_skb_map {
 	struct sk_buff *skb;
 	dma_addr_t dma;
-	unsigned int dma_len;
+	unsigned int dma_len:31;
+	unsigned int dma_single:1;
 	struct ring_desc_ex *first_tx_desc;
 	struct nv_skb_map *next_tx_ctx;
 };
@@ -1912,6 +1913,7 @@ static void nv_init_tx(struct net_device *dev)
 		np->tx_skb[i].skb = NULL;
 		np->tx_skb[i].dma = 0;
 		np->tx_skb[i].dma_len = 0;
+		np->tx_skb[i].dma_single = 0;
 		np->tx_skb[i].first_tx_desc = NULL;
 		np->tx_skb[i].next_tx_ctx = NULL;
 	}
@@ -1930,23 +1932,30 @@ static int nv_init_ring(struct net_device *dev)
 	return nv_alloc_rx_optimized(dev);
 }
 
-static int nv_release_txskb(struct net_device *dev, struct nv_skb_map* tx_skb)
+static void nv_unmap_txskb(struct fe_priv *np, struct nv_skb_map *tx_skb)
 {
-	struct fe_priv *np = netdev_priv(dev);
-
 	if (tx_skb->dma) {
-		pci_unmap_page(np->pci_dev, tx_skb->dma,
-			       tx_skb->dma_len,
-			       PCI_DMA_TODEVICE);
+		if (tx_skb->dma_single)
+			pci_unmap_single(np->pci_dev, tx_skb->dma,
+					 tx_skb->dma_len,
+					 PCI_DMA_TODEVICE);
+		else
+			pci_unmap_page(np->pci_dev, tx_skb->dma,
+				       tx_skb->dma_len,
+				       PCI_DMA_TODEVICE);
 		tx_skb->dma = 0;
 	}
+}
+
+static int nv_release_txskb(struct fe_priv *np, struct nv_skb_map *tx_skb)
+{
+	nv_unmap_txskb(np, tx_skb);
 	if (tx_skb->skb) {
 		dev_kfree_skb_any(tx_skb->skb);
 		tx_skb->skb = NULL;
 		return 1;
-	} else {
-		return 0;
 	}
+	return 0;
 }
 
 static void nv_drain_tx(struct net_device *dev)
@@ -1964,10 +1973,11 @@ static void nv_drain_tx(struct net_device *dev)
 			np->tx_ring.ex[i].bufhigh = 0;
 			np->tx_ring.ex[i].buflow = 0;
 		}
-		if (nv_release_txskb(dev, &np->tx_skb[i]))
+		if (nv_release_txskb(np, &np->tx_skb[i]))
 			dev->stats.tx_dropped++;
 		np->tx_skb[i].dma = 0;
 		np->tx_skb[i].dma_len = 0;
+		np->tx_skb[i].dma_single = 0;
 		np->tx_skb[i].first_tx_desc = NULL;
 		np->tx_skb[i].next_tx_ctx = NULL;
 	}
@@ -2171,6 +2181,7 @@ static int nv_start_xmit(struct sk_buff *skb, struct net_device *dev)
 		np->put_tx_ctx->dma = pci_map_single(np->pci_dev, skb->data + offset, bcnt,
 						PCI_DMA_TODEVICE);
 		np->put_tx_ctx->dma_len = bcnt;
+		np->put_tx_ctx->dma_single = 1;
 		put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma);
 		put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags);
 
@@ -2196,6 +2207,7 @@ static int nv_start_xmit(struct sk_buff *skb, struct net_device *dev)
 			np->put_tx_ctx->dma = pci_map_page(np->pci_dev, frag->page, frag->page_offset+offset, bcnt,
 							PCI_DMA_TODEVICE);
 			np->put_tx_ctx->dma_len = bcnt;
+			np->put_tx_ctx->dma_single = 0;
 			put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma);
 			put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags);
 
@@ -2291,6 +2303,7 @@ static int nv_start_xmit_optimized(struct sk_buff *skb, struct net_device *dev)
 		np->put_tx_ctx->dma = pci_map_single(np->pci_dev, skb->data + offset, bcnt,
 						PCI_DMA_TODEVICE);
 		np->put_tx_ctx->dma_len = bcnt;
+		np->put_tx_ctx->dma_single = 1;
 		put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma));
 		put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma));
 		put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags);
@@ -2317,6 +2330,7 @@ static int nv_start_xmit_optimized(struct sk_buff *skb, struct net_device *dev)
 			np->put_tx_ctx->dma = pci_map_page(np->pci_dev, frag->page, frag->page_offset+offset, bcnt,
 							PCI_DMA_TODEVICE);
 			np->put_tx_ctx->dma_len = bcnt;
+			np->put_tx_ctx->dma_single = 0;
 			put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma));
 			put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma));
 			put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags);
@@ -2434,10 +2448,7 @@ static int nv_tx_done(struct net_device *dev, int limit)
 		dprintk(KERN_DEBUG "%s: nv_tx_done: flags 0x%x.\n",
 			dev->name, flags);
 
-		pci_unmap_page(np->pci_dev, np->get_tx_ctx->dma,
-			       np->get_tx_ctx->dma_len,
-			       PCI_DMA_TODEVICE);
-		np->get_tx_ctx->dma = 0;
+		nv_unmap_txskb(np, np->get_tx_ctx);
 
 		if (np->desc_ver == DESC_VER_1) {
 			if (flags & NV_TX_LASTPACKET) {
@@ -2502,10 +2513,7 @@ static int nv_tx_done_optimized(struct net_device *dev, int limit)
 		dprintk(KERN_DEBUG "%s: nv_tx_done_optimized: flags 0x%x.\n",
 			dev->name, flags);
 
-		pci_unmap_page(np->pci_dev, np->get_tx_ctx->dma,
-			       np->get_tx_ctx->dma_len,
-			       PCI_DMA_TODEVICE);
-		np->get_tx_ctx->dma = 0;
+		nv_unmap_txskb(np, np->get_tx_ctx);
 
 		if (flags & NV_TX2_LASTPACKET) {
 			if (!(flags & NV_TX2_ERROR))
@@ -5091,7 +5099,7 @@ static int nv_loopback_test(struct net_device *dev)
 		dprintk(KERN_DEBUG "%s: loopback - did not receive test packet\n", dev->name);
 	}
 
-	pci_unmap_page(np->pci_dev, test_dma_addr,
+	pci_unmap_single(np->pci_dev, test_dma_addr,
 		       (skb_end_pointer(tx_skb) - tx_skb->data),
 		       PCI_DMA_TODEVICE);
 	dev_kfree_skb_any(tx_skb);