author		Sujith <Sujith.Manoharan@atheros.com>	2008-08-14 03:57:16 -0400
committer	John W. Linville <linville@tuxdriver.com>	2008-08-29 16:24:04 -0400
commit		ff9b662dab1fcd885fb728de3a8c13ebb5455117 (patch)
tree		f07e329f9a9f4f446f407318fd1694d3df84f6d9 /drivers/net/wireless/ath9k/xmit.c
parent		dc2222a85fd63103b9aad0d4b77c4d87b78c58a2 (diff)
ath9k: Miscellaneous fixes
This patch removes ath_vap_listen() and the dma wrapper macros. Also, inline abuse is cleaned up and a few typos are fixed.

Signed-off-by: Sujith Manoharan <Sujith.Manoharan@atheros.com>
Signed-off-by: John W. Linville <linville@tuxdriver.com>
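For context, the removed wrappers were thin helpers around the DMA-address members of struct ath_buf and struct ath_tx_control. A minimal sketch of the pattern being dropped, assuming macro bodies roughly like the ones in the ath9k core header (the definitions themselves are not part of this diff); the call sites shown are taken from the hunks below:

	/* presumed shape of the removed wrappers (illustrative only) */
	#define get_dma_mem_context(var, field)		(&((var)->field))
	#define copy_dma_mem_context(dst, src)		(*(dst) = *(src))

	/* old call site, via the wrappers */
	copy_dma_mem_context(get_dma_mem_context(bf, bf_dmacontext),
			     get_dma_mem_context(txctl, dmacontext));

	/* equivalent direct assignment used after this patch */
	bf->bf_dmacontext = txctl->dmacontext;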
Diffstat (limited to 'drivers/net/wireless/ath9k/xmit.c')
-rw-r--r--	drivers/net/wireless/ath9k/xmit.c	29
1 file changed, 11 insertions, 18 deletions
diff --git a/drivers/net/wireless/ath9k/xmit.c b/drivers/net/wireless/ath9k/xmit.c
index 8d31ad7952be..04f94d2c8010 100644
--- a/drivers/net/wireless/ath9k/xmit.c
+++ b/drivers/net/wireless/ath9k/xmit.c
@@ -499,7 +499,6 @@ static void ath_tx_complete_buf(struct ath_softc *sc,
 {
 	struct sk_buff *skb = bf->bf_mpdu;
 	struct ath_xmit_status tx_status;
-	dma_addr_t *pa;
 
 	/*
 	 * Set retry information.
@@ -519,9 +518,8 @@ static void ath_tx_complete_buf(struct ath_softc *sc,
 		tx_status.flags |= ATH_TX_XRETRY;
 	}
 	/* Unmap this frame */
-	pa = get_dma_mem_context(bf, bf_dmacontext);
 	pci_unmap_single(sc->pdev,
-			 *pa,
+			 bf->bf_dmacontext,
 			 skb->len,
 			 PCI_DMA_TODEVICE);
 	/* complete this frame */
@@ -1172,11 +1170,8 @@ static void ath_tx_complete_aggr_rifs(struct ath_softc *sc,
 					tbf->bf_lastfrm->bf_desc);
 
 				/* copy the DMA context */
-				copy_dma_mem_context(
-					get_dma_mem_context(tbf,
-						bf_dmacontext),
-					get_dma_mem_context(bf_last,
-						bf_dmacontext));
+				tbf->bf_dmacontext =
+					bf_last->bf_dmacontext;
 			}
 			list_add_tail(&tbf->list, &bf_head);
 		} else {
@@ -1185,7 +1180,7 @@ static void ath_tx_complete_aggr_rifs(struct ath_softc *sc,
 			 * software retry
 			 */
 			ath9k_hw_cleartxdesc(sc->sc_ah,
-				bf->bf_lastfrm->bf_desc);
+					     bf->bf_lastfrm->bf_desc);
 		}
 
 		/*
@@ -2045,8 +2040,7 @@ static int ath_tx_start_dma(struct ath_softc *sc,
 	/*
 	 * Save the DMA context in the first ath_buf
 	 */
-	copy_dma_mem_context(get_dma_mem_context(bf, bf_dmacontext),
-			     get_dma_mem_context(txctl, dmacontext));
+	bf->bf_dmacontext = txctl->dmacontext;
 
 	/*
 	 * Formulate first tx descriptor with tx controls.
@@ -2127,25 +2121,26 @@ static int ath_tx_start_dma(struct ath_softc *sc,
 
 static void xmit_map_sg(struct ath_softc *sc,
 			struct sk_buff *skb,
-			dma_addr_t *pa,
 			struct ath_tx_control *txctl)
 {
 	struct ath_xmit_status tx_status;
 	struct ath_atx_tid *tid;
 	struct scatterlist sg;
 
-	*pa = pci_map_single(sc->pdev, skb->data, skb->len, PCI_DMA_TODEVICE);
+	txctl->dmacontext = pci_map_single(sc->pdev, skb->data,
+					   skb->len, PCI_DMA_TODEVICE);
 
 	/* setup S/G list */
 	memset(&sg, 0, sizeof(struct scatterlist));
-	sg_dma_address(&sg) = *pa;
+	sg_dma_address(&sg) = txctl->dmacontext;
 	sg_dma_len(&sg) = skb->len;
 
 	if (ath_tx_start_dma(sc, skb, &sg, 1, txctl) != 0) {
 		/*
 		 * We have to do drop frame here.
 		 */
-		pci_unmap_single(sc->pdev, *pa, skb->len, PCI_DMA_TODEVICE);
+		pci_unmap_single(sc->pdev, txctl->dmacontext,
+				 skb->len, PCI_DMA_TODEVICE);
 
 		tx_status.retries = 0;
 		tx_status.flags = ATH_TX_ERROR;
@@ -2419,9 +2414,7 @@ int ath_tx_start(struct ath_softc *sc, struct sk_buff *skb)
 		 * ath_tx_start_dma() will be called either synchronously
 		 * or asynchrounsly once DMA is complete.
 		 */
-		xmit_map_sg(sc, skb,
-			    get_dma_mem_context(&txctl, dmacontext),
-			    &txctl);
+		xmit_map_sg(sc, skb, &txctl);
 	else
 		ath_node_put(sc, txctl.an, ATH9K_BH_STATUS_CHANGE);
 