Diffstat (limited to 'drivers/net/wireless')
-rw-r--r--  drivers/net/wireless/ath/ath9k/ath9k.h |  3
-rw-r--r--  drivers/net/wireless/ath/ath9k/xmit.c  | 53
2 files changed, 30 insertions, 26 deletions
diff --git a/drivers/net/wireless/ath/ath9k/ath9k.h b/drivers/net/wireless/ath/ath9k/ath9k.h
index 6aa8fa6010a..1a19aea8c88 100644
--- a/drivers/net/wireless/ath/ath9k/ath9k.h
+++ b/drivers/net/wireless/ath/ath9k/ath9k.h
@@ -207,7 +207,6 @@ struct ath_txq {
 	struct list_head txq_fifo_pending;
 	u8 txq_headidx;
 	u8 txq_tailidx;
-	int pending_frames;
 };
 
 struct ath_atx_ac {
@@ -245,7 +244,6 @@ struct ath_buf {
 	struct ath_buf_state bf_state;
 	dma_addr_t bf_dmacontext;
 	struct ath_wiphy *aphy;
-	struct ath_txq *txq;
 };
 
 struct ath_atx_tid {
@@ -296,6 +294,7 @@ struct ath_tx {
 	struct list_head txbuf;
 	struct ath_txq txq[ATH9K_NUM_TX_QUEUES];
 	struct ath_descdma txdma;
+	int pending_frames[WME_NUM_AC];
 };
 
 struct ath_rx_edma {
diff --git a/drivers/net/wireless/ath/ath9k/xmit.c b/drivers/net/wireless/ath/ath9k/xmit.c
index 9bff6c52c2e..875b8b47fef 100644
--- a/drivers/net/wireless/ath/ath9k/xmit.c
+++ b/drivers/net/wireless/ath/ath9k/xmit.c
@@ -1760,7 +1760,7 @@ int ath_tx_start(struct ieee80211_hw *hw, struct sk_buff *skb,
 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
 	struct ath_txq *txq = txctl->txq;
 	struct ath_buf *bf;
-	int r;
+	int q, r;
 
 	bf = ath_tx_get_buffer(sc);
 	if (!bf) {
@@ -1768,14 +1768,6 @@ int ath_tx_start(struct ieee80211_hw *hw, struct sk_buff *skb,
 		return -1;
 	}
 
-	bf->txq = txctl->txq;
-	spin_lock_bh(&bf->txq->axq_lock);
-	if (++bf->txq->pending_frames > ATH_MAX_QDEPTH && !txq->stopped) {
-		ath_mac80211_stop_queue(sc, skb_get_queue_mapping(skb));
-		txq->stopped = 1;
-	}
-	spin_unlock_bh(&bf->txq->axq_lock);
-
 	r = ath_tx_setup_buffer(hw, bf, skb, txctl);
 	if (unlikely(r)) {
 		ath_print(common, ATH_DBG_FATAL, "TX mem alloc failure\n");
@@ -1796,6 +1788,17 @@ int ath_tx_start(struct ieee80211_hw *hw, struct sk_buff *skb,
 		return r;
 	}
 
+	q = skb_get_queue_mapping(skb);
+	if (q >= 4)
+		q = 0;
+
+	spin_lock_bh(&txq->axq_lock);
+	if (++sc->tx.pending_frames[q] > ATH_MAX_QDEPTH && !txq->stopped) {
+		ath_mac80211_stop_queue(sc, skb_get_queue_mapping(skb));
+		txq->stopped = 1;
+	}
+	spin_unlock_bh(&txq->axq_lock);
+
 	ath_tx_start_dma(sc, bf, txctl);
 
 	return 0;
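The hunks above replace the old per-buffer accounting with a per-AC counter: the skb's queue mapping is clamped to one of the four WME access categories, sc->tx.pending_frames[q] is incremented under txq->axq_lock, and the mac80211 queue is stopped once the count exceeds ATH_MAX_QDEPTH. Below is a minimal self-contained userspace sketch of that enqueue-side throttling; the fake_* types, the printf stand-in for ath_mac80211_stop_queue(), and the threshold value are assumptions for illustration only, while pending_frames, WME_NUM_AC and ATH_MAX_QDEPTH are names taken from the patch.

/* Userspace sketch of the enqueue-side accounting added above. In the
 * driver this runs under txq->axq_lock and calls ath_mac80211_stop_queue();
 * here a bool and a printf stand in for the queue state and that call. */
#include <stdbool.h>
#include <stdio.h>

#define WME_NUM_AC     4     /* number of WME access categories */
#define ATH_MAX_QDEPTH 128   /* placeholder threshold, not the driver's value */

struct fake_tx  { int  pending_frames[WME_NUM_AC]; };
struct fake_txq { bool stopped; };

/* Count a frame queued on access category q; stop the queue once the
 * per-AC backlog crosses the threshold. */
static void fake_tx_enqueue(struct fake_tx *tx, struct fake_txq *txq, int q)
{
	if (q >= WME_NUM_AC)    /* clamp odd queue mappings, as the patch does */
		q = 0;

	if (++tx->pending_frames[q] > ATH_MAX_QDEPTH && !txq->stopped) {
		printf("stop mac80211 queue %d\n", q);
		txq->stopped = true;
	}
}

int main(void)
{
	struct fake_tx  tx  = { { 0 } };
	struct fake_txq txq = { false };
	int i;

	for (i = 0; i <= ATH_MAX_QDEPTH; i++)
		fake_tx_enqueue(&tx, &txq, 2);   /* flood one AC until it stops */
	return 0;
}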
@@ -1865,7 +1868,7 @@ static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb,
 	struct ieee80211_tx_info *tx_info = IEEE80211_SKB_CB(skb);
 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
 	struct ieee80211_hdr * hdr = (struct ieee80211_hdr *)skb->data;
-	int padpos, padsize;
+	int q, padpos, padsize;
 
 	ath_print(common, ATH_DBG_XMIT, "TX complete: skb: %p\n", skb);
 
@@ -1904,8 +1907,16 @@ static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb,
 
 	if (unlikely(tx_info->pad[0] & ATH_TX_INFO_FRAME_TYPE_INTERNAL))
 		ath9k_tx_status(hw, skb);
-	else
+	else {
+		q = skb_get_queue_mapping(skb);
+		if (q >= 4)
+			q = 0;
+
+		if (--sc->tx.pending_frames[q] < 0)
+			sc->tx.pending_frames[q] = 0;
+
 		ieee80211_tx_status(hw, skb);
+	}
 }
 
 static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
@@ -1926,13 +1937,6 @@ static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
 		tx_flags |= ATH_TX_XRETRY;
 	}
 
-	if (bf->txq) {
-		spin_lock_bh(&bf->txq->axq_lock);
-		bf->txq->pending_frames--;
-		spin_unlock_bh(&bf->txq->axq_lock);
-		bf->txq = NULL;
-	}
-
 	dma_unmap_single(sc->dev, bf->bf_dmacontext, skb->len, DMA_TO_DEVICE);
 	ath_tx_complete(sc, skb, bf->aphy, tx_flags);
 	ath_debug_stat_tx(sc, txq, bf, ts);
@@ -2020,13 +2024,14 @@ static void ath_wake_mac80211_queue(struct ath_softc *sc, struct ath_txq *txq)
 {
 	int qnum;
 
+	qnum = ath_get_mac80211_qnum(txq->axq_class, sc);
+	if (qnum == -1)
+		return;
+
 	spin_lock_bh(&txq->axq_lock);
-	if (txq->stopped && txq->pending_frames < ATH_MAX_QDEPTH) {
-		qnum = ath_get_mac80211_qnum(txq->axq_class, sc);
-		if (qnum != -1) {
-			ath_mac80211_start_queue(sc, qnum);
-			txq->stopped = 0;
-		}
+	if (txq->stopped && sc->tx.pending_frames[qnum] < ATH_MAX_QDEPTH) {
+		ath_mac80211_start_queue(sc, qnum);
+		txq->stopped = 0;
 	}
 	spin_unlock_bh(&txq->axq_lock);
 }
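On the completion side, the same counter is decremented and clamped at zero, and ath_wake_mac80211_queue() now resolves the mac80211 queue number before taking the lock so it can compare sc->tx.pending_frames[qnum] against ATH_MAX_QDEPTH when deciding to restart the queue. A matching userspace sketch, under the same assumptions as the one above (the fake_* types, printf stand-ins and threshold are illustrative, not driver API):

/* Completion/wake half of the sketch above. The driver does this under
 * txq->axq_lock via ath_get_mac80211_qnum() and ath_mac80211_start_queue();
 * the qnum parameter and printf below are simplified stand-ins. */
#include <stdbool.h>
#include <stdio.h>

#define WME_NUM_AC     4
#define ATH_MAX_QDEPTH 128   /* placeholder threshold, not the driver's value */

struct fake_tx  { int  pending_frames[WME_NUM_AC]; };
struct fake_txq { bool stopped; };

/* A frame on access category q completed: drop the backlog count, but never
 * below zero, mirroring the --pending_frames[q] < 0 clamp in the patch. */
static void fake_tx_complete(struct fake_tx *tx, int q)
{
	if (q >= WME_NUM_AC)
		q = 0;
	if (--tx->pending_frames[q] < 0)
		tx->pending_frames[q] = 0;
}

/* Restart a stopped queue once its AC has drained below the threshold;
 * qnum == -1 models ath_get_mac80211_qnum() failing to map the queue. */
static void fake_wake_queue(struct fake_tx *tx, struct fake_txq *txq, int qnum)
{
	if (qnum == -1)
		return;
	if (txq->stopped && tx->pending_frames[qnum] < ATH_MAX_QDEPTH) {
		printf("wake mac80211 queue %d\n", qnum);
		txq->stopped = false;
	}
}

int main(void)
{
	struct fake_tx  tx  = { { ATH_MAX_QDEPTH + 1, 0, 0, 0 } };
	struct fake_txq txq = { true };    /* queue was stopped while backlogged */

	fake_tx_complete(&tx, 0);          /* one frame drains ...             */
	fake_tx_complete(&tx, 0);          /* ... and another                  */
	fake_wake_queue(&tx, &txq, 0);     /* below the threshold again: wake  */
	return 0;
}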