Diffstat (limited to 'drivers/net/wireless/ath/ath9k/xmit.c')
-rw-r--r--  drivers/net/wireless/ath/ath9k/xmit.c  346
1 file changed, 141 insertions(+), 205 deletions(-)
diff --git a/drivers/net/wireless/ath/ath9k/xmit.c b/drivers/net/wireless/ath/ath9k/xmit.c
index 3779b8977d47..52dadfc38841 100644
--- a/drivers/net/wireless/ath/ath9k/xmit.c
+++ b/drivers/net/wireless/ath/ath9k/xmit.c
@@ -14,6 +14,7 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
+#include <linux/dma-mapping.h>
 #include "ath9k.h"
 #include "ar9003_mac.h"
 
@@ -53,7 +54,7 @@ static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
				struct ath_txq *txq, struct list_head *bf_q,
				struct ath_tx_status *ts, int txok, int sendbar);
 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
-			     struct list_head *head);
+			     struct list_head *head, bool internal);
 static void ath_buf_set_rate(struct ath_softc *sc, struct ath_buf *bf, int len);
 static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf,
			     struct ath_tx_status *ts, int nframes, int nbad,
@@ -377,8 +378,7 @@ static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq,
			bf_next = bf->bf_next;
 
			bf->bf_state.bf_type |= BUF_XRETRY;
-			if ((sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) ||
-			    !bf->bf_stale || bf_next != NULL)
+			if (!bf->bf_stale || bf_next != NULL)
				list_move_tail(&bf->list, &bf_head);
 
			ath_tx_rc_status(sc, bf, ts, 1, 1, 0, false);
@@ -463,20 +463,14 @@ static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq,
			}
		}
 
-		if (!(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) &&
-		    bf_next == NULL) {
-			/*
-			 * Make sure the last desc is reclaimed if it
-			 * not a holding desc.
-			 */
-			if (!bf_last->bf_stale)
-				list_move_tail(&bf->list, &bf_head);
-			else
-				INIT_LIST_HEAD(&bf_head);
-		} else {
-			BUG_ON(list_empty(bf_q));
+		/*
+		 * Make sure the last desc is reclaimed if it
+		 * not a holding desc.
+		 */
+		if (!bf_last->bf_stale || bf_next != NULL)
			list_move_tail(&bf->list, &bf_head);
-		}
+		else
+			INIT_LIST_HEAD(&bf_head);
 
		if (!txpending || (tid->state & AGGR_CLEANUP)) {
			/*
@@ -572,11 +566,8 @@ static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq,
 
	rcu_read_unlock();
 
-	if (needreset) {
-		spin_unlock_bh(&sc->sc_pcu_lock);
+	if (needreset)
		ath_reset(sc, false);
-		spin_lock_bh(&sc->sc_pcu_lock);
-	}
 }
 
 static u32 ath_lookup_rate(struct ath_softc *sc, struct ath_buf *bf,
@@ -837,7 +828,7 @@ static void ath_tx_sched_aggr(struct ath_softc *sc, struct ath_txq *txq,
			bf->bf_state.bf_type &= ~BUF_AGGR;
			ath9k_hw_clr11n_aggr(sc->sc_ah, bf->bf_desc);
			ath_buf_set_rate(sc, bf, fi->framelen);
-			ath_tx_txqaddbuf(sc, txq, &bf_q);
+			ath_tx_txqaddbuf(sc, txq, &bf_q, false);
			continue;
		}
 
843 834
@@ -849,7 +840,7 @@ static void ath_tx_sched_aggr(struct ath_softc *sc, struct ath_txq *txq,
		/* anchor last desc of aggregate */
		ath9k_hw_set11n_aggr_last(sc->sc_ah, bf->bf_lastbf->bf_desc);
 
-		ath_tx_txqaddbuf(sc, txq, &bf_q);
+		ath_tx_txqaddbuf(sc, txq, &bf_q, false);
		TX_STAT_INC(txq->axq_qnum, a_aggr);
 
	} while (txq->axq_ampdu_depth < ATH_AGGR_MIN_QDEPTH &&
@@ -1085,7 +1076,6 @@ struct ath_txq *ath_txq_setup(struct ath_softc *sc, int qtype, int subtype)
		txq->txq_headidx = txq->txq_tailidx = 0;
		for (i = 0; i < ATH_TXFIFO_DEPTH; i++)
			INIT_LIST_HEAD(&txq->txq_fifo[i]);
-		INIT_LIST_HEAD(&txq->txq_fifo_pending);
	}
	return &sc->tx.txq[axq_qnum];
 }
@@ -1155,13 +1145,8 @@ static bool bf_is_ampdu_not_probing(struct ath_buf *bf)
	return bf_isampdu(bf) && !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE);
 }
 
-/*
- * Drain a given TX queue (could be Beacon or Data)
- *
- * This assumes output has been stopped and
- * we do not need to block ath_tx_tasklet.
- */
-void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq, bool retry_tx)
+static void ath_drain_txq_list(struct ath_softc *sc, struct ath_txq *txq,
+			       struct list_head *list, bool retry_tx)
 {
	struct ath_buf *bf, *lastbf;
	struct list_head bf_head;
@@ -1170,93 +1155,63 @@ void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq, bool retry_tx)
	memset(&ts, 0, sizeof(ts));
	INIT_LIST_HEAD(&bf_head);
 
-	for (;;) {
-		spin_lock_bh(&txq->axq_lock);
-
-		if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
-			if (list_empty(&txq->txq_fifo[txq->txq_tailidx])) {
-				txq->txq_headidx = txq->txq_tailidx = 0;
-				spin_unlock_bh(&txq->axq_lock);
-				break;
-			} else {
-				bf = list_first_entry(&txq->txq_fifo[txq->txq_tailidx],
-						      struct ath_buf, list);
-			}
-		} else {
-			if (list_empty(&txq->axq_q)) {
-				txq->axq_link = NULL;
-				spin_unlock_bh(&txq->axq_lock);
-				break;
-			}
-			bf = list_first_entry(&txq->axq_q, struct ath_buf,
-					      list);
-
-			if (bf->bf_stale) {
-				list_del(&bf->list);
-				spin_unlock_bh(&txq->axq_lock);
-
-				ath_tx_return_buffer(sc, bf);
-				continue;
-			}
+	while (!list_empty(list)) {
+		bf = list_first_entry(list, struct ath_buf, list);
+
+		if (bf->bf_stale) {
+			list_del(&bf->list);
+
+			ath_tx_return_buffer(sc, bf);
+			continue;
		}
 
		lastbf = bf->bf_lastbf;
-
-		if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
-			list_cut_position(&bf_head,
-					  &txq->txq_fifo[txq->txq_tailidx],
-					  &lastbf->list);
-			INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH);
-		} else {
-			/* remove ath_buf's of the same mpdu from txq */
-			list_cut_position(&bf_head, &txq->axq_q, &lastbf->list);
-		}
+		list_cut_position(&bf_head, list, &lastbf->list);
 
		txq->axq_depth--;
		if (bf_is_ampdu_not_probing(bf))
			txq->axq_ampdu_depth--;
-		spin_unlock_bh(&txq->axq_lock);
 
+		spin_unlock_bh(&txq->axq_lock);
		if (bf_isampdu(bf))
			ath_tx_complete_aggr(sc, txq, bf, &bf_head, &ts, 0,
					     retry_tx);
		else
			ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, 0, 0);
+		spin_lock_bh(&txq->axq_lock);
	}
+}
 
+/*
+ * Drain a given TX queue (could be Beacon or Data)
+ *
+ * This assumes output has been stopped and
+ * we do not need to block ath_tx_tasklet.
+ */
+void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq, bool retry_tx)
+{
	spin_lock_bh(&txq->axq_lock);
-	txq->axq_tx_inprogress = false;
-	spin_unlock_bh(&txq->axq_lock);
-
	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
-		spin_lock_bh(&txq->axq_lock);
-		while (!list_empty(&txq->txq_fifo_pending)) {
-			bf = list_first_entry(&txq->txq_fifo_pending,
-					      struct ath_buf, list);
-			list_cut_position(&bf_head,
-					  &txq->txq_fifo_pending,
-					  &bf->bf_lastbf->list);
-			spin_unlock_bh(&txq->axq_lock);
+		int idx = txq->txq_tailidx;
 
-			if (bf_isampdu(bf))
-				ath_tx_complete_aggr(sc, txq, bf, &bf_head,
-						     &ts, 0, retry_tx);
-			else
-				ath_tx_complete_buf(sc, bf, txq, &bf_head,
-						    &ts, 0, 0);
-			spin_lock_bh(&txq->axq_lock);
+		while (!list_empty(&txq->txq_fifo[idx])) {
+			ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx],
+					   retry_tx);
+
+			INCR(idx, ATH_TXFIFO_DEPTH);
		}
-		spin_unlock_bh(&txq->axq_lock);
+		txq->txq_tailidx = idx;
	}
 
+	txq->axq_link = NULL;
+	txq->axq_tx_inprogress = false;
+	ath_drain_txq_list(sc, txq, &txq->axq_q, retry_tx);
+
	/* flush any pending frames if aggregation is enabled */
-	if (sc->sc_flags & SC_OP_TXAGGR) {
-		if (!retry_tx) {
-			spin_lock_bh(&txq->axq_lock);
-			ath_txq_drain_pending_buffers(sc, txq);
-			spin_unlock_bh(&txq->axq_lock);
-		}
-	}
+	if ((sc->sc_flags & SC_OP_TXAGGR) && !retry_tx)
+		ath_txq_drain_pending_buffers(sc, txq);
+
+	spin_unlock_bh(&txq->axq_lock);
 }
 
 bool ath_drain_all_txq(struct ath_softc *sc, bool retry_tx)
@@ -1370,11 +1325,13 @@ void ath_txq_schedule(struct ath_softc *sc, struct ath_txq *txq)
  * assume the descriptors are already chained together by caller.
  */
 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
-			     struct list_head *head)
+			     struct list_head *head, bool internal)
 {
	struct ath_hw *ah = sc->sc_ah;
	struct ath_common *common = ath9k_hw_common(ah);
-	struct ath_buf *bf;
+	struct ath_buf *bf, *bf_last;
+	bool puttxbuf = false;
+	bool edma;
 
	/*
	 * Insert the frame on the outbound list and
@@ -1384,51 +1341,49 @@ static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
	if (list_empty(head))
		return;
 
+	edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA);
	bf = list_first_entry(head, struct ath_buf, list);
+	bf_last = list_entry(head->prev, struct ath_buf, list);
 
	ath_dbg(common, ATH_DBG_QUEUE,
		"qnum: %d, txq depth: %d\n", txq->axq_qnum, txq->axq_depth);
 
-	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
-		if (txq->axq_depth >= ATH_TXFIFO_DEPTH) {
-			list_splice_tail_init(head, &txq->txq_fifo_pending);
-			return;
-		}
-		if (!list_empty(&txq->txq_fifo[txq->txq_headidx]))
-			ath_dbg(common, ATH_DBG_XMIT,
-				"Initializing tx fifo %d which is non-empty\n",
-				txq->txq_headidx);
-		INIT_LIST_HEAD(&txq->txq_fifo[txq->txq_headidx]);
-		list_splice_init(head, &txq->txq_fifo[txq->txq_headidx]);
+	if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) {
+		list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]);
		INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH);
-		TX_STAT_INC(txq->axq_qnum, puttxbuf);
-		ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr);
-		ath_dbg(common, ATH_DBG_XMIT, "TXDP[%u] = %llx (%p)\n",
-			txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc);
+		puttxbuf = true;
	} else {
		list_splice_tail_init(head, &txq->axq_q);
 
-		if (txq->axq_link == NULL) {
-			TX_STAT_INC(txq->axq_qnum, puttxbuf);
-			ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr);
-			ath_dbg(common, ATH_DBG_XMIT, "TXDP[%u] = %llx (%p)\n",
-				txq->axq_qnum, ito64(bf->bf_daddr),
-				bf->bf_desc);
-		} else {
-			*txq->axq_link = bf->bf_daddr;
+		if (txq->axq_link) {
+			ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr);
			ath_dbg(common, ATH_DBG_XMIT,
				"link[%u] (%p)=%llx (%p)\n",
				txq->axq_qnum, txq->axq_link,
				ito64(bf->bf_daddr), bf->bf_desc);
-		}
-		ath9k_hw_get_desc_link(ah, bf->bf_lastbf->bf_desc,
-				       &txq->axq_link);
+		} else if (!edma)
+			puttxbuf = true;
+
+		txq->axq_link = bf_last->bf_desc;
+	}
+
+	if (puttxbuf) {
+		TX_STAT_INC(txq->axq_qnum, puttxbuf);
+		ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr);
+		ath_dbg(common, ATH_DBG_XMIT, "TXDP[%u] = %llx (%p)\n",
+			txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc);
+	}
+
+	if (!edma) {
		TX_STAT_INC(txq->axq_qnum, txstart);
		ath9k_hw_txstart(ah, txq->axq_qnum);
	}
-	txq->axq_depth++;
-	if (bf_is_ampdu_not_probing(bf))
-		txq->axq_ampdu_depth++;
+
+	if (!internal) {
+		txq->axq_depth++;
+		if (bf_is_ampdu_not_probing(bf))
+			txq->axq_ampdu_depth++;
+	}
 }
 
 static void ath_tx_send_ampdu(struct ath_softc *sc, struct ath_atx_tid *tid,
@@ -1470,7 +1425,7 @@ static void ath_tx_send_ampdu(struct ath_softc *sc, struct ath_atx_tid *tid,
	TX_STAT_INC(txctl->txq->axq_qnum, a_queued_hw);
	bf->bf_lastbf = bf;
	ath_buf_set_rate(sc, bf, fi->framelen);
-	ath_tx_txqaddbuf(sc, txctl->txq, &bf_head);
+	ath_tx_txqaddbuf(sc, txctl->txq, &bf_head, false);
 }
 
 static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
@@ -1490,7 +1445,7 @@ static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
	bf->bf_lastbf = bf;
	fi = get_frame_info(bf->bf_mpdu);
	ath_buf_set_rate(sc, bf, fi->framelen);
-	ath_tx_txqaddbuf(sc, txq, bf_head);
+	ath_tx_txqaddbuf(sc, txq, bf_head, false);
	TX_STAT_INC(txq->axq_qnum, queued);
 }
 
@@ -2077,6 +2032,38 @@ static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf,
	tx_info->status.rates[tx_rateindex].count = ts->ts_longretry + 1;
 }
 
+static void ath_tx_process_buffer(struct ath_softc *sc, struct ath_txq *txq,
+				  struct ath_tx_status *ts, struct ath_buf *bf,
+				  struct list_head *bf_head)
+{
+	int txok;
+
+	txq->axq_depth--;
+	txok = !(ts->ts_status & ATH9K_TXERR_MASK);
+	txq->axq_tx_inprogress = false;
+	if (bf_is_ampdu_not_probing(bf))
+		txq->axq_ampdu_depth--;
+
+	spin_unlock_bh(&txq->axq_lock);
+
+	if (!bf_isampdu(bf)) {
+		/*
+		 * This frame is sent out as a single frame.
+		 * Use hardware retry status for this frame.
+		 */
+		if (ts->ts_status & ATH9K_TXERR_XRETRY)
+			bf->bf_state.bf_type |= BUF_XRETRY;
+		ath_tx_rc_status(sc, bf, ts, 1, txok ? 0 : 1, txok, true);
+		ath_tx_complete_buf(sc, bf, txq, bf_head, ts, txok, 0);
+	} else
+		ath_tx_complete_aggr(sc, txq, bf, bf_head, ts, txok, true);
+
+	spin_lock_bh(&txq->axq_lock);
+
+	if (sc->sc_flags & SC_OP_TXAGGR)
+		ath_txq_schedule(sc, txq);
+}
+
 static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
 {
	struct ath_hw *ah = sc->sc_ah;
@@ -2085,20 +2072,18 @@ static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
	struct list_head bf_head;
	struct ath_desc *ds;
	struct ath_tx_status ts;
-	int txok;
	int status;
 
	ath_dbg(common, ATH_DBG_QUEUE, "tx queue %d (%x), link %p\n",
		txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum),
		txq->axq_link);
 
+	spin_lock_bh(&txq->axq_lock);
	for (;;) {
-		spin_lock_bh(&txq->axq_lock);
		if (list_empty(&txq->axq_q)) {
			txq->axq_link = NULL;
			if (sc->sc_flags & SC_OP_TXAGGR)
				ath_txq_schedule(sc, txq);
-			spin_unlock_bh(&txq->axq_lock);
			break;
		}
		bf = list_first_entry(&txq->axq_q, struct ath_buf, list);
@@ -2114,13 +2099,11 @@ static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
		bf_held = NULL;
		if (bf->bf_stale) {
			bf_held = bf;
-			if (list_is_last(&bf_held->list, &txq->axq_q)) {
-				spin_unlock_bh(&txq->axq_lock);
+			if (list_is_last(&bf_held->list, &txq->axq_q))
				break;
-			} else {
-				bf = list_entry(bf_held->list.next,
-						struct ath_buf, list);
-			}
+
+			bf = list_entry(bf_held->list.next, struct ath_buf,
+					list);
		}
 
		lastbf = bf->bf_lastbf;
@@ -2128,10 +2111,9 @@ static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
 
		memset(&ts, 0, sizeof(ts));
		status = ath9k_hw_txprocdesc(ah, ds, &ts);
-		if (status == -EINPROGRESS) {
-			spin_unlock_bh(&txq->axq_lock);
+		if (status == -EINPROGRESS)
			break;
-		}
+
		TX_STAT_INC(txq->axq_qnum, txprocdesc);
 
		/*
@@ -2145,42 +2127,14 @@ static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
			list_cut_position(&bf_head,
				&txq->axq_q, lastbf->list.prev);
 
-		txq->axq_depth--;
-		txok = !(ts.ts_status & ATH9K_TXERR_MASK);
-		txq->axq_tx_inprogress = false;
-		if (bf_held)
+		if (bf_held) {
			list_del(&bf_held->list);
-
-		if (bf_is_ampdu_not_probing(bf))
-			txq->axq_ampdu_depth--;
-
-		spin_unlock_bh(&txq->axq_lock);
-
-		if (bf_held)
			ath_tx_return_buffer(sc, bf_held);
-
-		if (!bf_isampdu(bf)) {
-			/*
-			 * This frame is sent out as a single frame.
-			 * Use hardware retry status for this frame.
-			 */
-			if (ts.ts_status & ATH9K_TXERR_XRETRY)
-				bf->bf_state.bf_type |= BUF_XRETRY;
-			ath_tx_rc_status(sc, bf, &ts, 1, txok ? 0 : 1, txok, true);
		}
 
-		if (bf_isampdu(bf))
-			ath_tx_complete_aggr(sc, txq, bf, &bf_head, &ts, txok,
-					     true);
-		else
-			ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, txok, 0);
-
-		spin_lock_bh(&txq->axq_lock);
-
-		if (sc->sc_flags & SC_OP_TXAGGR)
-			ath_txq_schedule(sc, txq);
-		spin_unlock_bh(&txq->axq_lock);
+		ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head);
	}
+	spin_unlock_bh(&txq->axq_lock);
 }
 
 static void ath_tx_complete_poll_work(struct work_struct *work)
@@ -2213,7 +2167,9 @@ static void ath_tx_complete_poll_work(struct work_struct *work)
	if (needreset) {
		ath_dbg(ath9k_hw_common(sc->sc_ah), ATH_DBG_RESET,
			"tx hung, resetting the chip\n");
+		spin_lock_bh(&sc->sc_pcu_lock);
		ath_reset(sc, true);
+		spin_unlock_bh(&sc->sc_pcu_lock);
	}
 
	ieee80211_queue_delayed_work(sc->hw, &sc->tx_complete_work,
@@ -2237,17 +2193,16 @@ void ath_tx_tasklet(struct ath_softc *sc)
 
 void ath_tx_edma_tasklet(struct ath_softc *sc)
 {
-	struct ath_tx_status txs;
+	struct ath_tx_status ts;
	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
	struct ath_hw *ah = sc->sc_ah;
	struct ath_txq *txq;
	struct ath_buf *bf, *lastbf;
	struct list_head bf_head;
	int status;
-	int txok;
 
	for (;;) {
-		status = ath9k_hw_txprocdesc(ah, NULL, (void *)&txs);
+		status = ath9k_hw_txprocdesc(ah, NULL, (void *)&ts);
		if (status == -EINPROGRESS)
			break;
		if (status == -EIO) {
@@ -2257,12 +2212,13 @@ void ath_tx_edma_tasklet(struct ath_softc *sc)
		}
 
		/* Skip beacon completions */
-		if (txs.qid == sc->beacon.beaconq)
+		if (ts.qid == sc->beacon.beaconq)
			continue;
 
-		txq = &sc->tx.txq[txs.qid];
+		txq = &sc->tx.txq[ts.qid];
 
		spin_lock_bh(&txq->axq_lock);
+
		if (list_empty(&txq->txq_fifo[txq->txq_tailidx])) {
			spin_unlock_bh(&txq->axq_lock);
			return;
@@ -2275,41 +2231,21 @@ void ath_tx_edma_tasklet(struct ath_softc *sc)
		INIT_LIST_HEAD(&bf_head);
		list_cut_position(&bf_head, &txq->txq_fifo[txq->txq_tailidx],
				  &lastbf->list);
-		INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH);
-		txq->axq_depth--;
-		txq->axq_tx_inprogress = false;
-		if (bf_is_ampdu_not_probing(bf))
-			txq->axq_ampdu_depth--;
-		spin_unlock_bh(&txq->axq_lock);
 
-		txok = !(txs.ts_status & ATH9K_TXERR_MASK);
-
-		if (!bf_isampdu(bf)) {
-			if (txs.ts_status & ATH9K_TXERR_XRETRY)
-				bf->bf_state.bf_type |= BUF_XRETRY;
-			ath_tx_rc_status(sc, bf, &txs, 1, txok ? 0 : 1, txok, true);
-		}
-
-		if (bf_isampdu(bf))
-			ath_tx_complete_aggr(sc, txq, bf, &bf_head, &txs,
-					     txok, true);
-		else
-			ath_tx_complete_buf(sc, bf, txq, &bf_head,
-					    &txs, txok, 0);
+		if (list_empty(&txq->txq_fifo[txq->txq_tailidx])) {
+			INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH);
 
-		spin_lock_bh(&txq->axq_lock);
+			if (!list_empty(&txq->axq_q)) {
+				struct list_head bf_q;
 
-		if (!list_empty(&txq->txq_fifo_pending)) {
-			INIT_LIST_HEAD(&bf_head);
-			bf = list_first_entry(&txq->txq_fifo_pending,
-					      struct ath_buf, list);
-			list_cut_position(&bf_head,
-					  &txq->txq_fifo_pending,
-					  &bf->bf_lastbf->list);
-			ath_tx_txqaddbuf(sc, txq, &bf_head);
-		} else if (sc->sc_flags & SC_OP_TXAGGR)
-			ath_txq_schedule(sc, txq);
+				INIT_LIST_HEAD(&bf_q);
+				txq->axq_link = NULL;
+				list_splice_tail_init(&txq->axq_q, &bf_q);
+				ath_tx_txqaddbuf(sc, txq, &bf_q, true);
+			}
+		}
 
+		ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head);
		spin_unlock_bh(&txq->axq_lock);
	}
 }