// SPDX-License-Identifier: ISC
/* Copyright (C) 2019 MediaTek Inc.
 *
 * Author: Ryder Lee <ryder.lee@mediatek.com>
 *         Roy Luo <royluo@google.com>
 *         Felix Fietkau <nbd@nbd.name>
 *         Lorenzo Bianconi <lorenzo@kernel.org>
 */

#include <linux/etherdevice.h>
#include <linux/timekeeping.h>
#include "mt7615.h"
#include "../dma.h"
#include "mac.h"
static struct mt76_wcid *mt7615_rx_get_wcid(struct mt7615_dev *dev,
					    u8 idx, bool unicast)
{
	struct mt7615_sta *sta;
	struct mt76_wcid *wcid;

	if (idx >= ARRAY_SIZE(dev->mt76.wcid))
		return NULL;

	wcid = rcu_dereference(dev->mt76.wcid[idx]);
	if (unicast || !wcid)
		return wcid;

	if (!wcid->sta)
		return NULL;

	sta = container_of(wcid, struct mt7615_sta, wcid);
	if (!sta->vif)
		return NULL;

	return &sta->vif->sta.wcid;
}
static int mt7615_get_rate(struct mt7615_dev *dev,
			   struct ieee80211_supported_band *sband,
			   int idx, bool cck)
{
	int offset = 0;
	int len = sband->n_bitrates;
	int i;

	if (cck) {
		if (sband == &dev->mt76.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->mt76.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
static void mt7615_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *pn = status->iv;
	u8 *hdr;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
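/* Parse the per-frame RX descriptor (RXD) words prepended by the MAC,
 * fill in struct mt76_rx_status in skb->cb and strip the descriptor so
 * the remaining buffer starts at the 802.11 header.
 */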
int mt7615_mac_fill_rx(struct mt7615_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_supported_band *sband;
	struct ieee80211_hdr *hdr;
	__le32 *rxd = (__le32 *)skb->data;
	u32 rxd0 = le32_to_cpu(rxd[0]);
	u32 rxd1 = le32_to_cpu(rxd[1]);
	u32 rxd2 = le32_to_cpu(rxd[2]);
	bool unicast, remove_pad, insert_ccmp_hdr = false;
	int i, idx;

	memset(status, 0, sizeof(*status));

	unicast = (rxd1 & MT_RXD1_NORMAL_ADDR_TYPE) == MT_RXD1_NORMAL_U2M;
	idx = FIELD_GET(MT_RXD2_NORMAL_WLAN_IDX, rxd2);
	status->wcid = mt7615_rx_get_wcid(dev, idx, unicast);

	/* TODO: properly support DBDC */
	status->freq = dev->mt76.chandef.chan->center_freq;
	status->band = dev->mt76.chandef.chan->band;
	if (status->band == NL80211_BAND_5GHZ)
		sband = &dev->mt76.sband_5g.sband;
	else
		sband = &dev->mt76.sband_2g.sband;

	if (rxd2 & MT_RXD2_NORMAL_FCS_ERR)
		status->flag |= RX_FLAG_FAILED_FCS_CRC;

	if (rxd2 & MT_RXD2_NORMAL_TKIP_MIC_ERR)
		status->flag |= RX_FLAG_MMIC_ERROR;

	if (FIELD_GET(MT_RXD2_NORMAL_SEC_MODE, rxd2) != 0 &&
	    !(rxd2 & (MT_RXD2_NORMAL_CLM | MT_RXD2_NORMAL_CM))) {
		status->flag |= RX_FLAG_DECRYPTED;
		status->flag |= RX_FLAG_IV_STRIPPED;
		status->flag |= RX_FLAG_MMIC_STRIPPED | RX_FLAG_MIC_STRIPPED;
	}

	remove_pad = rxd1 & MT_RXD1_NORMAL_HDR_OFFSET;

	if (rxd2 & MT_RXD2_NORMAL_MAX_LEN_ERROR)
		return -EINVAL;

	if (!sband->channels)
		return -EINVAL;

	rxd += 4;
	if (rxd0 & MT_RXD0_NORMAL_GROUP_4) {
		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	if (rxd0 & MT_RXD0_NORMAL_GROUP_1) {
		u8 *data = (u8 *)rxd;

		if (status->flag & RX_FLAG_DECRYPTED) {
			status->iv[0] = data[5];
			status->iv[1] = data[4];
			status->iv[2] = data[3];
			status->iv[3] = data[2];
			status->iv[4] = data[1];
			status->iv[5] = data[0];

			insert_ccmp_hdr = FIELD_GET(MT_RXD2_NORMAL_FRAG, rxd2);
		}
		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	if (rxd0 & MT_RXD0_NORMAL_GROUP_2) {
		rxd += 2;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	if (rxd0 & MT_RXD0_NORMAL_GROUP_3) {
		u32 rxdg0 = le32_to_cpu(rxd[0]);
		u32 rxdg1 = le32_to_cpu(rxd[1]);
		u8 stbc = FIELD_GET(MT_RXV1_HT_STBC, rxdg0);
		bool cck = false;

		i = FIELD_GET(MT_RXV1_TX_RATE, rxdg0);
		switch (FIELD_GET(MT_RXV1_TX_MODE, rxdg0)) {
		case MT_PHY_TYPE_CCK:
			cck = true;
			/* fall through */
		case MT_PHY_TYPE_OFDM:
			i = mt7615_get_rate(dev, sband, i, cck);
			break;
		case MT_PHY_TYPE_HT_GF:
		case MT_PHY_TYPE_HT:
			status->encoding = RX_ENC_HT;
			if (i > 31)
				return -EINVAL;
			break;
		case MT_PHY_TYPE_VHT:
			status->nss = FIELD_GET(MT_RXV2_NSTS, rxdg1) + 1;
			status->encoding = RX_ENC_VHT;
			break;
		default:
			return -EINVAL;
		}
		status->rate_idx = i;

		switch (FIELD_GET(MT_RXV1_FRAME_MODE, rxdg0)) {
		case MT_PHY_BW_20:
			break;
		case MT_PHY_BW_40:
			status->bw = RATE_INFO_BW_40;
			break;
		case MT_PHY_BW_80:
			status->bw = RATE_INFO_BW_80;
			break;
		case MT_PHY_BW_160:
			status->bw = RATE_INFO_BW_160;
			break;
		default:
			return -EINVAL;
		}

		if (rxdg0 & MT_RXV1_HT_SHORT_GI)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (rxdg0 & MT_RXV1_HT_AD_CODE)
			status->enc_flags |= RX_ENC_FLAG_LDPC;

		status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;

		rxd += 6;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	skb_pull(skb, (u8 *)rxd - skb->data + 2 * remove_pad);

	if (insert_ccmp_hdr) {
		u8 key_id = FIELD_GET(MT_RXD1_NORMAL_KEY_ID, rxd1);

		mt7615_insert_ccmp_hdr(skb, key_id);
	}

	hdr = (struct ieee80211_hdr *)skb->data;
	if (!status->wcid || !ieee80211_is_data_qos(hdr->frame_control))
		return 0;

	status->aggr = unicast &&
		       !ieee80211_is_qos_nullfunc(hdr->frame_control);
	status->tid = *ieee80211_get_qos_ctl(hdr) & IEEE80211_QOS_CTL_TID_MASK;
	status->seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(hdr->seq_ctrl));

	return 0;
}
void mt7615_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps)
{
}
void mt7615_tx_complete_skb(struct mt76_dev *mdev, enum mt76_txq_id qid,
			    struct mt76_queue_entry *e)
{
	if (!e->txwi) {
		dev_kfree_skb_any(e->skb);
		return;
	}

	/* error path */
	if (e->skb == DMA_DUMMY_DATA) {
		struct mt76_txwi_cache *t;
		struct mt7615_dev *dev;
		struct mt7615_txp *txp;
		u8 *txwi_ptr;

		txwi_ptr = mt76_get_txwi_ptr(mdev, e->txwi);
		txp = (struct mt7615_txp *)(txwi_ptr + MT_TXD_SIZE);
		dev = container_of(mdev, struct mt7615_dev, mt76);

		spin_lock_bh(&dev->token_lock);
		t = idr_remove(&dev->token, le16_to_cpu(txp->token));
		spin_unlock_bh(&dev->token_lock);
		e->skb = t ? t->skb : NULL;
	}

	if (e->skb)
		mt76_tx_complete_skb(mdev, e->skb);
}
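/* Translate a mac80211 ieee80211_tx_rate into the hardware rate field
 * (rate index, PHY mode, NSS and STBC) used in the TX descriptor, and
 * report the bandwidth selection through *bw.
 */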
u16 mt7615_mac_tx_rate_val(struct mt7615_dev *dev,
			   const struct ieee80211_tx_rate *rate,
			   bool stbc, u8 *bw)
{
	u8 phy, nss, rate_idx;
	u16 rateval;

	*bw = 0;

	if (rate->flags & IEEE80211_TX_RC_VHT_MCS) {
		rate_idx = ieee80211_rate_get_vht_mcs(rate);
		nss = ieee80211_rate_get_vht_nss(rate);
		phy = MT_PHY_TYPE_VHT;
		if (rate->flags & IEEE80211_TX_RC_40_MHZ_WIDTH)
			*bw = 1;
		else if (rate->flags & IEEE80211_TX_RC_80_MHZ_WIDTH)
			*bw = 2;
		else if (rate->flags & IEEE80211_TX_RC_160_MHZ_WIDTH)
			*bw = 3;
	} else if (rate->flags & IEEE80211_TX_RC_MCS) {
		rate_idx = rate->idx;
		nss = 1 + (rate->idx >> 3);
		phy = MT_PHY_TYPE_HT;
		if (rate->flags & IEEE80211_TX_RC_GREEN_FIELD)
			phy = MT_PHY_TYPE_HT_GF;
		if (rate->flags & IEEE80211_TX_RC_40_MHZ_WIDTH)
			*bw = 1;
	} else {
		const struct ieee80211_rate *r;
		int band = dev->mt76.chandef.chan->band;
		u16 val;

		nss = 1;
		r = &mt76_hw(dev)->wiphy->bands[band]->bitrates[rate->idx];
		if (rate->flags & IEEE80211_TX_RC_USE_SHORT_PREAMBLE)
			val = r->hw_value_short;
		else
			val = r->hw_value;

		phy = val >> 8;
		rate_idx = val & 0xff;
	}

	rateval = (FIELD_PREP(MT_TX_RATE_IDX, rate_idx) |
		   FIELD_PREP(MT_TX_RATE_MODE, phy) |
		   FIELD_PREP(MT_TX_RATE_NSS, nss - 1));

	if (stbc && nss == 1)
		rateval |= MT_TX_RATE_STBC;

	return rateval;
}
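/* Fill the hardware TX descriptor (TXWI) for one frame: queue and
 * header info, fixed-rate overrides, sequence number and protection
 * flags derived from the skb, station and key state.
 */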
int mt7615_mac_write_txwi(struct mt7615_dev *dev, __le32 *txwi,
			  struct sk_buff *skb, struct mt76_wcid *wcid,
			  struct ieee80211_sta *sta, int pid,
			  struct ieee80211_key_conf *key)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct ieee80211_tx_rate *rate = &info->control.rates[0];
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_vif *vif = info->control.vif;
	int tx_count = 8;
	u8 fc_type, fc_stype, p_fmt, q_idx, omac_idx = 0;
	__le16 fc = hdr->frame_control;
	u16 seqno = 0;
	u32 val;

	if (vif) {
		struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;

		omac_idx = mvif->omac_idx;
	}

	if (sta) {
		struct mt7615_sta *msta = (struct mt7615_sta *)sta->drv_priv;

		tx_count = msta->rate_count;
	}

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	if (ieee80211_is_data(fc)) {
		q_idx = skb_get_queue_mapping(skb);
		p_fmt = MT_TX_TYPE_CT;
	} else if (ieee80211_is_beacon(fc)) {
		q_idx = MT_LMAC_BCN0;
		p_fmt = MT_TX_TYPE_FW;
	} else {
		q_idx = MT_LMAC_ALTX0;
		p_fmt = MT_TX_TYPE_CT;
	}

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + MT_TXD_SIZE) |
	      FIELD_PREP(MT_TXD0_P_IDX, MT_TX_PORT_IDX_LMAC) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
	      FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID,
			 skb->priority & IEEE80211_QOS_CTL_TID_MASK) |
	      FIELD_PREP(MT_TXD1_PKT_FMT, p_fmt) |
	      FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);
	txwi[1] = cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST,
			 is_multicast_ether_addr(hdr->addr1));
	txwi[2] = cpu_to_le32(val);

	if (!(info->flags & IEEE80211_TX_CTL_AMPDU))
		txwi[2] |= cpu_to_le32(MT_TXD2_BA_DISABLE);

	txwi[4] = 0;
	txwi[6] = 0;

	if (rate->idx >= 0 && rate->count &&
	    !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)) {
		bool stbc = info->flags & IEEE80211_TX_CTL_STBC;
		u8 bw;
		u16 rateval = mt7615_mac_tx_rate_val(dev, rate, stbc, &bw);

		txwi[2] |= cpu_to_le32(MT_TXD2_FIX_RATE);

		val = MT_TXD6_FIXED_BW |
		      FIELD_PREP(MT_TXD6_BW, bw) |
		      FIELD_PREP(MT_TXD6_TX_RATE, rateval);
		txwi[6] |= cpu_to_le32(val);

		if (rate->flags & IEEE80211_TX_RC_SHORT_GI)
			txwi[6] |= cpu_to_le32(MT_TXD6_SGI);

		if (info->flags & IEEE80211_TX_CTL_LDPC)
			txwi[6] |= cpu_to_le32(MT_TXD6_LDPC);

		if (!(rate->flags & (IEEE80211_TX_RC_MCS |
				     IEEE80211_TX_RC_VHT_MCS)))
			txwi[2] |= cpu_to_le32(MT_TXD2_BA_DISABLE);

		tx_count = rate->count;
	}

	if (!ieee80211_is_beacon(fc)) {
		val = MT_TXD5_TX_STATUS_HOST | MT_TXD5_SW_POWER_MGMT |
		      FIELD_PREP(MT_TXD5_PID, pid);
		txwi[5] = cpu_to_le32(val);
	} else {
		txwi[5] = 0;
		/* use maximum tx count for beacons */
		tx_count = 0x1f;
	}

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, tx_count);
	if (ieee80211_is_data_qos(hdr->frame_control)) {
		seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(hdr->seq_ctrl));
		val |= MT_TXD3_SN_VALID;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		struct ieee80211_bar *bar = (struct ieee80211_bar *)skb->data;

		seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(bar->start_seq_num));
		val |= MT_TXD3_SN_VALID;
	}
	val |= FIELD_PREP(MT_TXD3_SEQ, seqno);

	txwi[3] = cpu_to_le32(val);

	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		txwi[3] |= cpu_to_le32(MT_TXD3_NO_ACK);

	if (key)
		txwi[3] |= cpu_to_le32(MT_TXD3_PROTECT_FRAME);

	txwi[7] = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
		  FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);

	return 0;
}
void mt7615_txp_skb_unmap(struct mt76_dev *dev,
			  struct mt76_txwi_cache *t)
{
	struct mt7615_txp *txp;
	u8 *txwi;
	int i;

	txwi = mt76_get_txwi_ptr(dev, t);
	txp = (struct mt7615_txp *)(txwi + MT_TXD_SIZE);
	for (i = 1; i < txp->nbuf; i++)
		dma_unmap_single(dev->dev, le32_to_cpu(txp->buf[i]),
				 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);
}
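/* Build the cut-through TX metadata for one frame: write the TXWI, fill
 * the TXP buffer list handed to the firmware and allocate a token so
 * the TX-free event can look the frame up again on completion.
 */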
int mt7615_tx_prepare_skb(struct mt76_dev *mdev, void *txwi_ptr,
			  enum mt76_txq_id qid, struct mt76_wcid *wcid,
			  struct ieee80211_sta *sta,
			  struct mt76_tx_info *tx_info)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx_info->skb->data;
	struct mt7615_dev *dev = container_of(mdev, struct mt7615_dev, mt76);
	struct mt7615_sta *msta = container_of(wcid, struct mt7615_sta, wcid);
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx_info->skb);
	struct ieee80211_key_conf *key = info->control.hw_key;
	struct ieee80211_vif *vif = info->control.vif;
	int i, pid, id, nbuf = tx_info->nbuf - 1;
	u8 *txwi = (u8 *)txwi_ptr;
	struct mt76_txwi_cache *t;
	struct mt7615_txp *txp;

	if (!wcid)
		wcid = &dev->mt76.global_wcid;

	pid = mt76_tx_status_skb_add(mdev, wcid, tx_info->skb);

	if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) {
		spin_lock_bh(&dev->mt76.lock);
		msta->rate_probe = true;
		mt7615_mcu_set_rates(dev, msta, &info->control.rates[0],
				     msta->rates);
		spin_unlock_bh(&dev->mt76.lock);
	}

	mt7615_mac_write_txwi(dev, txwi_ptr, tx_info->skb, wcid, sta,
			      pid, key);

	txp = (struct mt7615_txp *)(txwi + MT_TXD_SIZE);
	for (i = 0; i < nbuf; i++) {
		txp->buf[i] = cpu_to_le32(tx_info->buf[i + 1].addr);
		txp->len[i] = cpu_to_le16(tx_info->buf[i + 1].len);
	}
	txp->nbuf = nbuf;

	/* pass partial skb header to fw */
	tx_info->buf[1].len = MT_CT_PARSE_LEN;
	tx_info->nbuf = MT_CT_DMA_BUF_NUM;

	txp->flags = cpu_to_le16(MT_CT_INFO_APPLY_TXD);

	if (!key)
		txp->flags |= cpu_to_le16(MT_CT_INFO_NONE_CIPHER_FRAME);

	if (ieee80211_is_mgmt(hdr->frame_control))
		txp->flags |= cpu_to_le16(MT_CT_INFO_MGMT_FRAME);

	if (vif) {
		struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;

		txp->bss_idx = mvif->idx;
	}

	t = (struct mt76_txwi_cache *)(txwi + mdev->drv->txwi_size);
	t->skb = tx_info->skb;

	spin_lock_bh(&dev->token_lock);
	id = idr_alloc(&dev->token, t, 0, MT7615_TOKEN_SIZE, GFP_ATOMIC);
	spin_unlock_bh(&dev->token_lock);
	if (id < 0)
		return id;

	txp->token = cpu_to_le16(id);
	txp->rept_wds_wcid = 0xff;
	tx_info->skb = DMA_DUMMY_DATA;

	return 0;
}
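/* Convert a TX status (TXS) event into mac80211 tx_info: ACK/A-MPDU
 * flags, per-rate retry counts and the final rate actually used.
 */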
static bool mt7615_fill_txs(struct mt7615_dev *dev, struct mt7615_sta *sta,
			    struct ieee80211_tx_info *info, __le32 *txs_data)
{
	struct ieee80211_supported_band *sband;
	int i, idx, count, final_idx = 0;
	bool fixed_rate, final_mpdu, ack_timeout;
	bool probe, ampdu, cck = false;
	u32 final_rate, final_rate_flags, final_nss, txs;
	u8 pid;

	fixed_rate = info->status.rates[0].count;
	probe = !!(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE);

	txs = le32_to_cpu(txs_data[1]);
	final_mpdu = txs & MT_TXS1_ACKED_MPDU;
	ampdu = !fixed_rate && (txs & MT_TXS1_AMPDU);

	txs = le32_to_cpu(txs_data[3]);
	count = FIELD_GET(MT_TXS3_TX_COUNT, txs);

	txs = le32_to_cpu(txs_data[0]);
	pid = FIELD_GET(MT_TXS0_PID, txs);
	final_rate = FIELD_GET(MT_TXS0_TX_RATE, txs);
	ack_timeout = txs & MT_TXS0_ACK_TIMEOUT;

	if (!ampdu && (txs & MT_TXS0_RTS_TIMEOUT))
		return false;

	if (txs & MT_TXS0_QUEUE_TIMEOUT)
		return false;

	if (!ack_timeout)
		info->flags |= IEEE80211_TX_STAT_ACK;

	info->status.ampdu_len = 1;
	info->status.ampdu_ack_len = !!(info->flags &
					IEEE80211_TX_STAT_ACK);

	if (ampdu || (info->flags & IEEE80211_TX_CTL_AMPDU))
		info->flags |= IEEE80211_TX_STAT_AMPDU | IEEE80211_TX_CTL_AMPDU;

	if (fixed_rate && !probe) {
		info->status.rates[0].count = count;
		goto out;
	}

	for (i = 0, idx = 0; i < ARRAY_SIZE(info->status.rates); i++) {
		int cur_count = min_t(int, count, 2 * MT7615_RATE_RETRY);

		if (!i && probe) {
			cur_count = 1;
		} else {
			info->status.rates[i] = sta->rates[idx];
			idx++;
		}

		if (i && info->status.rates[i].idx < 0) {
			info->status.rates[i - 1].count += count;
			break;
		}

		if (!count) {
			info->status.rates[i].idx = -1;
			break;
		}

		info->status.rates[i].count = cur_count;
		final_idx = i;
		count -= cur_count;
	}

out:
	final_rate_flags = info->status.rates[final_idx].flags;

	switch (FIELD_GET(MT_TX_RATE_MODE, final_rate)) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		/* fall through */
	case MT_PHY_TYPE_OFDM:
		if (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &dev->mt76.sband_5g.sband;
		else
			sband = &dev->mt76.sband_2g.sband;
		final_rate &= MT_TX_RATE_IDX;
		final_rate = mt7615_get_rate(dev, sband, final_rate, cck);
		final_rate_flags = 0;
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		final_rate_flags |= IEEE80211_TX_RC_MCS;
		final_rate &= MT_TX_RATE_IDX;
		if (final_rate > 31)
			return false;
		break;
	case MT_PHY_TYPE_VHT:
		final_nss = FIELD_GET(MT_TX_RATE_NSS, final_rate);
		final_rate_flags |= IEEE80211_TX_RC_VHT_MCS;
		final_rate = (final_rate & MT_TX_RATE_IDX) | (final_nss << 4);
		break;
	default:
		return false;
	}

	info->status.rates[final_idx].idx = final_rate;
	info->status.rates[final_idx].flags = final_rate_flags;

	return true;
}
static bool mt7615_mac_add_txs_skb(struct mt7615_dev *dev,
				   struct mt7615_sta *sta, int pid,
				   __le32 *txs_data)
{
	struct mt76_dev *mdev = &dev->mt76;
	struct sk_buff_head list;
	struct sk_buff *skb;

	if (pid < MT_PACKET_ID_FIRST)
		return false;

	mt76_tx_status_lock(mdev, &list);
	skb = mt76_tx_status_skb_get(mdev, &sta->wcid, pid, &list);
	if (skb) {
		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);

		if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) {
			spin_lock_bh(&dev->mt76.lock);
			if (sta->rate_probe) {
				mt7615_mcu_set_rates(dev, sta, NULL,
						     sta->rates);
				sta->rate_probe = false;
			}
			spin_unlock_bh(&dev->mt76.lock);
		}

		if (!mt7615_fill_txs(dev, sta, info, txs_data)) {
			ieee80211_tx_info_clear_status(info);
			info->status.rates[0].idx = -1;
		}

		mt76_tx_status_skb_done(mdev, skb, &list);
	}
	mt76_tx_status_unlock(mdev, &list);

	return !!skb;
}
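/* Entry point for a TXS event: resolve the reporting wcid, try to match
 * the event to a queued status skb, otherwise report the status to
 * mac80211 without an skb.
 */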
void mt7615_mac_add_txs(struct mt7615_dev *dev, void *data)
{
	struct ieee80211_tx_info info = {};
	struct ieee80211_sta *sta = NULL;
	struct mt7615_sta *msta = NULL;
	struct mt76_wcid *wcid;
	__le32 *txs_data = data;
	u32 txs;
	u8 wcidx;
	u8 pid;

	txs = le32_to_cpu(txs_data[0]);
	pid = FIELD_GET(MT_TXS0_PID, txs);
	txs = le32_to_cpu(txs_data[2]);
	wcidx = FIELD_GET(MT_TXS2_WCID, txs);

	if (pid == MT_PACKET_ID_NO_ACK)
		return;

	if (wcidx >= ARRAY_SIZE(dev->mt76.wcid))
		return;

	rcu_read_lock();

	wcid = rcu_dereference(dev->mt76.wcid[wcidx]);
	if (!wcid)
		goto out;

	msta = container_of(wcid, struct mt7615_sta, wcid);
	sta = wcid_to_sta(wcid);

	if (mt7615_mac_add_txs_skb(dev, msta, pid, txs_data))
		goto out;

	if (wcidx >= MT7615_WTBL_STA || !sta)
		goto out;

	if (mt7615_fill_txs(dev, msta, &info, txs_data))
		ieee80211_tx_status_noskb(mt76_hw(dev), sta, &info);

out:
	rcu_read_unlock();
}
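/* Handle a TX-free event from the hardware: for every released token,
 * unmap the frame buffers, complete the skb and recycle the txwi cache
 * entry.
 */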
void mt7615_mac_tx_free(struct mt7615_dev *dev, struct sk_buff *skb)
{
	struct mt7615_tx_free *free = (struct mt7615_tx_free *)skb->data;
	struct mt76_dev *mdev = &dev->mt76;
	struct mt76_txwi_cache *txwi;
	u8 i, count;

	count = FIELD_GET(MT_TX_FREE_MSDU_ID_CNT, le16_to_cpu(free->ctrl));
	for (i = 0; i < count; i++) {
		spin_lock_bh(&dev->token_lock);
		txwi = idr_remove(&dev->token, le16_to_cpu(free->token[i]));
		spin_unlock_bh(&dev->token_lock);

		if (!txwi)
			continue;

		mt7615_txp_skb_unmap(mdev, txwi);
		if (txwi->skb) {
			mt76_tx_complete_skb(mdev, txwi->skb);
			txwi->skb = NULL;
		}

		mt76_put_txwi(mdev, txwi);
	}
	dev_kfree_skb(skb);
}
void mt7615_mac_work(struct work_struct *work)
{
	struct mt7615_dev *dev;

	dev = (struct mt7615_dev *)container_of(work, struct mt76_dev,
						mac_work.work);

	mt76_tx_status_check(&dev->mt76, NULL, false);

	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->mt76.mac_work,
				     MT7615_WATCHDOG_TIME);
}