drivers/net/wireless/mediatek/mt76/mac80211.c
// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {		\
	.band = NL80211_BAND_2GHZ,	\
	.center_freq = (_freq),		\
	.hw_value = (_idx),		\
	.max_power = 30,		\
}

#define CHAN5G(_idx, _freq) {		\
	.band = NL80211_BAND_5GHZ,	\
	.center_freq = (_freq),		\
	.hw_value = (_idx),		\
	.max_power = 30,		\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput = 0 * 1024, .blink_time = 334 },
	{ .throughput = 1 * 1024, .blink_time = 260 },
	{ .throughput = 5 * 1024, .blink_time = 220 },
	{ .throughput = 10 * 1024, .blink_time = 190 },
	{ .throughput = 20 * 1024, .blink_time = 170 },
	{ .throughput = 50 * 1024, .blink_time = 150 },
	{ .throughput = 70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time = 80 },
	{ .throughput = 300 * 1024, .blink_time = 50 },
};

static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

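/* Adjust the HT/VHT TX STBC capability and MCS maps to the number of
 * spatial streams derived from the configured antenna mask.
 */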
static void mt76_init_stream_cap(struct mt76_dev *dev,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(dev->phy.antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_dev *dev, bool vht)
{
	if (dev->cap.has_2ghz)
		mt76_init_stream_cap(dev, &dev->phy.sband_2g.sband, false);
	if (dev->cap.has_5ghz)
		mt76_init_stream_cap(dev, &dev->phy.sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

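/* Allocate the per-band channel list and channel-state array and fill in
 * the common HT (and optionally VHT) capabilities shared by mt76 drivers.
 */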
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
	ht_cap->ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

	mt76_init_stream_cap(dev, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

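/* Pick the first usable channel of the band as the default, or drop the
 * band from the wiphy entirely if every channel is disabled (e.g. by
 * regulatory or OF frequency limits).
 */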
static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

static void
mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
{
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, PS_NULLFUNC_STACK);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);
	ieee80211_hw_set(hw, TX_FRAG_LIST);
	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_ADHOC);
}

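/* Allocate an additional ieee80211_hw/phy (e.g. for the second band of a
 * dual-band device). The per-band channel state is carved out of the same
 * allocation, between the phy struct and the driver private area.
 */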
struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	unsigned int phy_size, chan_size;
	unsigned int size_2g, size_5g;
	void *priv;

	phy_size = ALIGN(sizeof(*phy), 8);
	chan_size = sizeof(dev->phy.sband_2g.chan[0]);
	size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
	size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);

	size += phy_size + size_2g + size_5g;
	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;

	mt76_phy_init(dev, hw);

	priv = hw->priv + phy_size;

	phy->sband_2g = dev->phy.sband_2g;
	phy->sband_2g.chan = priv;
	priv += size_2g;

	phy->sband_5g = dev->phy.sband_5g;
	phy->sband_5g.chan = priv;
	priv += size_5g;

	phy->priv = priv;

	hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
	hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int
mt76_register_phy(struct mt76_phy *phy)
{
	int ret;

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void
mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	dev->phy2 = NULL;
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

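/* Allocate the primary ieee80211_hw together with struct mt76_dev and
 * initialize the locks, queues and tasklets shared by all mt76 drivers.
 */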
struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

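/* Set up the supported bands and LED, then register the device with
 * mac80211. Drivers are expected to pass a rate table whose first four
 * entries are the CCK rates; the 5 GHz band is registered with those
 * skipped (rates + 4).
 */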
int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(dev, hw);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q;
	int i, offset;

	offset = __MT_TXQ_MAX * (phy != &dev->phy);

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = dev->q_tx[offset + i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

static void
mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}

void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

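/* Switch the channel state after draining pending tx. Survey counters are
 * updated first and reset when tuning to a channel other than the main
 * operating channel.
 */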
void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

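/* Software replay detection for CCMP: compare the received PN against the
 * last value recorded for this station/TID and reject non-increasing PNs.
 */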
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = (struct ieee80211_hdr *)skb->data;
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

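/* Account rx airtime in software. Frames that belong to the same A-MPDU
 * are accumulated and reported in one go when the A-MPDU reference
 * changes, since they share a single PPDU.
 */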
static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

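/* Per-frame station bookkeeping on rx: RSSI averaging, airtime accounting
 * and powersave state transitions based on the PM bit of incoming frames.
 */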
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_napi(hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
			IEEE80211_IFACE_ITER_RESUME_ALL,
			__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
			IEEE80211_IFACE_ITER_RESUME_ALL,
			__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

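/* Re-insert the 8-byte CCMP header (PN + key id) in front of the payload
 * for frames received with the IV stripped by hardware, and clear
 * RX_FLAG_IV_STRIPPED accordingly.
 */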
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

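/* Map a hardware rate index to an index into the band's bitrate table,
 * skipping the CCK entries for OFDM lookups on the 2.4 GHz band.
 */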
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);