/* drivers/net/wireless/mediatek/mt76/mac80211.c */
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
16 #include <linux/of.h>
17 #include "mt76.h"
/* Build a 2.4 GHz struct ieee80211_channel initializer */
#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}
/* Build a 5 GHz struct ieee80211_channel initializer */
#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}
33 static const struct ieee80211_channel mt76_channels_2ghz[] = {
34 CHAN2G(1, 2412),
35 CHAN2G(2, 2417),
36 CHAN2G(3, 2422),
37 CHAN2G(4, 2427),
38 CHAN2G(5, 2432),
39 CHAN2G(6, 2437),
40 CHAN2G(7, 2442),
41 CHAN2G(8, 2447),
42 CHAN2G(9, 2452),
43 CHAN2G(10, 2457),
44 CHAN2G(11, 2462),
45 CHAN2G(12, 2467),
46 CHAN2G(13, 2472),
47 CHAN2G(14, 2484),
50 static const struct ieee80211_channel mt76_channels_5ghz[] = {
51 CHAN5G(36, 5180),
52 CHAN5G(40, 5200),
53 CHAN5G(44, 5220),
54 CHAN5G(48, 5240),
56 CHAN5G(52, 5260),
57 CHAN5G(56, 5280),
58 CHAN5G(60, 5300),
59 CHAN5G(64, 5320),
61 CHAN5G(100, 5500),
62 CHAN5G(104, 5520),
63 CHAN5G(108, 5540),
64 CHAN5G(112, 5560),
65 CHAN5G(116, 5580),
66 CHAN5G(120, 5600),
67 CHAN5G(124, 5620),
68 CHAN5G(128, 5640),
69 CHAN5G(132, 5660),
70 CHAN5G(136, 5680),
71 CHAN5G(140, 5700),
73 CHAN5G(149, 5745),
74 CHAN5G(153, 5765),
75 CHAN5G(157, 5785),
76 CHAN5G(161, 5805),
77 CHAN5G(165, 5825),
80 static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
81 { .throughput = 0 * 1024, .blink_time = 334 },
82 { .throughput = 1 * 1024, .blink_time = 260 },
83 { .throughput = 5 * 1024, .blink_time = 220 },
84 { .throughput = 10 * 1024, .blink_time = 190 },
85 { .throughput = 20 * 1024, .blink_time = 170 },
86 { .throughput = 50 * 1024, .blink_time = 150 },
87 { .throughput = 70 * 1024, .blink_time = 130 },
88 { .throughput = 100 * 1024, .blink_time = 110 },
89 { .throughput = 200 * 1024, .blink_time = 80 },
90 { .throughput = 300 * 1024, .blink_time = 50 },
93 static int mt76_led_init(struct mt76_dev *dev)
95 struct device_node *np = dev->dev->of_node;
96 struct ieee80211_hw *hw = dev->hw;
97 int led_pin;
99 if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
100 return 0;
102 snprintf(dev->led_name, sizeof(dev->led_name),
103 "mt76-%s", wiphy_name(hw->wiphy));
105 dev->led_cdev.name = dev->led_name;
106 dev->led_cdev.default_trigger =
107 ieee80211_create_tpt_led_trigger(hw,
108 IEEE80211_TPT_LEDTRIG_FL_RADIO,
109 mt76_tpt_blink,
110 ARRAY_SIZE(mt76_tpt_blink));
112 np = of_get_child_by_name(np, "led");
113 if (np) {
114 if (!of_property_read_u32(np, "led-sources", &led_pin))
115 dev->led_pin = led_pin;
116 dev->led_al = of_property_read_bool(np, "led-active-low");
119 return devm_led_classdev_register(dev->dev, &dev->led_cdev);
122 static void mt76_init_stream_cap(struct mt76_dev *dev,
123 struct ieee80211_supported_band *sband,
124 bool vht)
126 struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
127 int i, nstream = hweight8(dev->antenna_mask);
128 struct ieee80211_sta_vht_cap *vht_cap;
129 u16 mcs_map = 0;
131 if (nstream > 1)
132 ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
133 else
134 ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;
136 for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
137 ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;
139 if (!vht)
140 return;
142 vht_cap = &sband->vht_cap;
143 if (nstream > 1)
144 vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
145 else
146 vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;
148 for (i = 0; i < 8; i++) {
149 if (i < nstream)
150 mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
151 else
152 mcs_map |=
153 (IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
155 vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
156 vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
159 void mt76_set_stream_caps(struct mt76_dev *dev, bool vht)
161 if (dev->cap.has_2ghz)
162 mt76_init_stream_cap(dev, &dev->sband_2g.sband, false);
163 if (dev->cap.has_5ghz)
164 mt76_init_stream_cap(dev, &dev->sband_5g.sband, vht);
166 EXPORT_SYMBOL_GPL(mt76_set_stream_caps);
168 static int
169 mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
170 const struct ieee80211_channel *chan, int n_chan,
171 struct ieee80211_rate *rates, int n_rates, bool vht)
173 struct ieee80211_supported_band *sband = &msband->sband;
174 struct ieee80211_sta_ht_cap *ht_cap;
175 struct ieee80211_sta_vht_cap *vht_cap;
176 void *chanlist;
177 int size;
179 size = n_chan * sizeof(*chan);
180 chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
181 if (!chanlist)
182 return -ENOMEM;
184 msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
185 GFP_KERNEL);
186 if (!msband->chan)
187 return -ENOMEM;
189 sband->channels = chanlist;
190 sband->n_channels = n_chan;
191 sband->bitrates = rates;
192 sband->n_bitrates = n_rates;
193 dev->chandef.chan = &sband->channels[0];
195 ht_cap = &sband->ht_cap;
196 ht_cap->ht_supported = true;
197 ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
198 IEEE80211_HT_CAP_GRN_FLD |
199 IEEE80211_HT_CAP_SGI_20 |
200 IEEE80211_HT_CAP_SGI_40 |
201 (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);
203 ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
204 ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
205 ht_cap->ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;
207 mt76_init_stream_cap(dev, sband, vht);
209 if (!vht)
210 return 0;
212 vht_cap = &sband->vht_cap;
213 vht_cap->vht_supported = true;
214 vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
215 IEEE80211_VHT_CAP_RXSTBC_1 |
216 IEEE80211_VHT_CAP_SHORT_GI_80 |
217 IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
218 IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
219 (3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);
221 return 0;
224 static int
225 mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
226 int n_rates)
228 dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->sband_2g.sband;
230 return mt76_init_sband(dev, &dev->sband_2g,
231 mt76_channels_2ghz,
232 ARRAY_SIZE(mt76_channels_2ghz),
233 rates, n_rates, false);
236 static int
237 mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
238 int n_rates, bool vht)
240 dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->sband_5g.sband;
242 return mt76_init_sband(dev, &dev->sband_5g,
243 mt76_channels_5ghz,
244 ARRAY_SIZE(mt76_channels_5ghz),
245 rates, n_rates, vht);
248 static void
249 mt76_check_sband(struct mt76_dev *dev, int band)
251 struct ieee80211_supported_band *sband = dev->hw->wiphy->bands[band];
252 bool found = false;
253 int i;
255 if (!sband)
256 return;
258 for (i = 0; i < sband->n_channels; i++) {
259 if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
260 continue;
262 found = true;
263 break;
266 if (found)
267 return;
269 sband->n_channels = 0;
270 dev->hw->wiphy->bands[band] = NULL;
273 struct mt76_dev *
274 mt76_alloc_device(struct device *pdev, unsigned int size,
275 const struct ieee80211_ops *ops,
276 const struct mt76_driver_ops *drv_ops)
278 struct ieee80211_hw *hw;
279 struct mt76_dev *dev;
281 hw = ieee80211_alloc_hw(size, ops);
282 if (!hw)
283 return NULL;
285 dev = hw->priv;
286 dev->hw = hw;
287 dev->dev = pdev;
288 dev->drv = drv_ops;
290 spin_lock_init(&dev->rx_lock);
291 spin_lock_init(&dev->lock);
292 spin_lock_init(&dev->cc_lock);
293 mutex_init(&dev->mutex);
294 init_waitqueue_head(&dev->tx_wait);
295 skb_queue_head_init(&dev->status_list);
297 return dev;
299 EXPORT_SYMBOL_GPL(mt76_alloc_device);
301 int mt76_register_device(struct mt76_dev *dev, bool vht,
302 struct ieee80211_rate *rates, int n_rates)
304 struct ieee80211_hw *hw = dev->hw;
305 struct wiphy *wiphy = hw->wiphy;
306 int ret;
308 dev_set_drvdata(dev->dev, dev);
310 INIT_LIST_HEAD(&dev->txwi_cache);
312 SET_IEEE80211_DEV(hw, dev->dev);
313 SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);
315 wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
317 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
319 wiphy->available_antennas_tx = dev->antenna_mask;
320 wiphy->available_antennas_rx = dev->antenna_mask;
322 hw->txq_data_size = sizeof(struct mt76_txq);
323 hw->max_tx_fragments = 16;
325 ieee80211_hw_set(hw, SIGNAL_DBM);
326 ieee80211_hw_set(hw, PS_NULLFUNC_STACK);
327 ieee80211_hw_set(hw, HOST_BROADCAST_PS_BUFFERING);
328 ieee80211_hw_set(hw, AMPDU_AGGREGATION);
329 ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
330 ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
331 ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
332 ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
333 ieee80211_hw_set(hw, TX_AMSDU);
334 ieee80211_hw_set(hw, TX_FRAG_LIST);
335 ieee80211_hw_set(hw, MFP_CAPABLE);
336 ieee80211_hw_set(hw, AP_LINK_PS);
337 ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
338 ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);
340 wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
342 if (dev->cap.has_2ghz) {
343 ret = mt76_init_sband_2g(dev, rates, n_rates);
344 if (ret)
345 return ret;
348 if (dev->cap.has_5ghz) {
349 ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
350 if (ret)
351 return ret;
354 wiphy_read_of_freq_limits(dev->hw->wiphy);
355 mt76_check_sband(dev, NL80211_BAND_2GHZ);
356 mt76_check_sband(dev, NL80211_BAND_5GHZ);
358 if (IS_ENABLED(CONFIG_MT76_LEDS)) {
359 ret = mt76_led_init(dev);
360 if (ret)
361 return ret;
364 return ieee80211_register_hw(hw);
366 EXPORT_SYMBOL_GPL(mt76_register_device);
368 void mt76_unregister_device(struct mt76_dev *dev)
370 struct ieee80211_hw *hw = dev->hw;
372 mt76_tx_status_check(dev, NULL, true);
373 ieee80211_unregister_hw(hw);
375 EXPORT_SYMBOL_GPL(mt76_unregister_device);
377 void mt76_free_device(struct mt76_dev *dev)
379 mt76_tx_free(dev);
380 ieee80211_free_hw(dev->hw);
382 EXPORT_SYMBOL_GPL(mt76_free_device);
384 void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
386 if (!test_bit(MT76_STATE_RUNNING, &dev->state)) {
387 dev_kfree_skb(skb);
388 return;
391 __skb_queue_tail(&dev->rx_skb[q], skb);
393 EXPORT_SYMBOL_GPL(mt76_rx);
395 bool mt76_has_tx_pending(struct mt76_dev *dev)
397 struct mt76_queue *q;
398 int i;
400 for (i = 0; i < ARRAY_SIZE(dev->q_tx); i++) {
401 q = dev->q_tx[i].q;
402 if (q && q->queued)
403 return true;
406 return false;
408 EXPORT_SYMBOL_GPL(mt76_has_tx_pending);
410 void mt76_set_channel(struct mt76_dev *dev)
412 struct ieee80211_hw *hw = dev->hw;
413 struct cfg80211_chan_def *chandef = &hw->conf.chandef;
414 struct mt76_channel_state *state;
415 bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
416 int timeout = HZ / 5;
418 if (offchannel)
419 set_bit(MT76_OFFCHANNEL, &dev->state);
420 else
421 clear_bit(MT76_OFFCHANNEL, &dev->state);
423 wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(dev), timeout);
425 if (dev->drv->update_survey)
426 dev->drv->update_survey(dev);
428 dev->chandef = *chandef;
430 if (!offchannel)
431 dev->main_chan = chandef->chan;
433 if (chandef->chan != dev->main_chan) {
434 state = mt76_channel_state(dev, chandef->chan);
435 memset(state, 0, sizeof(*state));
438 EXPORT_SYMBOL_GPL(mt76_set_channel);
440 int mt76_get_survey(struct ieee80211_hw *hw, int idx,
441 struct survey_info *survey)
443 struct mt76_dev *dev = hw->priv;
444 struct mt76_sband *sband;
445 struct ieee80211_channel *chan;
446 struct mt76_channel_state *state;
447 int ret = 0;
449 if (idx == 0 && dev->drv->update_survey)
450 dev->drv->update_survey(dev);
452 sband = &dev->sband_2g;
453 if (idx >= sband->sband.n_channels) {
454 idx -= sband->sband.n_channels;
455 sband = &dev->sband_5g;
458 if (idx >= sband->sband.n_channels)
459 return -ENOENT;
461 chan = &sband->sband.channels[idx];
462 state = mt76_channel_state(dev, chan);
464 memset(survey, 0, sizeof(*survey));
465 survey->channel = chan;
466 survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
467 if (chan == dev->main_chan)
468 survey->filled |= SURVEY_INFO_IN_USE;
470 spin_lock_bh(&dev->cc_lock);
471 survey->time = div_u64(state->cc_active, 1000);
472 survey->time_busy = div_u64(state->cc_busy, 1000);
473 spin_unlock_bh(&dev->cc_lock);
475 return ret;
477 EXPORT_SYMBOL_GPL(mt76_get_survey);
479 void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
480 struct ieee80211_key_conf *key)
482 struct ieee80211_key_seq seq;
483 int i;
485 wcid->rx_check_pn = false;
487 if (!key)
488 return;
490 if (key->cipher == WLAN_CIPHER_SUITE_CCMP)
491 wcid->rx_check_pn = true;
493 for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
494 ieee80211_get_key_rx_seq(key, i, &seq);
495 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
498 EXPORT_SYMBOL(mt76_wcid_key_setup);
500 struct ieee80211_sta *mt76_rx_convert(struct sk_buff *skb)
502 struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
503 struct mt76_rx_status mstat;
505 mstat = *((struct mt76_rx_status *) skb->cb);
506 memset(status, 0, sizeof(*status));
508 status->flag = mstat.flag;
509 status->freq = mstat.freq;
510 status->enc_flags = mstat.enc_flags;
511 status->encoding = mstat.encoding;
512 status->bw = mstat.bw;
513 status->rate_idx = mstat.rate_idx;
514 status->nss = mstat.nss;
515 status->band = mstat.band;
516 status->signal = mstat.signal;
517 status->chains = mstat.chains;
519 BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
520 BUILD_BUG_ON(sizeof(status->chain_signal) != sizeof(mstat.chain_signal));
521 memcpy(status->chain_signal, mstat.chain_signal, sizeof(mstat.chain_signal));
523 return wcid_to_sta(mstat.wcid);
525 EXPORT_SYMBOL(mt76_rx_convert);
527 static int
528 mt76_check_ccmp_pn(struct sk_buff *skb)
530 struct mt76_rx_status *status = (struct mt76_rx_status *) skb->cb;
531 struct mt76_wcid *wcid = status->wcid;
532 struct ieee80211_hdr *hdr;
533 int ret;
535 if (!(status->flag & RX_FLAG_DECRYPTED))
536 return 0;
538 if (!wcid || !wcid->rx_check_pn)
539 return 0;
541 if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
543 * Validate the first fragment both here and in mac80211
544 * All further fragments will be validated by mac80211 only.
546 hdr = (struct ieee80211_hdr *) skb->data;
547 if (ieee80211_is_frag(hdr) &&
548 !ieee80211_is_first_frag(hdr->frame_control))
549 return 0;
552 BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
553 ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
554 sizeof(status->iv));
555 if (ret <= 0)
556 return -EINVAL; /* replay */
558 memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));
560 if (status->flag & RX_FLAG_IV_STRIPPED)
561 status->flag |= RX_FLAG_PN_VALIDATED;
563 return 0;
566 static void
567 mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
569 struct mt76_rx_status *status = (struct mt76_rx_status *) skb->cb;
570 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data;
571 struct ieee80211_sta *sta;
572 struct mt76_wcid *wcid = status->wcid;
573 bool ps;
574 int i;
576 if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
577 sta = ieee80211_find_sta_by_ifaddr(dev->hw, hdr->addr2, NULL);
578 if (sta)
579 wcid = status->wcid = (struct mt76_wcid *) sta->drv_priv;
582 if (!wcid || !wcid->sta)
583 return;
585 sta = container_of((void *) wcid, struct ieee80211_sta, drv_priv);
587 if (status->signal <= 0)
588 ewma_signal_add(&wcid->rssi, -status->signal);
590 wcid->inactive_count = 0;
592 if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
593 return;
595 if (ieee80211_is_pspoll(hdr->frame_control)) {
596 ieee80211_sta_pspoll(sta);
597 return;
600 if (ieee80211_has_morefrags(hdr->frame_control) ||
601 !(ieee80211_is_mgmt(hdr->frame_control) ||
602 ieee80211_is_data(hdr->frame_control)))
603 return;
605 ps = ieee80211_has_pm(hdr->frame_control);
607 if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
608 ieee80211_is_qos_nullfunc(hdr->frame_control)))
609 ieee80211_sta_uapsd_trigger(sta, status->tid);
611 if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
612 return;
614 if (ps)
615 set_bit(MT_WCID_FLAG_PS, &wcid->flags);
616 else
617 clear_bit(MT_WCID_FLAG_PS, &wcid->flags);
619 dev->drv->sta_ps(dev, sta, ps);
620 ieee80211_sta_ps_transition(sta, ps);
622 if (ps)
623 return;
625 for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
626 struct mt76_txq *mtxq;
628 if (!sta->txq[i])
629 continue;
631 mtxq = (struct mt76_txq *) sta->txq[i]->drv_priv;
632 if (!skb_queue_empty(&mtxq->retry_q))
633 ieee80211_schedule_txq(dev->hw, sta->txq[i]);
637 void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
638 struct napi_struct *napi)
640 struct ieee80211_sta *sta;
641 struct sk_buff *skb;
643 spin_lock(&dev->rx_lock);
644 while ((skb = __skb_dequeue(frames)) != NULL) {
645 if (mt76_check_ccmp_pn(skb)) {
646 dev_kfree_skb(skb);
647 continue;
650 sta = mt76_rx_convert(skb);
651 ieee80211_rx_napi(dev->hw, sta, skb, napi);
653 spin_unlock(&dev->rx_lock);
656 void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
657 struct napi_struct *napi)
659 struct sk_buff_head frames;
660 struct sk_buff *skb;
662 __skb_queue_head_init(&frames);
664 while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
665 mt76_check_sta(dev, skb);
666 mt76_rx_aggr_reorder(skb, &frames);
669 mt76_rx_complete(dev, &frames, napi);
671 EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);
673 static int
674 mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
675 struct ieee80211_sta *sta)
677 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
678 int ret;
679 int i;
681 mutex_lock(&dev->mutex);
683 ret = dev->drv->sta_add(dev, vif, sta);
684 if (ret)
685 goto out;
687 for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
688 struct mt76_txq *mtxq;
690 if (!sta->txq[i])
691 continue;
693 mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
694 mtxq->wcid = wcid;
696 mt76_txq_init(dev, sta->txq[i]);
699 ewma_signal_init(&wcid->rssi);
700 rcu_assign_pointer(dev->wcid[wcid->idx], wcid);
702 out:
703 mutex_unlock(&dev->mutex);
705 return ret;
708 void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
709 struct ieee80211_sta *sta)
711 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
712 int i, idx = wcid->idx;
714 rcu_assign_pointer(dev->wcid[idx], NULL);
715 synchronize_rcu();
717 if (dev->drv->sta_remove)
718 dev->drv->sta_remove(dev, vif, sta);
720 mt76_tx_status_check(dev, wcid, true);
721 for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
722 mt76_txq_remove(dev, sta->txq[i]);
723 mt76_wcid_free(dev->wcid_mask, idx);
725 EXPORT_SYMBOL_GPL(__mt76_sta_remove);
727 static void
728 mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
729 struct ieee80211_sta *sta)
731 mutex_lock(&dev->mutex);
732 __mt76_sta_remove(dev, vif, sta);
733 mutex_unlock(&dev->mutex);
736 int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
737 struct ieee80211_sta *sta,
738 enum ieee80211_sta_state old_state,
739 enum ieee80211_sta_state new_state)
741 struct mt76_dev *dev = hw->priv;
743 if (old_state == IEEE80211_STA_NOTEXIST &&
744 new_state == IEEE80211_STA_NONE)
745 return mt76_sta_add(dev, vif, sta);
747 if (old_state == IEEE80211_STA_AUTH &&
748 new_state == IEEE80211_STA_ASSOC &&
749 dev->drv->sta_assoc)
750 dev->drv->sta_assoc(dev, vif, sta);
752 if (old_state == IEEE80211_STA_NONE &&
753 new_state == IEEE80211_STA_NOTEXIST)
754 mt76_sta_remove(dev, vif, sta);
756 return 0;
758 EXPORT_SYMBOL_GPL(mt76_sta_state);
760 int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
761 int *dbm)
763 struct mt76_dev *dev = hw->priv;
764 int n_chains = hweight8(dev->antenna_mask);
766 *dbm = DIV_ROUND_UP(dev->txpower_cur, 2);
768 /* convert from per-chain power to combined
769 * output on 2x2 devices
771 if (n_chains > 1)
772 *dbm += 3;
774 return 0;
776 EXPORT_SYMBOL_GPL(mt76_get_txpower);
778 static void
779 __mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
781 if (vif->csa_active && ieee80211_csa_is_complete(vif))
782 ieee80211_csa_finish(vif);
785 void mt76_csa_finish(struct mt76_dev *dev)
787 if (!dev->csa_complete)
788 return;
790 ieee80211_iterate_active_interfaces_atomic(dev->hw,
791 IEEE80211_IFACE_ITER_RESUME_ALL,
792 __mt76_csa_finish, dev);
794 dev->csa_complete = 0;
796 EXPORT_SYMBOL_GPL(mt76_csa_finish);
798 static void
799 __mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
801 struct mt76_dev *dev = priv;
803 if (!vif->csa_active)
804 return;
806 dev->csa_complete |= ieee80211_csa_is_complete(vif);
809 void mt76_csa_check(struct mt76_dev *dev)
811 ieee80211_iterate_active_interfaces_atomic(dev->hw,
812 IEEE80211_IFACE_ITER_RESUME_ALL,
813 __mt76_csa_check, dev);
815 EXPORT_SYMBOL_GPL(mt76_csa_check);
818 mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
820 return 0;
822 EXPORT_SYMBOL_GPL(mt76_set_tim);