drivers/net/wireless/mediatek/mt76/mt76x0/phy.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

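/* Low-level RF register accessors: MMIO devices program the RF bank/register
 * pair directly through MT_RF_CSR_CFG, while USB devices go through the MCU
 * register-pair interface (see mt76x0_rf_wr/mt76x0_rf_rr below).
 */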
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
        int ret = 0;
        u8 bank, reg;

        if (test_bit(MT76_REMOVED, &dev->mphy.state))
                return -ENODEV;

        bank = MT_RF_BANK(offset);
        reg = MT_RF_REG(offset);

        if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
                return -EINVAL;

        mutex_lock(&dev->phy_mutex);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
                ret = -ETIMEDOUT;
                goto out;
        }

        mt76_wr(dev, MT_RF_CSR_CFG,
                FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
                MT_RF_CSR_CFG_WR |
                MT_RF_CSR_CFG_KICK);

out:
        mutex_unlock(&dev->phy_mutex);

        if (ret < 0)
                dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
                        bank, reg, ret);

        return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
        int ret = -ETIMEDOUT;
        u32 val;
        u8 bank, reg;

        if (test_bit(MT76_REMOVED, &dev->mphy.state))
                return -ENODEV;

        bank = MT_RF_BANK(offset);
        reg = MT_RF_REG(offset);

        if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
                return -EINVAL;

        mutex_lock(&dev->phy_mutex);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
                goto out;

        mt76_wr(dev, MT_RF_CSR_CFG,
                FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
                MT_RF_CSR_CFG_KICK);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
                goto out;

        val = mt76_rr(dev, MT_RF_CSR_CFG);
        if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
            FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
                ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
        mutex_unlock(&dev->phy_mutex);

        if (ret < 0)
                dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
                        bank, reg, ret);

        return ret;
}

static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
        if (mt76_is_usb(&dev->mt76)) {
                struct mt76_reg_pair pair = {
                        .reg = offset,
                        .value = val,
                };

                WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
                                       &dev->mphy.state));
                return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
        } else {
                return mt76x0_rf_csr_wr(dev, offset, val);
        }
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
        int ret;
        u32 val;

        if (mt76_is_usb(&dev->mt76)) {
                struct mt76_reg_pair pair = {
                        .reg = offset,
                };

                WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
                                       &dev->mphy.state));
                ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
                val = pair.value;
        } else {
                ret = val = mt76x0_rf_csr_rr(dev, offset);
        }

        return (ret < 0) ? ret : val;
}

static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
        int ret;

        ret = mt76x0_rf_rr(dev, offset);
        if (ret < 0)
                return ret;

        val |= ret & ~mask;

        ret = mt76x0_rf_wr(dev, offset, val);
        return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
        return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
        return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
                        const struct mt76_reg_pair *data,
                        int n)
{
        while (n-- > 0) {
                mt76x0_rf_csr_wr(dev, data->reg, data->value);
                data++;
        }
}

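/* Write a table of RF register pairs: MMIO devices go register by register
 * through the CSR interface, USB devices push the whole table to the MCU in
 * one burst.
 */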
#define RF_RANDOM_WRITE(dev, tab) do {                                  \
        if (mt76_is_mmio(&dev->mt76))                                   \
                mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));     \
        else                                                            \
                mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
        int i = 20;
        u32 val;

        do {
                val = mt76_rr(dev, MT_BBP(CORE, 0));
                if (val && ~val)
                        break;
        } while (--i);

        if (!i) {
                dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
                return -EIO;
        }

        dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
        return 0;
}

static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
        switch (band) {
        case NL80211_BAND_2GHZ:
                RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

                mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
                mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

                mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
                mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
                break;
        case NL80211_BAND_5GHZ:
                RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

                mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
                mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

                mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
                mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
                break;
        default:
                break;
        }
}

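/* Program the RF PLL registers for the requested channel from the matching
 * (SDM or regular) frequency plan entry, apply the bandwidth and band switch
 * tables, then configure the external PA and per-band TX ALC attenuation.
 */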
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel,
                              u16 rf_bw_band)
{
        const struct mt76x0_freq_item *freq_item;
        u16 rf_band = rf_bw_band & 0xff00;
        u16 rf_bw = rf_bw_band & 0x00ff;
        enum nl80211_band band;
        bool b_sdm = false;
        u32 mac_reg;
        int i;

        for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
                if (channel == mt76x0_sdm_channel[i]) {
                        b_sdm = true;
                        break;
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
                if (channel == mt76x0_frequency_plan[i].channel) {
                        rf_band = mt76x0_frequency_plan[i].band;

                        if (b_sdm)
                                freq_item = &mt76x0_sdm_frequency_plan[i];
                        else
                                freq_item = &mt76x0_frequency_plan[i];

                        mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
                        mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
                        mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
                        mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
                        mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

                        mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
                                      freq_item->pllR32_b7b5);

                        /* R32<4:0> pll_den: (Denomina - 8) */
                        mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
                                      freq_item->pllR32_b4b0);

                        /* R31<7:5> */
                        mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
                                      freq_item->pllR31_b7b5);

                        /* R31<4:0> pll_k(Nominator) */
                        mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
                                      freq_item->pllR31_b4b0);

                        /* R30<7> sdm_reset_n */
                        if (b_sdm) {
                                mt76x0_rf_clear(dev, MT_RF(0, 30),
                                                MT_RF_SDM_RESET_MASK);
                                mt76x0_rf_set(dev, MT_RF(0, 30),
                                              MT_RF_SDM_RESET_MASK);
                        } else {
                                mt76x0_rf_rmw(dev, MT_RF(0, 30),
                                              MT_RF_SDM_RESET_MASK,
                                              freq_item->pllR30_b7);
                        }

                        /* R30<6:2> sdmmash_prbs,sin */
                        mt76x0_rf_rmw(dev, MT_RF(0, 30),
                                      MT_RF_SDM_MASH_PRBS_MASK,
                                      freq_item->pllR30_b6b2);

                        /* R30<1> sdm_bp */
                        mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
                                      freq_item->pllR30_b1 << 1);

                        /* R30<0> R29<7:0> (hex) pll_n */
                        mt76x0_rf_wr(dev, MT_RF(0, 29),
                                     freq_item->pll_n & 0xff);

                        mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
                                      (freq_item->pll_n >> 8) & 0x1);

                        /* R28<7:6> isi_iso */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
                                      freq_item->pllR28_b7b6);

                        /* R28<5:4> pfd_dly */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
                                      freq_item->pllR28_b5b4);

                        /* R28<3:2> clksel option */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
                                      freq_item->pllR28_b3b2);

                        /* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
                        mt76x0_rf_wr(dev, MT_RF(0, 26),
                                     freq_item->pll_sdm_k & 0xff);
                        mt76x0_rf_wr(dev, MT_RF(0, 27),
                                     (freq_item->pll_sdm_k >> 8) & 0xff);

                        mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
                                      (freq_item->pll_sdm_k >> 16) & 0x3);

                        /* R24<1:0> xo_div */
                        mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
                                      freq_item->pllR24_b1b0);

                        break;
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
                if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_bw_switch_tab[i].value);
                } else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
                           (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_bw_switch_tab[i].value);
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
                if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_band_switch_tab[i].value);
                }
        }

        mt76_clear(dev, MT_RF_MISC, 0xc);

        band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
        if (mt76x02_ext_pa_enabled(dev, band)) {
                /* MT_RF_MISC (offset: 0x0518)
                 * [2]1'b1: enable external A band PA
                 *    1'b0: disable external A band PA
                 * [3]1'b1: enable external G band PA
                 *    1'b0: disable external G band PA
                 */
                if (rf_band & RF_A_BAND)
                        mt76_set(dev, MT_RF_MISC, BIT(2));
                else
                        mt76_set(dev, MT_RF_MISC, BIT(3));

                /* External PA */
                for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
                        if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
                                mt76x0_rf_wr(dev,
                                             mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
                                             mt76x0_rf_ext_pa_tab[i].value);
        }

        if (rf_band & RF_G_BAND) {
                mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
                /* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
                mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
                mac_reg &= 0x896400FF;
                mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
        } else {
                mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
                /* Set Atten mode = 0
                 * For Ext A band, Disable Tx Inc dcoc Cal.
                 */
                mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
                mac_reg &= 0x890400FF;
                mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
        }
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
                const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
                const struct mt76_reg_pair *pair = &item->reg_pair;

                if ((rf_bw_band & item->bw_band) != rf_bw_band)
                        continue;

                if (pair->reg == MT_BBP(AGC, 8)) {
                        u32 val = pair->value;
                        u8 gain;

                        gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
                        gain -= dev->cal.rx.lna_gain * 2;
                        val &= ~MT_BBP_AGC_GAIN;
                        val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
                        mt76_wr(dev, pair->reg, val);
                } else {
                        mt76_wr(dev, pair->reg, pair->value);
                }
        }
}

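/* Select single or dual antenna operation and the coexistence wiring based
 * on the antenna, CFG1 and NIC_CONF_2 EEPROM fields.
 */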
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
        u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
        u16 ee_cfg1 = mt76x02_eeprom_get(dev, MT_EE_CFG1_INIT);
        u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
        u32 wlan, coex3;
        bool ant_div;

        wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
        coex3 = mt76_rr(dev, MT_COEXCFG3);

        ee_ant &= ~(BIT(14) | BIT(12));
        wlan &= ~(BIT(6) | BIT(5));
        coex3 &= ~GENMASK(5, 2);

        if (ee_ant & MT_EE_ANTENNA_DUAL) {
                /* dual antenna mode */
                ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
                          (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
                if (ant_div)
                        ee_ant |= BIT(12);
                else
                        coex3 |= BIT(4);
                coex3 |= BIT(3);
                if (dev->mt76.cap.has_2ghz)
                        wlan |= BIT(6);
        } else {
                /* single antenna mode */
                if (dev->mt76.cap.has_5ghz) {
                        coex3 |= BIT(3) | BIT(4);
                } else {
                        wlan |= BIT(6);
                        coex3 |= BIT(1);
                }
        }

        if (is_mt7630(dev))
                ee_ant |= BIT(14) | BIT(11);

        mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
        mt76_rmw(dev, MT_CMB_CTRL, GENMASK(15, 0), ee_ant);
        mt76_rmw(dev, MT_CSR_EE_CFG1, GENMASK(15, 0), ee_cfg1);
        mt76_clear(dev, MT_COEXCFG0, BIT(2));
        mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
        enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
        int bw;

        switch (width) {
        default:
        case NL80211_CHAN_WIDTH_20_NOHT:
        case NL80211_CHAN_WIDTH_20:
                bw = BW_20;
                break;
        case NL80211_CHAN_WIDTH_40:
                bw = BW_40;
                break;
        case NL80211_CHAN_WIDTH_80:
                bw = BW_80;
                break;
        case NL80211_CHAN_WIDTH_10:
                bw = BW_10;
                break;
        case NL80211_CHAN_WIDTH_80P80:
        case NL80211_CHAN_WIDTH_160:
        case NL80211_CHAN_WIDTH_5:
                /* TODO error */
                return;
        }

        mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

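/* Measure the TSSI DC offset: bypass the ADDA control path, reset the BBP,
 * transmit via DAC0 and latch the reading from MT_BBP(CORE, 35) into
 * dev->cal.tssi_dc for later TSSI compensation.
 */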
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        u32 val;

        if (chan->band == NL80211_BAND_5GHZ)
                mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

        /* bypass ADDA control */
        mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
        mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

        /* bbp sw reset */
        mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
        usleep_range(500, 1000);
        mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

        val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
        mt76_wr(dev, MT_BBP(CORE, 34), val);

        /* enable TX with DAC0 input */
        mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

        mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
        dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* stop bypass ADDA */
        mt76_wr(dev, MT_RF_BYPASS_0, 0);
        /* stop TX */
        mt76_wr(dev, MT_BBP(TXBE, 6), 0);
        /* bbp sw reset */
        mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
        usleep_range(500, 1000);
        mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

        if (chan->band == NL80211_BAND_5GHZ)
                mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
                              u8 *info)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        u32 val;

        val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
        mt76_wr(dev, MT_BBP(CORE, 34), val);

        if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
                mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
                return -ETIMEDOUT;
        }

        *ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
        if (chan->band == NL80211_BAND_5GHZ)
                *ltssi += 128;

        /* set packet info#1 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
        info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* set packet info#2 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
        info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* set packet info#3 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
        info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
                                    int index, u8 tx_rate)
{
        u32 val, reg;

        reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
        val = mt76_rr(dev, reg);
        return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
                            u8 *info, s8 *target_power,
                            s8 *target_pa_power)
{
        u8 tx_rate, cur_power;

        cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
        switch (tx_mode) {
        case 0:
                /* cck rates */
                tx_rate = (info[0] & 0x60) >> 5;
                if (tx_rate > 3)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
                break;
        case 1: {
                u8 index;

                /* ofdm rates */
                tx_rate = (info[0] & 0xf0) >> 4;
                switch (tx_rate) {
                case 0xb:
                        index = 0;
                        break;
                case 0xf:
                        index = 1;
                        break;
                case 0xa:
                        index = 2;
                        break;
                case 0xe:
                        index = 3;
                        break;
                case 0x9:
                        index = 4;
                        break;
                case 0xd:
                        index = 5;
                        break;
                case 0x8:
                        index = 6;
                        break;
                case 0xc:
                        index = 7;
                        break;
                default:
                        return -EINVAL;
                }

                *target_power = cur_power + dev->mt76.rate_power.ofdm[index];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
                break;
        }
        case 4:
                /* vht rates */
                tx_rate = info[1] & 0xf;
                if (tx_rate > 9)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
                break;
        default:
                /* ht rates */
                tx_rate = info[1] & 0x7f;
                if (tx_rate > 9)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
                break;
        }

        return 0;
}

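/* Fixed-point approximation of a linear-to-dB conversion for the TSSI
 * feedback value; returns -10000 when the input cannot be normalized
 * (e.g. zero).
 */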
static s16 mt76x0_phy_lin2db(u16 val)
{
        u32 mantissa = val << 4;
        int ret, data;
        s16 exp = -4;

        while (mantissa < BIT(15)) {
                mantissa <<= 1;
                if (--exp < -20)
                        return -10000;
        }
        while (mantissa > 0xffff) {
                mantissa >>= 1;
                if (++exp > 20)
                        return -10000;
        }

        /* s(15,0) */
        if (mantissa <= 47104)
                data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
        else
                data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
        data = max_t(int, 0, data);

        ret = ((15 + exp) << 15) + data;
        ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
        return ret >> 10;
}

static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
                           s8 target_power, s8 target_pa_power,
                           s16 ltssi)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        int tssi_target = target_power << 12, tssi_slope;
        int tssi_offset, tssi_db, ret;
        u32 data;
        u16 val;

        if (chan->band == NL80211_BAND_5GHZ) {
                u8 bound[7];
                int i, err;

                err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
                                          sizeof(bound));
                if (err < 0)
                        return err;

                for (i = 0; i < ARRAY_SIZE(bound); i++) {
                        if (chan->hw_value <= bound[i] || !bound[i])
                                break;
                }

                val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

                tssi_offset = val >> 8;
                if ((tssi_offset >= 64 && tssi_offset <= 127) ||
                    (tssi_offset & BIT(7)))
                        tssi_offset -= BIT(8);
        } else {
                val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

                tssi_offset = val >> 8;
                if (tssi_offset & BIT(7))
                        tssi_offset -= BIT(8);
        }
        tssi_slope = val & 0xff;

        switch (target_pa_power) {
        case 1:
                if (chan->band == NL80211_BAND_2GHZ)
                        tssi_target += 29491; /* 3.6 * 8192 */
                /* fall through */
        case 0:
                break;
        default:
                tssi_target += 4424; /* 0.54 * 8192 */
                break;
        }

        if (!tx_mode) {
                data = mt76_rr(dev, MT_BBP(CORE, 1));
                if (is_mt7630(dev) && mt76_is_mmio(&dev->mt76)) {
                        int offset;

                        /* 2.3 * 8192 or 1.5 * 8192 */
                        offset = (data & BIT(5)) ? 18841 : 12288;
                        tssi_target += offset;
                } else if (data & BIT(5)) {
                        /* 0.8 * 8192 */
                        tssi_target += 6554;
                }
        }

        data = mt76_rr(dev, MT_BBP(TXBE, 4));
        switch (data & 0x3) {
        case 1:
                tssi_target -= 49152; /* -6db * 8192 */
                break;
        case 2:
                tssi_target -= 98304; /* -12db * 8192 */
                break;
        case 3:
                tssi_target += 49152; /* 6db * 8192 */
                break;
        default:
                break;
        }

        tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
        if (chan->band == NL80211_BAND_5GHZ) {
                tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
                tssi_target -= tssi_db;
                if (ltssi > 254 && tssi_target > 0) {
                        /* upper saturate */
                        tssi_target = 0;
                }
        } else {
                tssi_db += (tssi_offset << 9); /* offset s3.4 */
                tssi_target -= tssi_db;
                /* upper-lower saturate */
                if ((ltssi > 126 && tssi_target > 0) ||
                    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
                        tssi_target = 0;
                }
        }

        if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
            dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
            tssi_target > -4096 && tssi_target < 4096) {
                if ((tssi_target < 0 &&
                     tssi_target + dev->cal.tssi_target > 0) ||
                    (tssi_target > 0 &&
                     tssi_target + dev->cal.tssi_target <= 0))
                        tssi_target = 0;
                else
                        dev->cal.tssi_target = tssi_target;
        } else {
                dev->cal.tssi_target = tssi_target;
        }

        /* make the compensate value to the nearest compensate code */
        if (tssi_target > 0)
                tssi_target += 2048;
        else
                tssi_target -= 2048;
        tssi_target >>= 12;

        ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
        if (ret & BIT(5))
                ret -= BIT(6);
        ret += tssi_target;

        ret = min_t(int, 31, ret);
        return max_t(int, -32, ret);
}

static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
        s8 target_power, target_pa_power;
        u8 tssi_info[3], tx_mode;
        s16 ltssi;
        s8 val;

        if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
                return;

        tx_mode = tssi_info[0] & 0x7;
        if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
                                        &target_power, &target_pa_power) < 0)
                return;

        val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
                                         target_pa_power, ltssi);
        mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
        struct mt76_rate_power *t = &dev->mt76.rate_power;
        s8 info;

        mt76x0_get_tx_power_per_rate(dev, dev->mphy.chandef.chan, t);
        mt76x0_get_power_info(dev, dev->mphy.chandef.chan, &info);

        mt76x02_add_rate_power_offset(t, info);
        mt76x02_limit_rate_power(t, dev->txpower_conf);
        dev->mphy.txpower_cur = mt76x02_get_max_rate_power(t);
        mt76x02_add_rate_power_offset(t, -info);

        dev->target_power = info;
        mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
        u32 val, tx_alc, reg_val;

        if (is_mt7630(dev))
                return;

        if (power_on) {
                mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
                mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
                usleep_range(10, 20);

                if (mt76x0_tssi_enabled(dev)) {
                        mt76_wr(dev, MT_MAC_SYS_CTRL,
                                MT_MAC_SYS_CTRL_ENABLE_RX);
                        mt76x0_phy_tssi_dc_calibrate(dev);
                        mt76_wr(dev, MT_MAC_SYS_CTRL,
                                MT_MAC_SYS_CTRL_ENABLE_TX |
                                MT_MAC_SYS_CTRL_ENABLE_RX);
                }
        }

        tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
        mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
        usleep_range(500, 700);

        reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
        mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

        if (is_5ghz) {
                if (chan->hw_value < 100)
                        val = 0x701;
                else if (chan->hw_value < 140)
                        val = 0x801;
                else
                        val = 0x901;
        } else {
                val = 0x600;
        }

        mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
        mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
        usleep_range(15000, 20000);

        mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
        mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
        mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

void mt76x0_phy_set_channel(struct mt76x02_dev *dev,
                            struct cfg80211_chan_def *chandef)
{
        u32 ext_cca_chan[4] = {
                [0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
                [1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
                [2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
                [3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
        };
        bool scan = test_bit(MT76_SCANNING, &dev->mphy.state);
        int ch_group_index, freq, freq1;
        u8 channel;
        u32 val;
        u16 rf_bw_band;

        freq = chandef->chan->center_freq;
        freq1 = chandef->center_freq1;
        channel = chandef->chan->hw_value;
        rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;

        switch (chandef->width) {
        case NL80211_CHAN_WIDTH_40:
                if (freq1 > freq)
                        ch_group_index = 0;
                else
                        ch_group_index = 1;
                channel += 2 - ch_group_index * 4;
                rf_bw_band |= RF_BW_40;
                break;
        case NL80211_CHAN_WIDTH_80:
                ch_group_index = (freq - freq1 + 30) / 20;
                if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
                        ch_group_index = 0;
                channel += 6 - ch_group_index * 4;
                rf_bw_band |= RF_BW_80;
                break;
        default:
                ch_group_index = 0;
                rf_bw_band |= RF_BW_20;
                break;
        }

        if (mt76_is_usb(&dev->mt76)) {
                mt76x0_phy_bbp_set_bw(dev, chandef->width);
        } else {
                if (chandef->width == NL80211_CHAN_WIDTH_80 ||
                    chandef->width == NL80211_CHAN_WIDTH_40)
                        val = 0x201;
                else
                        val = 0x601;
                mt76_wr(dev, MT_TX_SW_CFG0, val);
        }
        mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
        mt76x02_phy_set_band(dev, chandef->chan->band,
                             ch_group_index & 1);

        mt76_rmw(dev, MT_EXT_CCA_CFG,
                 (MT_EXT_CCA_CFG_CCA0 |
                  MT_EXT_CCA_CFG_CCA1 |
                  MT_EXT_CCA_CFG_CCA2 |
                  MT_EXT_CCA_CFG_CCA3 |
                  MT_EXT_CCA_CFG_CCA_MASK),
                 ext_cca_chan[ch_group_index]);

        mt76x0_phy_set_band(dev, chandef->chan->band);
        mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

        /* set Japan Tx filter at channel 14 */
        if (channel == 14)
                mt76_set(dev, MT_BBP(CORE, 1), 0x20);
        else
                mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

        mt76x0_read_rx_gain(dev);
        mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

        /* enable vco */
        mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
        if (scan)
                return;

        mt76x02_init_agc_gain(dev);
        mt76x0_phy_calibrate(dev, false);
        mt76x0_phy_set_txpower(dev);

        ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
                                     MT_CALIBRATE_INTERVAL);
}

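/* Read the on-die temperature sensor via MT_BBP(CORE, 35) and trigger a VCO
 * or full recalibration when the value drifts too far from the temperature
 * recorded at the previous calibration.
 */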
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
        u8 rf_b7_73, rf_b0_66, rf_b0_67;
        s8 val;

        rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
        rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
        rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

        mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
        mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
        mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

        mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
        if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
                mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
                goto done;
        }

        val = mt76_rr(dev, MT_BBP(CORE, 35));
        val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

        if (abs(val - dev->cal.temp_vco) > 20) {
                mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
                                      dev->mphy.chandef.chan->hw_value);
                dev->cal.temp_vco = val;
        }
        if (abs(val - dev->cal.temp) > 30) {
                mt76x0_phy_calibrate(dev, false);
                dev->cal.temp = val;
        }

done:
        mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
        mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
        mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
        u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

        mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

        if ((dev->mphy.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
            !is_mt7630(dev))
                mt76x02_phy_dfs_adjust_agc(dev);
}

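/* Adjust the AGC gain from the minimum average RSSI: when the gain bracket
 * changes, reprogram the initial gain and clear the false CCA counters,
 * otherwise only apply small VGA adjustments.
 */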
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
        bool gain_change;
        u8 gain_delta;
        int low_gain;

        dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76, false);
        if (!dev->cal.avg_rssi_all)
                dev->cal.avg_rssi_all = -75;

        low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
                   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

        gain_change = dev->cal.low_gain < 0 ||
                      (dev->cal.low_gain & 2) ^ (low_gain & 2);
        dev->cal.low_gain = low_gain;

        if (!gain_change) {
                if (mt76x02_phy_adjust_vga_gain(dev))
                        mt76x0_phy_set_gain_val(dev);
                return;
        }

        dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
        gain_delta = (low_gain == 2) ? 10 : 0;

        dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
        mt76x0_phy_set_gain_val(dev);

        /* clear false CCA counters */
        mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
        struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
                                               cal_work.work);

        mt76x0_phy_update_channel_gain(dev);
        if (mt76x0_tssi_enabled(dev))
                mt76x0_phy_tssi_calibrate(dev);
        else
                mt76x0_phy_temp_sensor(dev);

        ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
                                     4 * MT_CALIBRATE_INTERVAL);
}

static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
                                      const struct mt76_reg_pair *rp, int len)
{
        int i;

        for (i = 0; i < len; i++) {
                u32 reg = rp[i].reg;
                u8 val = rp[i].value;

                switch (reg) {
                case MT_RF(0, 3):
                        if (mt76_is_mmio(&dev->mt76)) {
                                if (is_mt7630(dev))
                                        val = 0x70;
                                else
                                        val = 0x63;
                        } else {
                                val = 0x73;
                        }
                        break;
                case MT_RF(0, 21):
                        if (is_mt7610e(dev))
                                val = 0x10;
                        else
                                val = 0x12;
                        break;
                case MT_RF(5, 2):
                        if (is_mt7630(dev))
                                val = 0x1d;
                        else if (is_mt7610e(dev))
                                val = 0x00;
                        else
                                val = 0x0c;
                        break;
                default:
                        break;
                }
                mt76x0_rf_wr(dev, reg, val);
        }
}

static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
        int i;

        mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
                                  ARRAY_SIZE(mt76x0_rf_central_tab));
        mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
                                  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
        RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
        RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
                const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

                if (item->bw_band == RF_BW_20)
                        mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
                else if (((RF_G_BAND | RF_BW_20) & item->bw_band) ==
                         (RF_G_BAND | RF_BW_20))
                        mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
                if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_band_switch_tab[i].value);
                }
        }

        /* Frequency calibration
         * E1: B0.R22<6:0>: xo_cxo<6:0>
         * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
         */
        mt76x0_rf_wr(dev, MT_RF(0, 22),
                     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
        mt76x0_rf_rr(dev, MT_RF(0, 22));

        /* Reset procedure DAC during power-up:
         * - set B0.R73<7>
         * - clear B0.R73<7>
         * - set B0.R73<7>
         */
        mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
        mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
        mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

        /* vcocal_en: initiate VCO calibration (reset after completion) */
        mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
        INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

        mt76x0_phy_ant_select(dev);
        mt76x0_phy_rf_init(dev);
        mt76x02_phy_set_rxpath(dev);
        mt76x02_phy_set_txdac(dev);
}