// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

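/*
 * RF registers are banked; MT_RF() packs bank and register into a single
 * offset. On MMIO devices they are reached directly through the RF_CSR
 * window below, while on USB devices the MCU proxies the access through
 * register-pair commands on MT_MCU_MEMMAP_RF (see mt76x0_rf_wr/rr), so the
 * CSR helpers are presumably only exercised on the PCIe parts.
 */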
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mphy.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mphy.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (mt76_is_usb(&dev->mt76)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mphy.state));
		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (mt76_is_usb(&dev->mt76)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mphy.state));
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}

static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = mt76x0_rf_rr(dev, offset);
	if (ret < 0)
		return ret;

	val |= ret & ~mask;

	ret = mt76x0_rf_wr(dev, offset, val);
	return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
			const struct mt76_reg_pair *data,
			int n)
{
	while (n-- > 0) {
		mt76x0_rf_csr_wr(dev, data->reg, data->value);
		data++;
	}
}

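/*
 * Push a whole initval table to the RF: per-register CSR writes on MMIO
 * devices, a single MCU register-pair burst otherwise.
 */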
#define RF_RANDOM_WRITE(dev, tab) do {					\
	if (mt76_is_mmio(&dev->mt76))					\
		mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));	\
	else								\
		mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
	return 0;
}

static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

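/*
 * Program the per-channel RF parameters: pick the (SDM or integer-N)
 * frequency-plan entry for the channel, write the PLL registers from it,
 * then apply the bandwidth, band and external-PA switch tables.
 */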
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel,
			      u16 rf_bw_band)
{
	const struct mt76x0_freq_item *freq_item;
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	bool b_sdm = false;
	u32 mac_reg;
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			b_sdm = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (b_sdm)
				freq_item = &mt76x0_sdm_frequency_plan[i];
			else
				freq_item = &mt76x0_frequency_plan[i];

			mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			/* R32<7:5> */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
				      freq_item->pllR32_b7b5);

			/* R32<4:0> pll_den: (Denominator - 8) */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
				      freq_item->pllR32_b4b0);

			/* R31<7:5> */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
				      freq_item->pllR31_b7b5);

			/* R31<4:0> pll_k (numerator) */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
				      freq_item->pllR31_b4b0);

			/* R30<7> sdm_reset_n */
			if (b_sdm) {
				mt76x0_rf_clear(dev, MT_RF(0, 30),
						MT_RF_SDM_RESET_MASK);
				mt76x0_rf_set(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK);
			} else {
				mt76x0_rf_rmw(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK,
					      freq_item->pllR30_b7);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			mt76x0_rf_rmw(dev, MT_RF(0, 30),
				      MT_RF_SDM_MASH_PRBS_MASK,
				      freq_item->pllR30_b6b2);

			/* R30<1> sdm_bp */
			mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
				      freq_item->pllR30_b1 << 1);

			/* R30<0> R29<7:0> (hex) pll_n */
			mt76x0_rf_wr(dev, MT_RF(0, 29),
				     freq_item->pll_n & 0xff);
			mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
				      (freq_item->pll_n >> 8) & 0x1);

			/* R28<7:6> isi_iso */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
				      freq_item->pllR28_b7b6);

			/* R28<5:4> pfd_dly */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
				      freq_item->pllR28_b5b4);

			/* R28<3:2> clksel option */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
				      freq_item->pllR28_b3b2);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			mt76x0_rf_wr(dev, MT_RF(0, 26),
				     freq_item->pll_sdm_k & 0xff);
			mt76x0_rf_wr(dev, MT_RF(0, 27),
				     (freq_item->pll_sdm_k >> 8) & 0xff);
			mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
				      (freq_item->pll_sdm_k >> 16) & 0x3);

			/* R24<1:0> xo_div */
			mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
				      freq_item->pllR24_b1b0);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mt76_clear(dev, MT_RF_MISC, 0xc);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(dev, band)) {
		/* MT_RF_MISC (offset: 0x0518)
		 * [2]1'b1: enable external A band PA
		 *    1'b0: disable external A band PA
		 * [3]1'b1: enable external G band PA
		 *    1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND)
			mt76_set(dev, MT_RF_MISC, BIT(2));
		else
			mt76_set(dev, MT_RF_MISC, BIT(3));

		/* external PA settings */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				mt76x0_rf_wr(dev,
					     mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
					     mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 for G band, disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0
		 * for ext A band, disable Tx Inc dcoc Cal.
		 */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

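/*
 * Antenna/coexistence setup derived from the EEPROM (MT_EE_ANTENNA,
 * MT_EE_CFG1_INIT, MT_EE_NIC_CONF_2): dual vs. single antenna and antenna
 * diversity are reflected into WLAN_FUN_CTRL, CMB_CTRL and the COEXCFG
 * registers.
 */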
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
	u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
	u16 ee_cfg1 = mt76x02_eeprom_get(dev, MT_EE_CFG1_INIT);
	u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
	u32 wlan, coex3;
	bool ant_div;

	wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
	coex3 = mt76_rr(dev, MT_COEXCFG3);

	ee_ant &= ~(BIT(14) | BIT(12));
	wlan &= ~(BIT(6) | BIT(5));
	coex3 &= ~GENMASK(5, 2);

	if (ee_ant & MT_EE_ANTENNA_DUAL) {
		/* dual antenna mode */
		ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
			  (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
		if (ant_div)
			ee_ant |= BIT(12);
		else
			coex3 |= BIT(4);
		coex3 |= BIT(3);
		if (dev->mt76.cap.has_2ghz)
			wlan |= BIT(6);
	} else {
		/* single antenna mode */
		if (dev->mt76.cap.has_5ghz) {
			coex3 |= BIT(3) | BIT(4);
		} else {
			wlan |= BIT(6);
			coex3 |= BIT(1);
		}
	}

	if (is_mt7630(dev))
		ee_ant |= BIT(14) | BIT(11);

	mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
	mt76_rmw(dev, MT_CMB_CTRL, GENMASK(15, 0), ee_ant);
	mt76_rmw(dev, MT_CSR_EE_CFG1, GENMASK(15, 0), ee_cfg1);
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
	default:
		/* not supported */
		return;
	}

	mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

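/*
 * Measure the TSSI DC offset with the ADDA path bypassed and TX driven
 * from DAC0, and cache it in dev->cal.tssi_dc for the later TSSI
 * compensation.
 */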
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	u32 val;

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

	/* bypass ADDA control */
	mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
	mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	/* enable TX with DAC0 input */
	mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

	mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
	dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* stop bypass ADDA */
	mt76_wr(dev, MT_RF_BYPASS_0, 0);
	/* stop TX */
	mt76_wr(dev, MT_BBP(TXBE, 6), 0);
	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
			      u8 *info)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	u32 val;

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		return -ETIMEDOUT;
	}

	*ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (chan->band == NL80211_BAND_5GHZ)
		*ltssi += 128;

	/* set packet info#1 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
	info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#2 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
	info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#3 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
	info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
				    int index, u8 tx_rate)
{
	u32 reg, val;

	reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
	val = mt76_rr(dev, reg);
	return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
			    u8 *info, s8 *target_power,
			    s8 *target_pa_power)
{
	u8 tx_rate, cur_power;

	cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
	switch (tx_mode) {
	case 0:
		/* cck rates */
		tx_rate = (info[0] & 0x60) >> 5;
		if (tx_rate > 3)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
		break;
	case 1: {
		u8 index;

		/* ofdm rates */
		tx_rate = (info[0] & 0xf0) >> 4;
		switch (tx_rate) {
		case 0xb:
			index = 0;
			break;
		case 0xf:
			index = 1;
			break;
		case 0xa:
			index = 2;
			break;
		case 0xe:
			index = 3;
			break;
		case 0x9:
			index = 4;
			break;
		case 0xd:
			index = 5;
			break;
		case 0x8:
			index = 6;
			break;
		case 0xc:
			index = 7;
			break;
		default:
			return -EINVAL;
		}

		*target_power = cur_power + dev->mt76.rate_power.ofdm[index];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
		break;
	}
	case 4:
		/* vht rates */
		tx_rate = info[1] & 0xf;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	default:
		/* ht rates */
		tx_rate = info[1] & 0x7f;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	}

	return 0;
}

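/*
 * Fixed-point linear-to-dB helper used by the TSSI delta-power math below:
 * the value is normalized to a log2 estimate and then scaled by roughly
 * 6.02 (the (ret << 2) + (ret << 1) + ... step), i.e. an approximation of
 * 20*log10(). The exact output scaling is tied to the caller's units.
 */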
static s16 mt76x0_phy_lin2db(u16 val)
{
	u32 mantissa = val << 4;
	int ret, data;
	s16 exp = -4;

	while (mantissa < BIT(15)) {
		mantissa <<= 1;
		if (--exp < -20)
			return -10000;
	}
	while (mantissa > 0xffff) {
		mantissa >>= 1;
		if (++exp > 20)
			return -10000;
	}

	if (mantissa <= 47104)
		data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
	else
		data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
	data = max_t(int, 0, data);

	ret = ((15 + exp) << 15) + data;
	ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
	return ret >> 10;
}

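/*
 * Compute the TX ALC temperature-compensation code from the measured LTSSI
 * and the target power. The intermediate math is kept in 1/8192 dB units
 * (see the "x * 8192" comments); the EEPROM supplies the TSSI slope and
 * offset, and the result is clamped to the signed 6-bit range of
 * MT_TX_ALC_CFG_1_TEMP_COMP.
 */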
static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
			   s8 target_power, s8 target_pa_power,
			   s16 ltssi)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	int tssi_target = target_power << 12, tssi_slope;
	int tssi_offset, tssi_db, ret;
	u32 data;
	u16 val;

	if (chan->band == NL80211_BAND_5GHZ) {
		u8 bound[7];
		int i, err;

		err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
					  sizeof(bound));
		if (err < 0)
			return err;

		for (i = 0; i < ARRAY_SIZE(bound); i++) {
			if (chan->hw_value <= bound[i] || !bound[i])
				break;
		}
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

		tssi_offset = val >> 8;
		if ((tssi_offset >= 64 && tssi_offset <= 127) ||
		    (tssi_offset & BIT(7)))
			tssi_offset -= BIT(8);
	} else {
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

		tssi_offset = val >> 8;
		if (tssi_offset & BIT(7))
			tssi_offset -= BIT(8);
	}
	tssi_slope = val & 0xff;

	switch (target_pa_power) {
	case 1:
		if (chan->band == NL80211_BAND_2GHZ)
			tssi_target += 29491; /* 3.6 * 8192 */
		fallthrough;
	case 0:
		break;
	default:
		tssi_target += 4424; /* 0.54 * 8192 */
		break;
	}

	data = mt76_rr(dev, MT_BBP(CORE, 1));
	if (is_mt7630(dev) && mt76_is_mmio(&dev->mt76)) {
		int offset;

		/* 2.3 * 8192 or 1.5 * 8192 */
		offset = (data & BIT(5)) ? 18841 : 12288;
		tssi_target += offset;
	} else if (data & BIT(5)) {
		/* 0.8 * 8192 */
		tssi_target += 6554;
	}

	data = mt76_rr(dev, MT_BBP(TXBE, 4));
	switch (data & 0x3) {
	case 1:
		tssi_target -= 49152; /* -6db * 8192 */
		break;
	case 2:
		tssi_target -= 98304; /* -12db * 8192 */
		break;
	case 3:
		tssi_target += 49152; /* 6db * 8192 */
		break;
	default:
		break;
	}

	tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
	if (chan->band == NL80211_BAND_5GHZ) {
		tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
		tssi_target -= tssi_db;
		if (ltssi > 254 && tssi_target > 0) {
			/* upper saturate */
			tssi_target = 0;
		}
	} else {
		tssi_db += (tssi_offset << 9); /* offset s3.4 */
		tssi_target -= tssi_db;
		/* upper-lower saturate */
		if ((ltssi > 126 && tssi_target > 0) ||
		    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
			tssi_target = 0;
		}
	}

	if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
	    dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
	    tssi_target > -4096 && tssi_target < 4096) {
		if ((tssi_target < 0 &&
		     tssi_target + dev->cal.tssi_target > 0) ||
		    (tssi_target > 0 &&
		     tssi_target + dev->cal.tssi_target <= 0))
			tssi_target = 0;
		else
			dev->cal.tssi_target = tssi_target;
	} else {
		dev->cal.tssi_target = tssi_target;
	}

	/* round the compensate value to the nearest compensate code */
	if (tssi_target > 0)
		tssi_target += 2048;
	else
		tssi_target -= 2048;
	tssi_target >>= 12;

	ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
	if (ret & BIT(5))
		ret -= BIT(6);
	ret += tssi_target;

	ret = min_t(int, 31, ret);
	return max_t(int, -32, ret);
}

static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
	s8 target_power, target_pa_power;
	u8 tssi_info[3], tx_mode;
	s16 ltssi;
	s8 val;

	if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
		return;

	tx_mode = tssi_info[0] & 0x7;
	if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
					&target_power, &target_pa_power) < 0)
		return;

	val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
					 target_pa_power, ltssi);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76_rate_power *t = &dev->mt76.rate_power;
	s8 info;

	mt76x0_get_tx_power_per_rate(dev, dev->mphy.chandef.chan, t);
	mt76x0_get_power_info(dev, dev->mphy.chandef.chan, &info);

	mt76x02_add_rate_power_offset(t, info);
	mt76x02_limit_rate_power(t, dev->txpower_conf);
	dev->mphy.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info);

	dev->target_power = info;
	mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
	u32 val, tx_alc, reg_val;

	if (is_mt7630(dev))
		return;

	if (power_on) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
		usleep_range(10, 20);

		if (mt76x0_tssi_enabled(dev)) {
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_RX);
			mt76x0_phy_tssi_dc_calibrate(dev);
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_TX |
				MT_MAC_SYS_CTRL_ENABLE_RX);
		}
	}

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
	mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

	if (is_5ghz) {
		if (chan->hw_value < 100)
			val = 0x701;
		else if (chan->hw_value < 140)
			val = 0x801;
		else
			val = 0x901;
	} else {
		val = 0x600;
	}

	mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
	mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	usleep_range(15000, 20000);

	mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

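/*
 * Channel switch: derive the control channel and group index from the
 * chandef, program bandwidth and band on both the mt76x02 core and the
 * mt76x0 RF, set the Japan channel-14 TX filter, reload per-channel BBP
 * and gain values, and (unless scanning) re-run calibration and TX power
 * setup before re-arming the calibration work.
 */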
void mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			    struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mphy.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	if (mt76_is_usb(&dev->mt76)) {
		mt76x0_phy_bbp_set_bw(dev, chandef->width);
	} else {
		if (chandef->width == NL80211_CHAN_WIDTH_80 ||
		    chandef->width == NL80211_CHAN_WIDTH_40)
			val = 0x201;
		else
			val = 0x601;
		mt76_wr(dev, MT_TX_SW_CFG0, val);
	}
	mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
	mt76x02_phy_set_band(dev, chandef->chan->band,
			     ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	if (channel == 14)
		mt76_set(dev, MT_BBP(CORE, 1), 0x20);
	else
		mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

	mt76x0_read_rx_gain(dev);
	mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

	/* enable vco */
	mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
	if (scan)
		return;

	mt76x02_init_agc_gain(dev);
	mt76x0_phy_calibrate(dev, false);
	mt76x0_phy_set_txpower(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}

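/*
 * Read the on-chip temperature sensor through B7.R73/B0.R66/B0.R67 and
 * re-run VCO calibration (drift > 20 degrees) or a full calibration
 * (drift > 30 degrees), restoring the original RF register values
 * afterwards. The conversion uses the EEPROM temperature offset.
 */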
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	s8 val;

	rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

	mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
	mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
	mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		goto done;
	}

	val = mt76_rr(dev, MT_BBP(CORE, 35));
	val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

	if (abs(val - dev->cal.temp_vco) > 20) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
				      dev->mphy.chandef.chan->hw_value);
		dev->cal.temp_vco = val;
	}
	if (abs(val - dev->cal.temp) > 30) {
		mt76x0_phy_calibrate(dev, false);
		dev->cal.temp = val;
	}

done:
	mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
	u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

	if ((dev->mphy.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
	    !is_mt7630(dev))
		mt76x02_phy_dfs_adjust_agc(dev);
}

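/*
 * Periodic AGC adjustment: classify the minimum average RSSI against the
 * mt76x02 gain thresholds and adjust the AGC gain accordingly, clearing
 * the false-CCA counter after a gain change.
 */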
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
	bool gain_change;
	u8 gain_delta;
	int low_gain;

	dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76, false);
	if (!dev->cal.avg_rssi_all)
		dev->cal.avg_rssi_all = -75;

	low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

	gain_change = dev->cal.low_gain < 0 ||
		      (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		if (mt76x02_phy_adjust_vga_gain(dev))
			mt76x0_phy_set_gain_val(dev);
		return;
	}

	dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
	gain_delta = (low_gain == 2) ? 10 : 0;

	dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
	mt76x0_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_phy_update_channel_gain(dev);
	if (mt76x0_tssi_enabled(dev))
		mt76x0_phy_tssi_calibrate(dev);
	else
		mt76x0_phy_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     4 * MT_CALIBRATE_INTERVAL);
}

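/*
 * Write an RF initval table, patching the few entries whose values differ
 * between the MT7610/MT7630 variants and between the MMIO and USB flavours
 * of the chip.
 */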
static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
				      const struct mt76_reg_pair *rp, int len)
{
	int i;

	for (i = 0; i < len; i++) {
		u32 reg = rp[i].reg;
		u8 val = rp[i].value;

		switch (reg) {
		case MT_RF(0, 3):
			if (mt76_is_mmio(&dev->mt76)) {
				if (is_mt7630(dev))
					val = 0x70;
				else
					val = 0x63;
			} else {
				val = 0x73;
			}
			break;
		case MT_RF(0, 21):
			if (is_mt7610e(dev))
				val = 0x10;
			else
				val = 0x12;
			break;
		case MT_RF(5, 2):
			if (is_mt7630(dev))
				val = 0x1d;
			else if (is_mt7610e(dev))
				val = 0x00;
			else
				val = 0x0c;
			break;
		default:
			break;
		}
		mt76x0_rf_wr(dev, reg, val);
	}
}

static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
	int i;

	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
				  ARRAY_SIZE(mt76x0_rf_central_tab));
	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
				  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) ==
			 (RF_G_BAND | RF_BW_20))
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/* Frequency calibration
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	mt76x0_rf_wr(dev, MT_RF(0, 22),
		     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	mt76x0_rf_rr(dev, MT_RF(0, 22));

	/* Reset procedure DAC during power-up:
	 * - set B0.R73<7>
	 * - clear B0.R73<7>
	 * - set B0.R73<7>
	 */
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

	/* vcocal_en: initiate VCO calibration (reset after completion) */
	mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

	mt76x0_phy_ant_select(dev);
	mt76x0_phy_rf_init(dev);
	mt76x02_phy_set_rxpath(dev);
	mt76x02_phy_set_txdac(dev);
}