// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "initvals_phy.h"
#include "../mt76x02_phy.h"
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mphy.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}
static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mphy.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}
static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (mt76_is_usb(&dev->mt76)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mphy.state));
		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}
static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (mt76_is_usb(&dev->mt76)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mphy.state));
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}
static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = mt76x0_rf_rr(dev, offset);
	if (ret < 0)
		return ret;

	val |= ret & ~mask;

	ret = mt76x0_rf_wr(dev, offset, val);
	return ret ? ret : val;
}
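/* Read-modify-write helper: the bits selected by "mask" are replaced with
 * "val" while all other bits read back from the register are preserved.
 * mt76x0_rf_set() and mt76x0_rf_clear() below are thin wrappers around it,
 * e.g. mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7)) only ORs in bit 7.
 */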
static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return mt76x0_rf_rmw(dev, offset, mask, 0);
}
static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
			const struct mt76_reg_pair *data,
			int n)
{
	while (n-- > 0) {
		mt76x0_rf_csr_wr(dev, data->reg, data->value);
		data++;
	}
}
#define RF_RANDOM_WRITE(dev, tab) do {					\
	if (mt76_is_mmio(&dev->mt76))					\
		mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));	\
	else								\
		mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)
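/* RF_RANDOM_WRITE() replays a table of mt76_reg_pair entries: on MMIO
 * devices it goes through the direct RF CSR path above, while on USB devices
 * the whole table is handed to the MCU via mt76_wr_rp(). See for example
 * RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab) below.
 */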
int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
	return 0;
}
static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel,
			      u16 rf_bw_band)
{
	const struct mt76x0_freq_item *freq_item;
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	bool b_sdm = false;
	u32 mac_reg;
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			b_sdm = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (b_sdm)
				freq_item = &mt76x0_sdm_frequency_plan[i];
			else
				freq_item = &mt76x0_frequency_plan[i];
			mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
				      freq_item->pllR32_b7b5);

			/* R32<4:0> pll_den: (denominator - 8) */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
				      freq_item->pllR32_b4b0);

			mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
				      freq_item->pllR31_b7b5);

			/* R31<4:0> pll_k (numerator) */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
				      freq_item->pllR31_b4b0);
			/* R30<7> sdm_reset_n */
			if (b_sdm) {
				mt76x0_rf_clear(dev, MT_RF(0, 30),
						MT_RF_SDM_RESET_MASK);
				mt76x0_rf_set(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK);
			} else {
				mt76x0_rf_rmw(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK,
					      freq_item->pllR30_b7);
			}

			/* R30<6:2> sdmmash_prbs, sin */
			mt76x0_rf_rmw(dev, MT_RF(0, 30),
				      MT_RF_SDM_MASH_PRBS_MASK,
				      freq_item->pllR30_b6b2);

			/* R30<1> sdm_bp */
			mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
				      freq_item->pllR30_b1 << 1);

			/* R30<0>, R29<7:0> (hex): pll_n */
			mt76x0_rf_wr(dev, MT_RF(0, 29),
				     freq_item->pll_n & 0xff);
			mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
				      (freq_item->pll_n >> 8) & 0x1);
			/* R28<7:6> isi_iso */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
				      freq_item->pllR28_b7b6);

			/* R28<5:4> pfd_dly */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
				      freq_item->pllR28_b5b4);

			/* R28<3:2> clksel option */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
				      freq_item->pllR28_b3b2);

			/* R28<1:0>, R27<7:0>, R26<7:0> (hex): sdm_k */
			mt76x0_rf_wr(dev, MT_RF(0, 26),
				     freq_item->pll_sdm_k & 0xff);
			mt76x0_rf_wr(dev, MT_RF(0, 27),
				     (freq_item->pll_sdm_k >> 8) & 0xff);
			mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
				      (freq_item->pll_sdm_k >> 16) & 0x3);

			/* R24<1:0> xo_div */
			mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
				      freq_item->pllR24_b1b0);

			break;
		}
	}
	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		}
	}
	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}
	mt76_clear(dev, MT_RF_MISC, 0xc);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(dev, band)) {
		/* MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA
		 *     1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA
		 *     1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND)
			mt76_set(dev, MT_RF_MISC, BIT(2));
		else
			mt76_set(dev, MT_RF_MISC, BIT(3));
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				mt76x0_rf_wr(dev,
					     mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
					     mt76x0_rf_ext_pa_tab[i].value);
	}
	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* set Atten mode = 2 for G band, disable Tx Inc dcoc */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* set Atten mode = 0,
		 * for ext A band, disable Tx Inc dcoc cal
		 */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}
static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
	u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
	u16 ee_cfg1 = mt76x02_eeprom_get(dev, MT_EE_CFG1_INIT);
	u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
	u32 wlan, coex3;
	bool ant_div;

	wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
	coex3 = mt76_rr(dev, MT_COEXCFG3);

	ee_ant &= ~(BIT(14) | BIT(12));
	wlan &= ~(BIT(6) | BIT(5));
	coex3 &= ~GENMASK(5, 2);

	if (ee_ant & MT_EE_ANTENNA_DUAL) {
		/* dual antenna mode */
		ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
			  (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
		if (ant_div)
			ee_ant |= BIT(12);
		else
			coex3 |= BIT(4);
		coex3 |= BIT(3);
		if (dev->mphy.cap.has_2ghz)
			wlan |= BIT(6);
	} else {
		/* single antenna mode */
		if (dev->mphy.cap.has_5ghz) {
			coex3 |= BIT(3) | BIT(4);
		}
	}

	if (is_mt7630(dev))
		ee_ant |= BIT(14) | BIT(11);

	mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
	mt76_rmw(dev, MT_CMB_CTRL, GENMASK(15, 0), ee_ant);
	mt76_rmw(dev, MT_CSR_EE_CFG1, GENMASK(15, 0), ee_cfg1);
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_wr(dev, MT_COEXCFG3, coex3);
}
static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		return;
	}

	mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	u32 val;

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

	/* bypass ADDA control */
	mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
	mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	/* enable TX with DAC0 input */
	mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

	mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
	dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* stop bypass ADDA */
	mt76_wr(dev, MT_RF_BYPASS_0, 0);
	/* stop TX */
	mt76_wr(dev, MT_BBP(TXBE, 6), 0);
	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}
static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
			      u8 *info)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	u32 val;

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		return -ETIMEDOUT;
	}

	*ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (chan->band == NL80211_BAND_5GHZ)
		*ltssi += 128;

	/* set packet info#1 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
	info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#2 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
	info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#3 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
	info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	return 0;
}
static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
				    int index, u8 tx_rate)
{
	u32 val, reg;

	reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
	val = mt76_rr(dev, reg);
	return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}
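/* MT_RF_PA_MODE_CFG0/1 pack one 2-bit PA mode per rate index, so the helper
 * above just shifts the 32-bit register right by (tx_rate * 2) and masks the
 * low two bits; e.g. tx_rate 3 selects bits 7:6 of the chosen register.
 */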
static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
			    u8 *info, s8 *target_power,
			    s8 *target_pa_power)
{
	u8 tx_rate, cur_power;

	cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
	switch (tx_mode) {
	case 0:
		/* cck rates */
		tx_rate = (info[0] & 0x60) >> 5;
		*target_power = cur_power + dev->rate_power.cck[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
		break;
	case 1: {
		u8 index;

		/* ofdm rates */
		tx_rate = (info[0] & 0xf0) >> 4;
		switch (tx_rate) {
		case 0xb: index = 0; break;
		case 0xf: index = 1; break;
		case 0xa: index = 2; break;
		case 0xe: index = 3; break;
		case 0x9: index = 4; break;
		case 0xd: index = 5; break;
		case 0x8: index = 6; break;
		case 0xc: index = 7; break;
		default:
			return -EINVAL;
		}

		*target_power = cur_power + dev->rate_power.ofdm[index];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
		break;
	}
	case 4:
		/* vht rates */
		tx_rate = info[1] & 0xf;
		*target_power = cur_power;
		if (tx_rate > 7)
			*target_power += dev->rate_power.vht[tx_rate - 8];
		else
			*target_power += dev->rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	default:
		/* ht rates */
		tx_rate = info[1] & 0x7f;
		*target_power = cur_power + dev->rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	}

	return 0;
}
static s16 mt76x0_phy_lin2db(u16 val)
{
	u32 mantissa = val << 4;
	int ret, data;
	s16 exp = -4;

	while (mantissa < BIT(15)) {
		mantissa <<= 1;
		if (--exp < -20)
			return -10000;
	}
	while (mantissa > 0xffff) {
		mantissa >>= 1;
		if (++exp > 20)
			return -10000;
	}

	/* s(15,0) */
	if (mantissa <= 47104)
		data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
	else
		data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
	data = max_t(int, 0, data);

	ret = ((15 + exp) << 15) + data;
	ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
	return ret >> 10;
}
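/* mt76x0_phy_lin2db() is a fixed-point logarithm: the two loops normalize
 * the mantissa into [2^15, 2^16) while tracking the binary exponent, the
 * piecewise-linear expression approximates log2() of the mantissa, and the
 * final shift-and-add multiplies by roughly 6.02 (20 * log10(2)) to turn
 * log2 into decibels before the result is scaled down.
 */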
static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
			   s8 target_power, s8 target_pa_power,
			   s16 ltssi)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	int tssi_target = target_power << 12, tssi_slope;
	int tssi_offset, tssi_db, ret;
	u32 data;
	u16 val;

	if (chan->band == NL80211_BAND_5GHZ) {
		u8 bound[7];
		int i, err;

		err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
					  sizeof(bound));
		if (err < 0)
			return err;

		for (i = 0; i < ARRAY_SIZE(bound); i++) {
			if (chan->hw_value <= bound[i] || !bound[i])
				break;
		}

		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

		tssi_offset = val >> 8;
		if ((tssi_offset >= 64 && tssi_offset <= 127) ||
		    (tssi_offset & BIT(7)))
			tssi_offset -= BIT(8);
	} else {
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

		tssi_offset = val >> 8;
		if (tssi_offset & BIT(7))
			tssi_offset -= BIT(8);
	}
	tssi_slope = val & 0xff;
	switch (target_pa_power) {
	case 1:
		if (chan->band == NL80211_BAND_2GHZ)
			tssi_target += 29491; /* 3.6 * 8192 */
		break;
	default:
		tssi_target += 4424; /* 0.54 * 8192 */
		break;
	}

	data = mt76_rr(dev, MT_BBP(CORE, 1));
	if (is_mt7630(dev) && mt76_is_mmio(&dev->mt76)) {
		int offset;

		/* 2.3 * 8192 or 1.5 * 8192 */
		offset = (data & BIT(5)) ? 18841 : 12288;
		tssi_target += offset;
	} else if (data & BIT(5)) {
		/* 0.9 * 8192 */
		tssi_target += 7373;
	}

	data = mt76_rr(dev, MT_BBP(TXBE, 4));
	switch (data & 0x3) {
	case 1:
		tssi_target -= 49152; /* -6db * 8192 */
		break;
	case 2:
		tssi_target -= 98304; /* -12db * 8192 */
		break;
	case 3:
		tssi_target += 49152; /* 6db * 8192 */
		break;
	default:
		break;
	}
	tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
	if (chan->band == NL80211_BAND_5GHZ) {
		tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
		tssi_target -= tssi_db;
		if (ltssi > 254 && tssi_target > 0) {
			/* upper saturate */
			tssi_target = 0;
		}
	} else {
		tssi_db += (tssi_offset << 9); /* offset s3.4 */
		tssi_target -= tssi_db;
		/* upper-lower saturate */
		if ((ltssi > 126 && tssi_target > 0) ||
		    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
			tssi_target = 0;
		}
	}

	if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
	    dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
	    tssi_target > -4096 && tssi_target < 4096) {
		if ((tssi_target < 0 &&
		     tssi_target + dev->cal.tssi_target > 0) ||
		    (tssi_target > 0 &&
		     tssi_target + dev->cal.tssi_target <= 0))
			tssi_target = 0;
		else
			dev->cal.tssi_target = tssi_target;
	} else {
		dev->cal.tssi_target = tssi_target;
	}
	/* round the compensation value to the nearest compensation code */
	if (tssi_target > 0)
		tssi_target += 2048;
	else
		tssi_target -= 2048;
	tssi_target >>= 12;

	ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
	if (ret & BIT(5))
		ret -= BIT(6);
	ret += tssi_target;

	ret = min_t(int, 31, ret);
	return max_t(int, -32, ret);
}
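/* The TSSI math above works in units of 1/8192 dB (hence the "x * 8192"
 * comments): target_power << 12 puts the power code on that scale (one code
 * step = 4096 = 0.5 dB), the +/-2048 followed by >> 12 rounds back to whole
 * compensation steps, and the result is clamped to the signed 6-bit range
 * (-32..31) that fits the MT_TX_ALC_CFG_1_TEMP_COMP field.
 */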
static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
	s8 target_power, target_pa_power;
	u8 tssi_info[3], tx_mode;
	s16 ltssi;
	s8 val;

	if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
		return;

	tx_mode = tssi_info[0] & 0x7;
	if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
					&target_power, &target_pa_power) < 0)
		return;

	val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
					 target_pa_power, ltssi);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}
void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76x02_rate_power *t = &dev->rate_power;
	s8 info;

	mt76x0_get_tx_power_per_rate(dev, dev->mphy.chandef.chan, t);
	mt76x0_get_power_info(dev, dev->mphy.chandef.chan, &info);

	mt76x02_add_rate_power_offset(t, info);
	mt76x02_limit_rate_power(t, dev->txpower_conf);
	dev->mphy.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info);

	dev->target_power = info;
	mt76x02_phy_set_txpower(dev, info, info);
}
void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
	struct ieee80211_channel *chan = dev->mphy.chandef.chan;
	int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
	u32 val, tx_alc, reg_val;

	if (power_on) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
		usleep_range(10, 20);

		if (mt76x0_tssi_enabled(dev)) {
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_RX);
			mt76x0_phy_tssi_dc_calibrate(dev);
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_TX |
				MT_MAC_SYS_CTRL_ENABLE_RX);
		}
	}

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
	mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

	if (is_5ghz) {
		if (chan->hw_value < 100)
			val = 0x701;
		else if (chan->hw_value < 140)
			val = 0x801;
		else
			val = 0x901;
	} else {
		val = 0x600;
	}

	mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
	mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	usleep_range(15000, 20000);

	mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);
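/* Calibration bracketing in mt76x0_phy_calibrate(): TX ALC is parked
 * (MT_TX_ALC_CFG_0 written to 0) and BBP IBI(9) is masked while the MCU runs
 * the full and LC calibrations, then both registers are restored and an RX
 * DCOC calibration is kicked. The power-on path additionally runs R/VCO
 * calibration and, when TSSI is in use, the TSSI DC offset measurement with
 * only the RX path enabled.
 */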
void mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			    struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mphy.state);
	int ch_group_index, freq, freq1;
	u16 rf_bw_band;
	u8 channel;
	u32 val;
	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}
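	/* ch_group_index is the position of the control channel inside the
	 * wide channel: for 40 MHz it is 0 or 1 depending on whether
	 * center_freq1 lies above or below the control channel, and for
	 * 80 MHz (freq - freq1 + 30) / 20 maps the four possible 20 MHz
	 * offsets (-30, -10, +10, +30 MHz) to 0..3. The "channel += ..."
	 * adjustments above then re-center "channel" on the wide channel
	 * before the RF PLL is programmed.
	 */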
	if (mt76_is_usb(&dev->mt76)) {
		mt76x0_phy_bbp_set_bw(dev, chandef->width);
	} else {
		if (chandef->width == NL80211_CHAN_WIDTH_80 ||
		    chandef->width == NL80211_CHAN_WIDTH_40)
			val = 0x201;
		else
			val = 0x601;
		mt76_wr(dev, MT_TX_SW_CFG0, val);
	}
	mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
	mt76x02_phy_set_band(dev, chandef->chan->band,
			     ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);
	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	if (channel == 14)
		mt76_set(dev, MT_BBP(CORE, 1), 0x20);
	else
		mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

	mt76x0_read_rx_gain(dev);
	mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

	/* enable vco */
	mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
	if (scan)
		return;

	mt76x02_init_agc_gain(dev);
	mt76x0_phy_calibrate(dev, false);
	mt76x0_phy_set_txpower(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	s8 val;

	rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

	mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
	mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
	mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		goto done;
	}

	val = mt76_rr(dev, MT_BBP(CORE, 35));
	val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

	if (abs(val - dev->cal.temp_vco) > 20) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
				      dev->mphy.chandef.chan->hw_value);
		dev->cal.temp_vco = val;
	}
	if (abs(val - dev->cal.temp) > 30) {
		mt76x0_phy_calibrate(dev, false);
		dev->cal.temp = val;
	}

done:
	mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}
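/* The temperature read above converts the raw CORE(35) sample to degrees
 * Celsius with val = 35 * (raw - temp_offset) / 10 + 25, i.e. 3.5 degrees
 * per ADC step around a 25 degree reference; VCO calibration is retriggered
 * on a >20 degree drift and a full PHY recalibration on a >30 degree drift.
 */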
static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
	u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

	if ((dev->mphy.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
	    !is_mt7630(dev))
		mt76x02_phy_dfs_adjust_agc(dev);
}
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
	bool gain_change;
	u8 gain_delta;
	int low_gain;

	dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76, false);
	if (!dev->cal.avg_rssi_all)
		dev->cal.avg_rssi_all = -75;

	low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

	gain_change = dev->cal.low_gain < 0 ||
		      (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		if (mt76x02_phy_adjust_vga_gain(dev))
			mt76x0_phy_set_gain_val(dev);
		return;
	}

	dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
	gain_delta = (low_gain == 2) ? 10 : 0;

	dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
	mt76x0_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}
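/* low_gain is 0, 1 or 2 depending on how many of the two RSSI thresholds the
 * average RSSI exceeds; only a change of its high bit (crossing into or out
 * of the strong-signal regime) forces a full AGC reprogram, otherwise just a
 * fine VGA adjustment is applied. Reading MT_RX_STAT_1 clears the false-CCA
 * counters so the next adjustment starts from a clean sample.
 */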
static void mt76x0_phy_calibration_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_phy_update_channel_gain(dev);
	if (mt76x0_tssi_enabled(dev))
		mt76x0_phy_tssi_calibrate(dev);
	else
		mt76x0_phy_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     4 * MT_CALIBRATE_INTERVAL);
}
static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
				      const struct mt76_reg_pair *rp, int len)
{
	int i;

	for (i = 0; i < len; i++) {
		u32 reg = rp[i].reg;
		u8 val = rp[i].value;

		/* chip-specific overrides of a few bank/register defaults,
		 * keyed on mt76_is_mmio(&dev->mt76) and is_mt7610e(dev),
		 * are applied here before the value is written out
		 */
		mt76x0_rf_wr(dev, reg, val);
	}
}
static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
	int i;

	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
				  ARRAY_SIZE(mt76x0_rf_central_tab));
	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
				  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) ==
			 (RF_G_BAND | RF_BW_20))
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/* Frequency calibration
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	mt76x0_rf_wr(dev, MT_RF(0, 22),
		     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	mt76x0_rf_rr(dev, MT_RF(0, 22));

	/* Reset procedure DAC during power-up:
	 * - set B0.R73<7>
	 * - clear B0.R73<7>
	 * - set B0.R73<7>
	 */
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

	/* vcocal_en: initiate VCO calibration (reset after completion) */
	mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}
void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

	mt76x0_phy_ant_select(dev);
	mt76x0_phy_rf_init(dev);
	mt76x02_phy_set_rxpath(dev);
	mt76x02_phy_set_txdac(dev);
}