2 * (c) Copyright 2002-2010, Ralink Technology, Inc.
3 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
4 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
5 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2
9 * as published by the Free Software Foundation
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
17 #include <linux/kernel.h>
18 #include <linux/etherdevice.h>
25 #include "initvals_phy.h"
26 #include "../mt76x02_phy.h"
/*
 * Write an 8-bit @value to an RF register through the RF CSR window.
 * @offset packs the bank and register index (MT_RF_BANK/MT_RF_REG).
 * Serialized against concurrent CSR access via dev->phy_mutex.
 * NOTE(review): several interior lines of this function are not visible
 * in this excerpt (declarations, error paths, closing braces).
 */
29 mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
/* Device already unplugged — presumably bail out early; confirm return value */
34 if (test_bit(MT76_REMOVED, &dev->mt76.state))
37 bank = MT_RF_BANK(offset);
38 reg = MT_RF_REG(offset);
/* Hardware limits: 128 registers per bank, 9 banks (0-8) */
40 if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
43 mutex_lock(&dev->phy_mutex);
/* Wait for any in-flight CSR transaction (KICK bit) to complete */
45 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
/* Program data + address and (presumably) kick off the write */
50 mt76_wr(dev, MT_RF_CSR_CFG,
51 FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
52 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
53 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
58 mutex_unlock(&dev->phy_mutex);
/* Error path: report bank:reg and the failing status code */
61 dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
/*
 * Read an RF register through the RF CSR window.
 * @offset packs the bank and register index (MT_RF_BANK/MT_RF_REG).
 * Returns the 8-bit register value on success; error paths are partly
 * elided in this excerpt (presumably a negative errno).
 */
67 static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
73 if (test_bit(MT76_REMOVED, &dev->mt76.state))
76 bank = MT_RF_BANK(offset);
77 reg = MT_RF_REG(offset);
/* Hardware limits: 128 registers per bank, 9 banks (0-8) */
79 if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
82 mutex_lock(&dev->phy_mutex);
/* Wait until the CSR interface is idle before issuing the read */
84 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
87 mt76_wr(dev, MT_RF_CSR_CFG,
88 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
89 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
/* Wait for the read transaction to complete (KICK auto-clears) */
92 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
95 val = mt76_rr(dev, MT_RF_CSR_CFG);
/* Only accept the data if the hardware echoed back our bank:reg */
96 if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
97 FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
98 ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);
101 mutex_unlock(&dev->phy_mutex);
104 dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
/*
 * RF register write front-end: on USB devices the write goes through the
 * MCU as a reg-pair message (requires the MCU to be running), otherwise
 * it falls back to the direct CSR write path.
 */
111 mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
113 if (mt76_is_usb(dev)) {
114 struct mt76_reg_pair pair = {
/* Writing RF regs over USB before MCU init would silently fail */
119 WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
121 return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
/* MMIO path: direct CSR access */
123 return mt76x0_rf_csr_wr(dev, offset, val);
/*
 * RF register read front-end: USB devices read via an MCU reg-pair
 * request, MMIO devices via the direct CSR path. Returns the register
 * value, or a negative errno on failure.
 */
127 static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
132 if (mt76_is_usb(dev)) {
133 struct mt76_reg_pair pair = {
137 WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
139 ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
/* CSR read returns value-or-errno in one int; mirror it into both vars */
142 ret = val = mt76x0_rf_csr_rr(dev, offset);
145 return (ret < 0) ? ret : val;
/*
 * Read-modify-write of an RF register: clear the bits in @mask, OR in
 * @val. Returns the written value on success or a negative errno from
 * the underlying read/write (some interior lines elided here).
 */
149 mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
153 ret = mt76x0_rf_rr(dev, offset);
159 ret = mt76x0_rf_wr(dev, offset, val);
160 return ret ? ret : val;
/* Set bits in an RF register (rmw with an empty clear mask). */
164 mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
166 return mt76x0_rf_rmw(dev, offset, 0, val);
/* Clear the bits in @mask in an RF register (rmw writing no new bits). */
170 mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
172 return mt76x0_rf_rmw(dev, offset, mask, 0);
/*
 * Write an array of reg/value pairs through the direct CSR path.
 * Used by RF_RANDOM_WRITE() on MMIO devices; loop body partly elided.
 */
176 mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
177 const struct mt76_reg_pair *data,
181 mt76x0_rf_csr_wr(dev, data->reg, data->value);
/*
 * Bulk-program an RF reg-pair table, choosing the transport by bus type:
 * direct CSR writes on MMIO, an MCU reg-pair burst otherwise (USB).
 * @tab must be an array so ARRAY_SIZE() works.
 */
186 #define RF_RANDOM_WRITE(dev, tab) do { \
187 if (mt76_is_mmio(dev)) \
188 mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab)); \
190 mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
/*
 * Poll the BBP version register until the baseband reports ready.
 * Logs the BBP version on success; the retry loop and return values are
 * partly elided in this excerpt (presumably 0 / negative errno).
 */
193 int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
199 val = mt76_rr(dev, MT_BBP(CORE, 0));
205 dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
209 dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
/*
 * Select the 2.4 or 5 GHz RF front-end: load the per-band RF channel
 * table, swap the RF(5,0)/RF(6,0) values between bands, and program the
 * band-specific TX ALC / gain-correction MAC registers.
 */
214 mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
217 case NL80211_BAND_2GHZ:
218 RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
/* 2 GHz: RF(5,0)=0x45 / RF(6,0)=0x44 — mirrored on 5 GHz below */
220 mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
221 mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);
223 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
224 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
226 case NL80211_BAND_5GHZ:
227 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
229 mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
230 mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);
232 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
233 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
/*
 * Program the RF PLL and band/bandwidth switch tables for @channel.
 * @rf_bw_band packs the bandwidth (low byte) and band flags (high byte).
 * Picks a frequency plan entry (SDM variant for channels listed in
 * mt76x0_sdm_channel), writes the PLL R24..R37 fields bit-group by
 * bit-group, then applies the BW/band switch tables and external-PA
 * configuration. Some interior lines are elided in this excerpt.
 */
241 mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_band)
243 const struct mt76x0_freq_item *freq_item;
/* Split the packed argument: band flags in high byte, BW in low byte */
244 u16 rf_band = rf_bw_band & 0xff00;
245 u16 rf_bw = rf_bw_band & 0x00ff;
246 enum nl80211_band band;
/* Does this channel require the SDM (sigma-delta) frequency plan? */
251 for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
252 if (channel == mt76x0_sdm_channel[i]) {
/* Locate the channel in the frequency plan and take its band flags */
258 for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
259 if (channel == mt76x0_frequency_plan[i].channel) {
260 rf_band = mt76x0_frequency_plan[i].band;
263 freq_item = &(mt76x0_sdm_frequency_plan[i]);
265 freq_item = &(mt76x0_frequency_plan[i]);
/* PLL registers R33..R37 are written whole */
267 mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
268 mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
269 mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
270 mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
271 mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);
/* R32<7:5> */
273 mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
274 freq_item->pllR32_b7b5);
276 /* R32<4:0> pll_den: (Denomina - 8) */
277 mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
278 freq_item->pllR32_b4b0);
/* R31<7:5> */
281 mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
282 freq_item->pllR31_b7b5);
284 /* R31<4:0> pll_k(Nominator) */
285 mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
286 freq_item->pllR31_b4b0);
288 /* R30<7> sdm_reset_n */
/* SDM path: pulse the reset bit (clear then set) */
290 mt76x0_rf_clear(dev, MT_RF(0, 30),
291 MT_RF_SDM_RESET_MASK);
292 mt76x0_rf_set(dev, MT_RF(0, 30),
293 MT_RF_SDM_RESET_MASK);
/* non-SDM path: take the reset value from the frequency plan */
295 mt76x0_rf_rmw(dev, MT_RF(0, 30),
296 MT_RF_SDM_RESET_MASK,
297 freq_item->pllR30_b7);
300 /* R30<6:2> sdmmash_prbs,sin */
301 mt76x0_rf_rmw(dev, MT_RF(0, 30),
302 MT_RF_SDM_MASH_PRBS_MASK,
303 freq_item->pllR30_b6b2);
/* R30<1> sdm bypass — plan stores it at bit 0, shift into place */
306 mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
307 freq_item->pllR30_b1 << 1);
309 /* R30<0> R29<7:0> (hex) pll_n */
310 mt76x0_rf_wr(dev, MT_RF(0, 29),
311 freq_item->pll_n & 0xff);
/* bit 8 of pll_n lands in R30<0> */
313 mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
314 (freq_item->pll_n >> 8) & 0x1);
316 /* R28<7:6> isi_iso */
317 mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
318 freq_item->pllR28_b7b6);
320 /* R28<5:4> pfd_dly */
321 mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
322 freq_item->pllR28_b5b4);
324 /* R28<3:2> clksel option */
325 mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
326 freq_item->pllR28_b3b2);
328 /* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
/* 18-bit sdm_k spread little-endian over R26, R27 and R28<1:0> */
329 mt76x0_rf_wr(dev, MT_RF(0, 26),
330 freq_item->pll_sdm_k & 0xff);
331 mt76x0_rf_wr(dev, MT_RF(0, 27),
332 (freq_item->pll_sdm_k >> 8) & 0xff);
334 mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
335 (freq_item->pll_sdm_k >> 16) & 0x3);
337 /* R24<1:0> xo_div */
338 mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
339 freq_item->pllR24_b1b0);
/* Apply BW-dependent RF settings: exact BW match, or BW+band match */
345 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
346 if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
348 mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
349 mt76x0_rf_bw_switch_tab[i].value);
350 } else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
351 (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
353 mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
354 mt76x0_rf_bw_switch_tab[i].value);
/* Apply band-dependent RF settings */
358 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
359 if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
361 mt76x0_rf_band_switch_tab[i].rf_bank_reg,
362 mt76x0_rf_band_switch_tab[i].value);
/* Disable both external PA enables before (re)deciding below */
366 mt76_clear(dev, MT_RF_MISC, 0xc);
368 band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
369 if (mt76x02_ext_pa_enabled(dev, band)) {
371 MT_RF_MISC (offset: 0x0518)
372 [2]1'b1: enable external A band PA, 1'b0: disable external A band PA
373 [3]1'b1: enable external G band PA, 1'b0: disable external G band PA
375 if (rf_band & RF_A_BAND)
376 mt76_set(dev, MT_RF_MISC, BIT(2));
378 mt76_set(dev, MT_RF_MISC, BIT(3));
/* External-PA RF register overrides for this band */
381 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
382 if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
384 mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
385 mt76x0_rf_ext_pa_tab[i].value);
388 if (rf_band & RF_G_BAND) {
389 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
390 /* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
391 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
392 mac_reg &= 0x896400FF;
393 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
395 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
396 /* Set Atten mode = 0 For Ext A band, Disable Tx Inc dcoc Cal. */
397 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
398 mac_reg &= 0x890400FF;
399 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
/*
 * Program the per-band/bandwidth BBP register table. Entries whose
 * bw_band flags don't fully cover @rf_bw_band are skipped. The AGC gain
 * register (AGC,8) gets its gain field adjusted by the calibrated LNA
 * gain (2x, presumably half-dB units — confirm) before being written.
 */
404 mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
408 for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
409 const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
410 const struct mt76_reg_pair *pair = &item->reg_pair;
/* Entry must match every requested band/BW flag */
412 if ((rf_bw_band & item->bw_band) != rf_bw_band)
415 if (pair->reg == MT_BBP(AGC, 8)) {
416 u32 val = pair->value;
/* Fold the EEPROM-calibrated LNA gain into the table's AGC gain */
419 gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
420 gain -= dev->cal.rx.lna_gain * 2;
421 val &= ~MT_BBP_AGC_GAIN;
422 val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
423 mt76_wr(dev, pair->reg, val);
425 mt76_wr(dev, pair->reg, pair->value);
/*
 * Configure antenna selection / BT-coexistence routing from EEPROM
 * settings (MT_EE_ANTENNA, MT_EE_NIC_CONF_2). Clears then rebuilds the
 * relevant bits of WLAN_FUN_CTRL, CMB_CTRL and COEXCFG3. Several
 * branches are elided in this excerpt — bit meanings below are partly
 * assumptions to confirm against the datasheet.
 */
430 static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
432 u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
433 u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
434 u32 wlan, coex3, cmb;
437 wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
438 cmb = mt76_rr(dev, MT_CMB_CTRL);
439 coex3 = mt76_rr(dev, MT_COEXCFG3);
/* Reset the bits this function owns before rebuilding them */
441 cmb &= ~(BIT(14) | BIT(12));
442 wlan &= ~(BIT(6) | BIT(5));
443 coex3 &= ~GENMASK(5, 2);
445 if (ee_ant & MT_EE_ANTENNA_DUAL) {
446 /* dual antenna mode */
/* Diversity only when ANT_OPT is unset and ANT_DIV requested */
447 ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
448 (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
454 if (dev->mt76.cap.has_2ghz)
457 /* sigle antenna mode */
458 if (dev->mt76.cap.has_5ghz) {
459 coex3 |= BIT(3) | BIT(4);
467 cmb |= BIT(14) | BIT(11);
469 mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
470 mt76_wr(dev, MT_CMB_CTRL, cmb);
471 mt76_clear(dev, MT_COEXCFG0, BIT(2));
472 mt76_wr(dev, MT_COEXCFG3, coex3);
/*
 * Map an nl80211 channel width onto the firmware's bandwidth encoding
 * and apply it via the MCU BW_SETTING function-select command.
 * 80+80/160/5 MHz fall through together (presumably unsupported here).
 */
476 mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
/* Firmware encoding of the supported bandwidths */
478 enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4};
483 case NL80211_CHAN_WIDTH_20_NOHT:
484 case NL80211_CHAN_WIDTH_20:
487 case NL80211_CHAN_WIDTH_40:
490 case NL80211_CHAN_WIDTH_80:
493 case NL80211_CHAN_WIDTH_10:
496 case NL80211_CHAN_WIDTH_80P80:
497 case NL80211_CHAN_WIDTH_160:
498 case NL80211_CHAN_WIDTH_5:
503 mt76x02_mcu_function_select(dev, BW_SETTING, bw);
/*
 * Measure the TSSI DC offset and store it in dev->cal.tssi_dc.
 * Temporarily bypasses ADDA control and drives TX from DAC0 so the
 * BBP can sample the idle TSSI level, then restores normal operation.
 * On 5 GHz the RF(0,67) low nibble is cleared first and restored to 0x4
 * at the end.
 */
506 static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
508 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
511 if (chan->band == NL80211_BAND_5GHZ)
512 mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);
514 /* bypass ADDA control */
515 mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
516 mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);
/* Pulse BBP CORE,4 bit 0 — presumably a BBP soft reset; confirm */
519 mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
520 usleep_range(500, 1000);
521 mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));
/* Select the band-specific TSSI measurement mode */
523 val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
524 mt76_wr(dev, MT_BBP(CORE, 34), val);
526 /* enable TX with DAC0 input */
527 mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));
/* Wait for the measurement-busy bit to clear, then latch the DC value */
529 mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
530 dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
532 /* stop bypass ADDA */
533 mt76_wr(dev, MT_RF_BYPASS_0, 0);
/* disable the DAC0 TX path again */
535 mt76_wr(dev, MT_BBP(TXBE, 6), 0);
/* Second reset pulse to return the BBP to normal operation */
537 mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
538 usleep_range(500, 1000);
539 mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));
541 if (chan->band == NL80211_BAND_5GHZ)
542 mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
/*
 * Read the linear TSSI sample into *@ltssi and the three TX packet-info
 * bytes into @info[0..2] via BBP CORE,34/35. Returns an error (elided
 * here, presumably negative) if the measurement never completes.
 */
546 mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
549 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
/* Band-specific TSSI measurement mode, as in the DC calibration */
552 val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
553 mt76_wr(dev, MT_BBP(CORE, 34), val);
555 if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
/* Timed out: cancel the pending measurement */
556 mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
560 *ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
/* 5 GHz adjustment to ltssi elided in this excerpt */
561 if (chan->band == NL80211_BAND_5GHZ)
564 /* set packet info#1 mode */
565 mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
566 info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
568 /* set packet info#2 mode */
569 mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
570 info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
572 /* set packet info#3 mode */
573 mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
574 info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
/*
 * Extract the 2-bit PA mode for @tx_rate from one of the two
 * RF_PA_MODE_CFG registers (@index selects CFG1, anything else CFG0).
 */
579 static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
580 int index, u8 tx_rate)
584 reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
585 val = mt76_rr(dev, reg);
/* Each rate owns 2 bits: isolate and shift down */
586 return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
/*
 * Derive the TX target power and PA mode for the modulation in @info.
 * Decodes the rate index from the packet-info bytes per @tx_mode
 * (CCK / OFDM / HT / VHT branches — switch labels elided here), adds
 * the per-rate power offset to the current ALC channel-init power, and
 * looks up the matching PA mode. Returns via @target_power and
 * @target_pa_power; error handling is elided in this excerpt.
 */
590 mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
591 u8 *info, s8 *target_power,
594 u8 tx_rate, cur_power;
596 cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
/* CCK: rate in info[0] bits 6:5 */
600 tx_rate = (info[0] & 0x60) >> 5;
604 *target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
605 *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
/* OFDM: rate in info[0] bits 7:4 (PA-mode slots offset by 4) */
611 tx_rate = (info[0] & 0xf0) >> 4;
641 *target_power = cur_power + dev->mt76.rate_power.ofdm[index];
642 *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
/* VHT: MCS in info[1] bits 3:0 */
647 tx_rate = info[1] & 0xf;
651 *target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
652 *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
/* HT: MCS in info[1] bits 6:0 */
656 tx_rate = info[1] & 0x7f;
660 *target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
661 *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
/*
 * Fixed-point linear-to-dB conversion. Normalizes @val into a
 * mantissa/exponent form (mantissa in [2^15, 0xffff]), applies a
 * piecewise-linear log2 approximation, then scales the result —
 * presumably into a fixed-point dB value; exact output scaling depends
 * on elided lines (exponent updates, return statement).
 */
668 static s16 mt76x0_phy_lin2db(u16 val)
670 u32 mantissa = val << 4;
/* Normalize up: shift until mantissa >= 2^15 (exponent updates elided) */
674 while (mantissa < BIT(15)) {
/* Normalize down: shift until mantissa fits in 16 bits */
679 while (mantissa > 0xffff) {
/* Two-segment linear approximation of log2 over the normalized range */
686 if (mantissa <= 47104)
687 data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
689 data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
690 data = max_t(int, 0, data);
/* Combine exponent and fraction, then scale by shift-add arithmetic */
692 ret = ((15 + exp) << 15) + data;
693 ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
/*
 * Compute the TSSI temperature-compensation delta (returned clamped to
 * [-32, 31]) for the current channel. Builds a fixed-point target
 * (power << 12, i.e. 8192 per dB), adjusts it by EEPROM TSSI
 * slope/offset, PA mode, board-specific offsets and the TXBE,4
 * attenuation setting, then compares against the measured TSSI.
 * Several lines are elided; comments below describe only visible code.
 */
698 mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
699 s8 target_power, s8 target_pa_power,
702 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
/* 8192 units per dB throughout this function */
703 int tssi_target = target_power << 12, tssi_slope;
704 int tssi_offset, tssi_db, ret;
708 if (chan->band == NL80211_BAND_5GHZ) {
/* 5 GHz: slope/offset are per channel-group, bounded by EEPROM table */
712 err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
/* Find the first group whose upper bound covers this channel */
717 for (i = 0; i < ARRAY_SIZE(bound); i++) {
718 if (chan->hw_value <= bound[i] || !bound[i])
721 val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);
/* offset is a signed byte in the high half of the EEPROM word */
723 tssi_offset = val >> 8;
724 if ((tssi_offset >= 64 && tssi_offset <= 127) ||
725 (tssi_offset & BIT(7)))
726 tssi_offset -= BIT(8);
728 val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);
/* 2 GHz: plain sign-extension of the high byte */
730 tssi_offset = val >> 8;
731 if (tssi_offset & BIT(7))
732 tssi_offset -= BIT(8);
734 tssi_slope = val & 0xff;
/* PA-mode dependent correction (case labels elided) */
736 switch (target_pa_power) {
738 if (chan->band == NL80211_BAND_2GHZ)
739 tssi_target += 29491; /* 3.6 * 8192 */
744 tssi_target += 4424; /* 0.54 * 8192 */
749 data = mt76_rr(dev, MT_BBP(CORE, 1));
/* MT7630 PCIe boards use a different pair of fixed offsets */
750 if (is_mt7630(dev) && mt76_is_mmio(dev)) {
753 /* 2.3 * 8192 or 1.5 * 8192 */
754 offset = (data & BIT(5)) ? 18841 : 12288;
755 tssi_target += offset;
756 } else if (data & BIT(5)) {
/* TXBE,4 bits 1:0 encode a TX attenuation step — fold it in as dB */
762 data = mt76_rr(dev, MT_BBP(TXBE, 4));
763 switch (data & 0x3) {
765 tssi_target -= 49152; /* -6db * 8192 */
768 tssi_target -= 98304; /* -12db * 8192 */
771 tssi_target += 49152; /* 6db * 8192 */
/* Convert the DC-corrected measurement to dB and scale by slope */
777 tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
778 if (chan->band == NL80211_BAND_5GHZ) {
779 tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
780 tssi_target -= tssi_db;
/* saturation guard (body elided) */
781 if (ltssi > 254 && tssi_target > 0) {
786 tssi_db += (tssi_offset << 9); /* offset s3.4 */
787 tssi_target -= tssi_db;
788 /* upper-lower saturate */
789 if ((ltssi > 126 && tssi_target > 0) ||
790 ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
/* Damp small sign flips around zero to avoid oscillating compensation */
795 if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
796 dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
797 tssi_target > -4096 && tssi_target < 4096) {
798 if ((tssi_target < 0 &&
799 tssi_target + dev->cal.tssi_target > 0) ||
801 tssi_target + dev->cal.tssi_target <= 0))
804 dev->cal.tssi_target = tssi_target;
806 dev->cal.tssi_target = tssi_target;
809 /* make the compensate value to the nearest compensate code */
/* Start from the current TEMP_COMP field and clamp to its 6-bit range */
816 ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
821 ret = min_t(int, 31, ret);
822 return max_t(int, -32, ret);
/*
 * Run one TSSI compensation pass: sample the TSSI ADC, derive the
 * target power/PA mode for the observed modulation, compute the
 * compensation delta and write it into the ALC TEMP_COMP field.
 * Bails out silently if sampling or target lookup fails.
 */
825 static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
827 s8 target_power, target_pa_power;
828 u8 tssi_info[3], tx_mode;
832 if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
/* TX mode lives in the low 3 bits of packet info #1 */
835 tx_mode = tssi_info[0] & 0x7;
836 if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
837 &target_power, &target_pa_power) < 0)
840 val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
841 target_pa_power, ltssi);
842 mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
/*
 * Recompute and apply TX power: load per-rate power and board power
 * info, apply the configured limit on top of the base offset (info[0]),
 * cache the resulting maximum, then program the PHY with both offsets.
 */
845 void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
847 struct mt76_rate_power *t = &dev->mt76.rate_power;
850 mt76x0_get_tx_power_per_rate(dev);
851 mt76x0_get_power_info(dev, info);
/* Temporarily bias by info[0] so the limit applies to absolute power */
853 mt76x02_add_rate_power_offset(t, info[0]);
854 mt76x02_limit_rate_power(t, dev->mt76.txpower_conf);
855 dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
/* Undo the bias — the PHY call below takes info[0] separately */
856 mt76x02_add_rate_power_offset(t, -info[0]);
858 mt76x02_phy_set_txpower(dev, info[0], info[1]);
/*
 * Full PHY calibration sequence for the current channel: R/VCO MCU
 * calibrations, optional TSSI DC calibration (RX-only while measuring),
 * then full/LC calibration with TX ALC disabled and BBP IBI,9 forced,
 * finishing with RX DCOC. @power_on usage is elided in this excerpt.
 */
861 void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
863 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
864 int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
865 u32 val, tx_alc, reg_val;
871 mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
872 mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
873 usleep_range(10, 20);
875 if (mt76x0_tssi_enabled(dev)) {
/* RX-only while measuring the TSSI DC offset, then restore TX+RX */
876 mt76_wr(dev, MT_MAC_SYS_CTRL,
877 MT_MAC_SYS_CTRL_ENABLE_RX);
878 mt76x0_phy_tssi_dc_calibrate(dev);
879 mt76_wr(dev, MT_MAC_SYS_CTRL,
880 MT_MAC_SYS_CTRL_ENABLE_TX |
881 MT_MAC_SYS_CTRL_ENABLE_RX);
/* Disable automatic level control for the duration of calibration */
885 tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
886 mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
887 usleep_range(500, 700);
/* Save and override BBP IBI,9 — restored after calibration below */
889 reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
890 mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);
/* Channel-range dependent calibration parameter (values elided) */
893 if (chan->hw_value < 100)
895 else if (chan->hw_value < 140)
903 mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
905 mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
906 usleep_range(15000, 20000);
908 mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
909 mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
910 mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
912 EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);
/*
 * Switch the PHY to @chandef: compute the primary-channel offset within
 * 40/80 MHz bandwidths, program bandwidth/band (BBP + MAC + RF),
 * extension-CCA mapping, the channel-14 Japan TX filter, RX gain and
 * BBP tables, then (unless scanning — branch elided) run calibration,
 * TX power setup and schedule the periodic calibration work.
 */
914 int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
915 struct cfg80211_chan_def *chandef)
/* Per-group CCA routing: rotate which CCA engine watches which 20 MHz */
917 u32 ext_cca_chan[4] = {
918 [0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
919 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
920 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
921 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
922 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
923 [1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
924 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
925 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
926 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
927 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
928 [2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
929 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
930 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
931 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
932 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
933 [3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
934 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
935 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
936 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
937 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
939 bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
940 int ch_group_index, freq, freq1;
945 freq = chandef->chan->center_freq;
946 freq1 = chandef->center_freq1;
947 channel = chandef->chan->hw_value;
/* Channels 1-14 are 2.4 GHz */
948 rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
949 dev->mt76.chandef = *chandef;
951 switch (chandef->width) {
952 case NL80211_CHAN_WIDTH_40:
/* Retarget the RF to the 40 MHz center channel (+/-2 of primary) */
957 channel += 2 - ch_group_index * 4;
958 rf_bw_band |= RF_BW_40;
960 case NL80211_CHAN_WIDTH_80:
/* Which 20 MHz slot of the 80 MHz block holds the primary channel */
961 ch_group_index = (freq - freq1 + 30) / 20;
962 if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
964 channel += 6 - ch_group_index * 4;
965 rf_bw_band |= RF_BW_80;
969 rf_bw_band |= RF_BW_20;
/* USB parts set bandwidth via the MCU; MMIO path is elided/below */
973 if (mt76_is_usb(dev)) {
974 mt76x0_phy_bbp_set_bw(dev, chandef->width);
976 if (chandef->width == NL80211_CHAN_WIDTH_80 ||
977 chandef->width == NL80211_CHAN_WIDTH_40)
981 mt76_wr(dev, MT_TX_SW_CFG0, val);
983 mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
984 mt76x02_phy_set_band(dev, chandef->chan->band,
/* Route CCA engines per the primary channel's position in the block */
987 mt76_rmw(dev, MT_EXT_CCA_CFG,
988 (MT_EXT_CCA_CFG_CCA0 |
989 MT_EXT_CCA_CFG_CCA1 |
990 MT_EXT_CCA_CFG_CCA2 |
991 MT_EXT_CCA_CFG_CCA3 |
992 MT_EXT_CCA_CFG_CCA_MASK),
993 ext_cca_chan[ch_group_index]);
995 mt76x0_phy_set_band(dev, chandef->chan->band);
996 mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);
998 /* set Japan Tx filter at channel 14 */
1000 mt76_set(dev, MT_BBP(CORE, 1), 0x20);
1002 mt76_clear(dev, MT_BBP(CORE, 1), 0x20);
1004 mt76x0_read_rx_gain(dev);
1005 mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);
/* vcocal_en: presumably kicks a VCO recalibration — confirm */
1008 mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
1012 mt76x02_init_agc_gain(dev);
1013 mt76x0_phy_calibrate(dev, false);
1014 mt76x0_phy_set_txpower(dev);
1016 ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
1017 MT_CALIBRATE_INTERVAL);
/*
 * Read the on-chip temperature sensor and trigger recalibration when it
 * drifts: VCO recalibration beyond 20 units from the last VCO reading,
 * full PHY calibration beyond 30 from the last full reading. Saves and
 * restores the three RF registers the measurement borrows.
 */
1022 static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
1024 u8 rf_b7_73, rf_b0_66, rf_b0_67;
/* Save the registers we are about to repurpose for the measurement */
1027 rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
1028 rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
1029 rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));
/* Switch the RF into temperature-sense mode */
1031 mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
1032 mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
1033 mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);
1035 mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
1036 if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
/* Timed out: cancel the measurement (restore path elided) */
1037 mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
1041 val = mt76_rr(dev, MT_BBP(CORE, 35));
/* Convert raw reading to degrees using the EEPROM temp offset */
1042 val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;
1044 if (abs(val - dev->cal.temp_vco) > 20) {
1045 mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
1046 dev->mt76.chandef.chan->hw_value);
1047 dev->cal.temp_vco = val;
1049 if (abs(val - dev->cal.temp) > 30) {
1050 mt76x0_phy_calibrate(dev, false);
1051 dev->cal.temp = val;
/* Restore the borrowed RF registers */
1055 mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
1056 mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
1057 mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
/*
 * Apply the current AGC gain (cached gain minus the dynamic adjustment)
 * to BBP AGC,8, then let the DFS code tweak it further on radar
 * channels.
 */
1060 static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
1062 u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;
1064 mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);
1066 if ((dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
1068 mt76x02_phy_dfs_adjust_agc(dev);
/*
 * Periodic AGC gain tracking: classify the average RSSI into a 0-2
 * "low gain" level, and on a level-class change (bit 1 flips, or first
 * run) reprogram the gain; otherwise only fine-tune the VGA. Clears the
 * false-CCA counter by reading RX_STAT_1. Some lines elided.
 */
1072 mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
1078 dev->cal.avg_rssi_all = mt76x02_phy_get_min_avg_rssi(dev);
/* 0, 1 or 2 depending on how many thresholds the RSSI exceeds */
1080 low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
1081 (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));
/* Force an update on first run (low_gain < 0) or when bit 1 changes */
1083 gain_change = dev->cal.low_gain < 0 ||
1084 (dev->cal.low_gain & 2) ^ (low_gain & 2);
1085 dev->cal.low_gain = low_gain;
/* No class change: just nudge the VGA if the helper asks for it */
1088 if (mt76x02_phy_adjust_vga_gain(dev))
1089 mt76x0_phy_set_gain_val(dev);
/* Class change: rebase gain from the initial calibration value */
1093 dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
1094 gain_delta = (low_gain == 2) ? 10 : 0;
1096 dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
1097 mt76x0_phy_set_gain_val(dev);
1099 /* clear false CCA counters */
1100 mt76_rr(dev, MT_RX_STAT_1);
/*
 * Delayed-work handler for periodic PHY maintenance: update the AGC
 * gain, run TSSI compensation when enabled (otherwise the temperature
 * sensor path), then re-arm itself at 4x the base calibrate interval.
 */
1103 static void mt76x0_phy_calibration_work(struct work_struct *work)
1105 struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
1108 mt76x0_phy_update_channel_gain(dev);
1109 if (mt76x0_tssi_enabled(dev))
1110 mt76x0_phy_tssi_calibrate(dev);
1112 mt76x0_phy_temp_sensor(dev);
1114 ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
1115 4 * MT_CALIBRATE_INTERVAL);
/*
 * Write a reg-pair table entry by entry, patching individual values for
 * specific chip variants (MT7610E checks visible; the patched registers
 * and replacement values are elided in this excerpt).
 */
1118 static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
1119 const struct mt76_reg_pair *rp, int len)
1123 for (i = 0; i < len; i++) {
1124 u32 reg = rp[i].reg;
1125 u8 val = rp[i].value;
/* Bus/chip-specific value overrides (bodies elided) */
1129 if (mt76_is_mmio(dev)) {
1139 if (is_mt7610e(dev))
1147 else if (is_mt7610e(dev))
1155 mt76x0_rf_wr(dev, reg, val);
/*
 * One-time RF initialization: load the central and per-band RF tables
 * (with chip-variant patching for the 2G/central tables), seed the
 * default 20 MHz / G-band switch settings, apply the EEPROM frequency
 * offset, reset the DAC and kick an initial VCO calibration.
 */
1159 static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
1164 mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
1165 ARRAY_SIZE(mt76x0_rf_central_tab));
1166 mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
1167 ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
1168 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
1169 RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);
/* Default to the 20 MHz (and G-band 20 MHz) switch-table entries */
1171 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
1172 const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];
1174 if (item->bw_band == RF_BW_20)
1175 mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
1176 else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
1177 mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
/* Default to the G-band entries of the band switch table */
1180 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
1181 if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
1183 mt76x0_rf_band_switch_tab[i].rf_bank_reg,
1184 mt76x0_rf_band_switch_tab[i].value);
1189 Frequency calibration
1190 E1: B0.R22<6:0>: xo_cxo<6:0>
1191 E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
/* Clamp the EEPROM crystal trim to the register's valid range */
1193 mt76x0_rf_wr(dev, MT_RF(0, 22),
1194 min_t(u8, dev->cal.rx.freq_offset, 0xbf));
1195 val = mt76x0_rf_rr(dev, MT_RF(0, 22));
1197 /* Reset procedure DAC during power-up:
/* set / clear / set sequence pulses the DAC reset */
1202 mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
1203 mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
1204 mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
1206 /* vcocal_en: initiate VCO calibration (reset after completion)) */
1207 mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
1210 void mt76x0_phy_init(struct mt76x02_dev *dev)
1212 INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);
1214 mt76x0_phy_ant_select(dev);
1215 mt76x0_phy_rf_init(dev);
1216 mt76x02_phy_set_rxpath(dev);
1217 mt76x02_phy_set_txdac(dev);