1 // SPDX-License-Identifier: ISC
2 /* Copyright (C) 2020 MediaTek Inc. */
11 TM_CHANGED_FREQ_OFFSET,
/* Maps each TM_CHANGED_* bit position to the mt76 testmode attribute
 * it mirrors; used to translate netlink attribute updates into a
 * "changed" bitmask and back.
 */
17 static const u8 tm_change_map[] = {
18 	[TM_CHANGED_TXPOWER] = MT76_TM_ATTR_TX_POWER,
19 	[TM_CHANGED_FREQ_OFFSET] = MT76_TM_ATTR_FREQ_OFFSET,
/* Build a reg_band initializer holding the band-0 and band-1 copies of
 * a per-band register; the _IDX variant is for register macros that take
 * an additional index argument.
 */
26 #define REG_BAND(_reg) \
27 	{ .band[0] = MT_##_reg(0), .band[1] = MT_##_reg(1) }
28 #define REG_BAND_IDX(_reg, _idx) \
29 	{ .band[0] = MT_##_reg(0, _idx), .band[1] = MT_##_reg(1, _idx) }
/* Registers saved before entering testmode and restored on exit
 * (see mt7915_tm_reg_backup_restore()); both band copies are tracked
 * via REG_BAND_IDX.
 */
31 static const struct reg_band reg_backup_list[] = {
32 	REG_BAND_IDX(AGG_PCR0, 0),
33 	REG_BAND_IDX(AGG_PCR0, 1),
34 	REG_BAND_IDX(AGG_AWSCR0, 0),
35 	REG_BAND_IDX(AGG_AWSCR0, 1),
36 	REG_BAND_IDX(AGG_AWSCR0, 2),
37 	REG_BAND_IDX(AGG_AWSCR0, 3),
45 	REG_BAND_IDX(ARB_DRNGR0, 0),
46 	REG_BAND_IDX(ARB_DRNGR0, 1),
/* Program the TX power for this phy's band via
 * MCU_EXT_CMD_TX_POWER_FEATURE_CTRL.  When testmode is active, the
 * testmode tx_power override (antenna 0) is used instead of the default.
 */
52 mt7915_tm_set_tx_power(struct mt7915_phy *phy)
54 	struct mt7915_dev *dev = phy->dev;
55 	struct mt76_phy *mphy = phy->mt76;
56 	struct cfg80211_chan_def *chandef = &mphy->chandef;
57 	int freq = chandef->center_freq1;
63 	u8 ant_idx; /* Only 0 is valid */
	/* phy != &dev->phy selects the secondary (DBDC) band */
68 		.dbdc_idx = phy != &dev->phy,
69 		.center_chan = ieee80211_frequency_to_channel(freq),
	/* pick up the testmode power override only while testmode is on */
73 	if (phy->mt76->test.state != MT76_TM_STATE_OFF)
74 		tx_power = phy->mt76->test.tx_power;
76 	/* Tx power of the other antennas are the same as antenna 0 */
77 	if (tx_power && tx_power[0])
78 		req.tx_power = tx_power[0];
80 	ret = mt76_mcu_send_msg(&dev->mt76,
81 				MCU_EXT_CMD_TX_POWER_FEATURE_CTRL,
82 				&req, sizeof(req), false);
/* Apply a frequency offset for this phy's band through the ATE control
 * MCU command (MCU_ATE_SET_FREQ_OFFSET).  @val is the raw offset value
 * passed to firmware; @en gates whether the offset is applied (handled
 * by the caller passing 0 when disabled — see mt7915_tm_update_params()).
 */
88 mt7915_tm_set_freq_offset(struct mt7915_phy *phy, bool en, u32 val)
90 	struct mt7915_dev *dev = phy->dev;
91 	struct mt7915_tm_cmd req = {
93 		.param_idx = MCU_ATE_SET_FREQ_OFFSET,
94 		.param.freq.band = phy != &dev->phy,
95 		.param.freq.freq_offset = cpu_to_le32(val),
98 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_ATE_CTRL, &req,
/* Notify firmware that testmode is being entered/left via the TX power
 * feature-control command.  NOTE(review): the request setup lines are not
 * visible in this excerpt — presumably @enable is carried in the request;
 * confirm against the full source.
 */
103 mt7915_tm_mode_ctrl(struct mt7915_dev *dev, bool enable)
114 	return mt76_mcu_send_msg(&dev->mt76,
115 				 MCU_EXT_CMD_TX_POWER_FEATURE_CTRL,
116 				 &req, sizeof(req), false);
/* Enable/disable a TX/RX path of the given @type (e.g. TM_MAC_TX,
 * TM_MAC_RX_RXV — see callers) on this phy's band via MCU_ATE_SET_TRX.
 */
120 mt7915_tm_set_trx(struct mt7915_phy *phy, int type, bool en)
122 	struct mt7915_dev *dev = phy->dev;
123 	struct mt7915_tm_cmd req = {
125 		.param_idx = MCU_ATE_SET_TRX,
126 		.param.trx.type = type,
127 		.param.trx.enable = en,
128 		.param.trx.band = phy != &dev->phy,
131 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_ATE_CTRL, &req,
/* Ask firmware to flush the hardware TX queues belonging to @wcid on
 * this phy's band (MCU_ATE_CLEAN_TXQUEUE).
 */
136 mt7915_tm_clean_hwq(struct mt7915_phy *phy, u8 wcid)
138 	struct mt7915_dev *dev = phy->dev;
139 	struct mt7915_tm_cmd req = {
141 		.param_idx = MCU_ATE_CLEAN_TXQUEUE,
142 		.param.clean.wcid = wcid,
143 		.param.clean.band = phy != &dev->phy,
146 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_ATE_CTRL, &req,
/* Program MAC timing parameters (slot time, SIFS, and fixed RIFS=2 /
 * EIFS=60) for this phy's band via MCU_ATE_SET_SLOT_TIME.  testmode_en
 * reflects whether testmode is currently active.
 */
151 mt7915_tm_set_slot_time(struct mt7915_phy *phy, u8 slot_time, u8 sifs)
153 	struct mt7915_dev *dev = phy->dev;
154 	struct mt7915_tm_cmd req = {
155 		.testmode_en = !(phy->mt76->test.state == MT76_TM_STATE_OFF),
156 		.param_idx = MCU_ATE_SET_SLOT_TIME,
157 		.param.slot.slot_time = slot_time,
158 		.param.slot.sifs = sifs,
159 		.param.slot.rifs = 2,
160 		.param.slot.eifs = cpu_to_le16(60),
161 		.param.slot.band = phy != &dev->phy,
164 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_ATE_CTRL, &req,
/* Update the EDCA (WMM) parameters of a single queue @qid: AIFS,
 * CWmin/CWmax and TXOP, pushed to firmware through
 * mt7915_mcu_update_edca().
 */
169 mt7915_tm_set_wmm_qid(struct mt7915_dev *dev, u8 qid, u8 aifs, u8 cw_min,
170 		      u16 cw_max, u16 txop)
172 	struct mt7915_mcu_tx req = { .total = 1 };
173 	struct edca *e = &req.edca[0];
176 	e->set = WMM_PARAM_SET;
180 	e->cw_max = cpu_to_le16(cw_max);
181 	e->txop = cpu_to_le16(txop);
183 	return mt7915_mcu_update_edca(dev, &req);
/* Translate a requested inter-packet gap @ipg (us) into concrete MAC
 * timing knobs: SIFS, slot time, AIFSN and contention window, plus the
 * I2T/TR2T check fields of TMAC_TRCR0.  The gap is consumed piecewise:
 * first by SIFS (bounded by TM_MAX_SIFS), then by CW backoff slots, then
 * by AIFSN slots.  @mode selects the signal-extension overhead (CCK has
 * none, OFDM-based modes add 6 us).
 */
187 mt7915_tm_set_ipg_params(struct mt7915_phy *phy, u32 ipg, u8 mode)
189 #define TM_DEFAULT_SIFS	10
190 #define TM_MAX_SIFS	127
191 #define TM_MAX_AIFSN	0xf
192 #define TM_MIN_AIFSN	0x1
193 #define BBP_PROC_TIME	1500
194 	struct mt7915_dev *dev = phy->dev;
195 	u8 sig_ext = (mode == MT76_TM_TX_MODE_CCK) ? 0 : 6;
196 	u8 slot_time = 9, sifs = TM_DEFAULT_SIFS;
197 	u8 aifsn = TM_MIN_AIFSN;
198 	u32 i2t_time, tr2t_time, txv_time;
199 	bool ext_phy = phy != &dev->phy;
	/* gap too small to express with these timings */
202 	if (ipg < sig_ext + slot_time + sifs)
	/* small gaps fit entirely into SIFS */
210 	if (ipg <= (TM_MAX_SIFS + slot_time)) {
211 		sifs = ipg - slot_time;
213 		u32 val = (ipg + slot_time) / slot_time;
	/* subtract the contention-window backoff slots from the gap */
221 		ipg -= ((1 << cw) - 1) * slot_time;
223 		aifsn = ipg / slot_time;
224 		if (aifsn > TM_MAX_AIFSN)
225 			aifsn = TM_MAX_AIFSN;
227 		ipg -= aifsn * slot_time;
229 		if (ipg > TM_DEFAULT_SIFS) {
230 			if (ipg < TM_MAX_SIFS)
	/* TXV timeout field counts in units of 50 (clock ticks -> time) */
237 	txv_time = mt76_get_field(dev, MT_TMAC_ATCR(ext_phy),
238 				  MT_TMAC_ATCR_TXV_TOUT);
239 	txv_time *= 50;	/* normal clock time */
241 	i2t_time = (slot_time * 1000 - txv_time - BBP_PROC_TIME) / 50;
242 	tr2t_time = (sifs * 1000 - txv_time - BBP_PROC_TIME) / 50;
244 	mt76_set(dev, MT_TMAC_TRCR0(ext_phy),
245 		 FIELD_PREP(MT_TMAC_TRCR0_TR2T_CHK, tr2t_time) |
246 		 FIELD_PREP(MT_TMAC_TRCR0_I2T_CHK, i2t_time));
248 	mt7915_tm_set_slot_time(phy, slot_time, sifs);
	/* push the derived EDCA parameters to the BE queue */
250 	return mt7915_tm_set_wmm_qid(dev,
251 				     mt7915_lmac_mapping(dev, IEEE80211_AC_BE),
/* Resize the testmode TX frame so that its on-air duration approximates
 * @tx_time: build a rate_info for the configured testmode rate, ask
 * cfg80211 for the corresponding bitrate, derive the byte length, clamp
 * it to [sizeof(ieee80211_hdr), IEEE80211_MAX_FRAME_LEN], and allocate a
 * fresh zero-filled skb carrying a copy of the original 802.11 header.
 */
256 mt7915_tm_set_tx_len(struct mt7915_phy *phy, u32 tx_time)
258 	struct mt76_phy *mphy = phy->mt76;
259 	struct mt76_testmode_data *td = &mphy->test;
260 	struct sk_buff *old = td->tx_skb, *new;
261 	struct ieee80211_supported_band *sband;
262 	struct rate_info rate = {};
263 	u16 flags = 0, tx_len;
	/* nothing to do without a target time or an existing frame */
266 	if (!tx_time || !old)
269 	rate.mcs = td->tx_rate_idx;
270 	rate.nss = td->tx_rate_nss;
	/* translate the testmode rate settings into a cfg80211 rate_info */
272 	switch (td->tx_rate_mode) {
273 	case MT76_TM_TX_MODE_CCK:
274 	case MT76_TM_TX_MODE_OFDM:
275 		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
276 			sband = &mphy->sband_5g.sband;
278 			sband = &mphy->sband_2g.sband;
280 		rate.legacy = sband->bitrates[rate.mcs].bitrate;
282 	case MT76_TM_TX_MODE_HT:
		/* HT rate index encodes the stream count (8 MCS per stream) */
283 		rate.mcs += rate.nss * 8;
284 		flags |= RATE_INFO_FLAGS_MCS;
287 			flags |= RATE_INFO_FLAGS_SHORT_GI;
289 	case MT76_TM_TX_MODE_VHT:
290 		flags |= RATE_INFO_FLAGS_VHT_MCS;
293 			flags |= RATE_INFO_FLAGS_SHORT_GI;
295 	case MT76_TM_TX_MODE_HE_SU:
296 	case MT76_TM_TX_MODE_HE_EXT_SU:
297 	case MT76_TM_TX_MODE_HE_TB:
298 	case MT76_TM_TX_MODE_HE_MU:
299 		rate.he_gi = td->tx_rate_sgi;
300 		flags |= RATE_INFO_FLAGS_HE_MCS;
	/* map the channel definition onto a rate_info bandwidth */
307 	switch (mphy->chandef.width) {
308 	case NL80211_CHAN_WIDTH_160:
309 	case NL80211_CHAN_WIDTH_80P80:
310 		rate.bw = RATE_INFO_BW_160;
312 	case NL80211_CHAN_WIDTH_80:
313 		rate.bw = RATE_INFO_BW_80;
315 	case NL80211_CHAN_WIDTH_40:
316 		rate.bw = RATE_INFO_BW_40;
319 		rate.bw = RATE_INFO_BW_20;
	/* bitrate is in 100 kbit/s units; length = rate * time / 8 bits */
323 	bitrate = cfg80211_calculate_bitrate(&rate);
324 	tx_len = bitrate * tx_time / 10 / 8;
326 	if (tx_len < sizeof(struct ieee80211_hdr))
327 		tx_len = sizeof(struct ieee80211_hdr);
328 	else if (tx_len > IEEE80211_MAX_FRAME_LEN)
329 		tx_len = IEEE80211_MAX_FRAME_LEN;
331 	new = alloc_skb(tx_len, GFP_KERNEL);
	/* keep the old frame's metadata and 802.11 header, zero the rest */
335 	skb_copy_header(new, old);
336 	__skb_put_zero(new, tx_len);
337 	memcpy(new->data, old->data, sizeof(struct ieee80211_hdr));
/* Save or restore the registers in reg_backup_list for this phy's band.
 * When testmode is being switched OFF, write the saved values back.
 * Otherwise, allocate the backup buffer on first use, snapshot the
 * current values, then apply the testmode register overrides: disable
 * protection and BA/RTS retry logic, clear the TX filter, stop TBTT,
 * and open up the RX filter for testmode reception.
 */
346 mt7915_tm_reg_backup_restore(struct mt7915_phy *phy)
348 	int n_regs = ARRAY_SIZE(reg_backup_list);
349 	struct mt7915_dev *dev = phy->dev;
350 	bool ext_phy = phy != &dev->phy;
351 	u32 *b = phy->test.reg_backup;
	/* leaving testmode: restore the saved register values */
354 	if (phy->mt76->test.state == MT76_TM_STATE_OFF) {
355 		for (i = 0; i < n_regs; i++)
356 			mt76_wr(dev, reg_backup_list[i].band[ext_phy], b[i]);
	/* devm-managed: freed automatically with the device */
363 	b = devm_kzalloc(dev->mt76.dev, 4 * n_regs, GFP_KERNEL);
367 	phy->test.reg_backup = b;
368 	for (i = 0; i < n_regs; i++)
369 		b[i] = mt76_rr(dev, reg_backup_list[i].band[ext_phy]);
	/* disable all protection modes and PTA for testmode TX */
371 	mt76_clear(dev, MT_AGG_PCR0(ext_phy, 0), MT_AGG_PCR0_MM_PROT |
372 		   MT_AGG_PCR0_GF_PROT | MT_AGG_PCR0_ERP_PROT |
373 		   MT_AGG_PCR0_VHT_PROT | MT_AGG_PCR0_BW20_PROT |
374 		   MT_AGG_PCR0_BW40_PROT | MT_AGG_PCR0_BW80_PROT);
375 	mt76_set(dev, MT_AGG_PCR0(ext_phy, 0), MT_AGG_PCR0_PTA_WIN_DIS);
377 	mt76_wr(dev, MT_AGG_PCR0(ext_phy, 1), MT_AGG_PCR1_RTS0_NUM_THRES |
378 		MT_AGG_PCR1_RTS0_LEN_THRES);
380 	mt76_clear(dev, MT_AGG_MRCR(ext_phy), MT_AGG_MRCR_BAR_CNT_LIMIT |
381 		   MT_AGG_MRCR_LAST_RTS_CTS_RN | MT_AGG_MRCR_RTS_FAIL_LIMIT |
382 		   MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT);
	/* cap RTS fail limits to a single retry */
384 	mt76_rmw(dev, MT_AGG_MRCR(ext_phy), MT_AGG_MRCR_RTS_FAIL_LIMIT |
385 		 MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT,
386 		 FIELD_PREP(MT_AGG_MRCR_RTS_FAIL_LIMIT, 1) |
387 		 FIELD_PREP(MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, 1));
389 	mt76_wr(dev, MT_TMAC_TFCR0(ext_phy), 0);
390 	mt76_clear(dev, MT_TMAC_TCR0(ext_phy), MT_TMAC_TCR0_TBTT_STOP_CTRL);
392 	/* config rx filter for testmode rx */
393 	mt76_wr(dev, MT_WF_RFCR(ext_phy), 0xcf70a);
394 	mt76_wr(dev, MT_WF_RFCR1(ext_phy), 0);
/* Enter or leave testmode for @phy: tell firmware, backup/restore the
 * affected registers, toggle the MAC TX/RX path (disabled while
 * testmode is active), and update the monitor vif's BSS info.
 * No-op unless the phy is running.
 */
398 mt7915_tm_init(struct mt7915_phy *phy, bool en)
400 	struct mt7915_dev *dev = phy->dev;
402 	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
405 	mt7915_tm_mode_ctrl(dev, en);
406 	mt7915_tm_reg_backup_restore(phy);
	/* normal TX/RX is the inverse of testmode enable */
407 	mt7915_tm_set_trx(phy, TM_MAC_TXRX, !en);
409 	mt7915_mcu_add_bss_info(phy, phy->monitor_vif, en);
/* Re-apply the current channel configuration.  mt7915_set_channel()
 * takes dev->mt76.mutex itself, so the lock held by the caller must be
 * dropped around the call; afterwards the RX path channel info is
 * refreshed via MCU_EXT_CMD_SET_RX_PATH.
 */
413 mt7915_tm_update_channel(struct mt7915_phy *phy)
415 	mutex_unlock(&phy->dev->mt76.mutex);
416 	mt7915_set_channel(phy);
417 	mutex_lock(&phy->dev->mt76.mutex);
419 	mt7915_mcu_set_chan_info(phy, MCU_EXT_CMD_SET_RX_PATH);
/* Start or stop testmode frame transmission.  Stops RXV capture and
 * flushes the global wcid's hardware queues, then (on enable) refreshes
 * the channel, resolves the spatial-extension index (explicit override
 * or looked up from the antenna mask), derives the ipg/tx_time pair
 * from the duty cycle, programs the frame length and queue limit, and
 * finally toggles the MAC TX path.
 */
423 mt7915_tm_set_tx_frames(struct mt7915_phy *phy, bool en)
425 	static const u8 spe_idx_map[] = {0, 0, 1, 0, 3, 2, 4, 0,
426 					 9, 8, 6, 10, 16, 12, 18, 0};
427 	struct mt76_testmode_data *td = &phy->mt76->test;
428 	struct mt7915_dev *dev = phy->dev;
429 	struct ieee80211_tx_info *info;
430 	u8 duty_cycle = td->tx_duty_cycle;
431 	u32 tx_time = td->tx_time;
432 	u32 ipg = td->tx_ipg;
434 	mt7915_tm_set_trx(phy, TM_MAC_RX_RXV, false);
435 	mt7915_tm_clean_hwq(phy, dev->mt76.global_wcid.idx);
438 		mt7915_tm_update_channel(phy);
	/* explicit spe_idx overrides the antenna-mask lookup table */
440 	if (td->tx_spe_idx) {
441 		phy->test.spe_idx = td->tx_spe_idx;
443 		u8 tx_ant = td->tx_antenna_mask;
445 		if (phy != &dev->phy)
447 		phy->test.spe_idx = spe_idx_map[tx_ant];
451 	/* if all three params are set, duty_cycle will be ignored */
452 	if (duty_cycle && tx_time && !ipg) {
453 		ipg = tx_time * 100 / duty_cycle - tx_time;
454 	} else if (duty_cycle && !tx_time && ipg) {
455 		if (duty_cycle < 100)
456 			tx_time = duty_cycle * ipg / (100 - duty_cycle);
459 	mt7915_tm_set_ipg_params(phy, ipg, td->tx_rate_mode);
460 	mt7915_tm_set_tx_len(phy, tx_time);
	/* bound queued frames so one ipg-paced burst fits the TM timeout */
463 		td->tx_queued_limit = MT76_TM_TIMEOUT * 1000000 / ipg / 2;
465 	if (!en || !td->tx_skb)
468 	info = IEEE80211_SKB_CB(td->tx_skb);
469 	info->control.vif = phy->monitor_vif;
471 	mt7915_tm_set_trx(phy, TM_MAC_TX, en);
/* Start or stop testmode reception: refresh the channel (on enable)
 * and toggle the RX/RXV capture path.
 */
475 mt7915_tm_set_rx_frames(struct mt7915_phy *phy, bool en)
478 		mt7915_tm_update_channel(phy);
480 	mt7915_tm_set_trx(phy, TM_MAC_RX_RXV, en);
/* Switch the firmware RF operation mode (@oper, e.g. RF_OPER_NORMAL or
 * RF_OPER_RF_TEST — see mt7915_tm_set_tx_cont()) via MCU_EXT_CMD_RF_TEST.
 */
484 mt7915_tm_rf_switch_mode(struct mt7915_dev *dev, u32 oper)
486 	struct mt7915_tm_rf_test req = {
487 		.op.op_mode = cpu_to_le32(oper),
490 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_RF_TEST, &req,
/* Start or stop continuous (unmodulated-payload) transmission through
 * the firmware RF-test interface.  On enable: fill in channel, antenna
 * and band, map the chandef width onto a CMD_CBW_* value, translate the
 * testmode rate mode into an MT_PHY_TYPE_* (legacy rates are first
 * resolved through the sband bitrate table), pack mode and rate index
 * into rateval, then switch the RF into test mode and issue TX_CONT_START.
 * On disable: issue TX_CONT_STOP and switch the RF back to normal mode.
 */
495 mt7915_tm_set_tx_cont(struct mt7915_phy *phy, bool en)
497 #define TX_CONT_START	0x05
498 #define TX_CONT_STOP	0x06
499 	struct mt7915_dev *dev = phy->dev;
500 	struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
501 	int freq1 = ieee80211_frequency_to_channel(chandef->center_freq1);
502 	struct mt76_testmode_data *td = &phy->mt76->test;
503 	u32 func_idx = en ? TX_CONT_START : TX_CONT_STOP;
504 	u8 rate_idx = td->tx_rate_idx, mode;
506 	struct mt7915_tm_rf_test req = {
509 		.op.rf.func_idx = cpu_to_le32(func_idx),
511 	struct tm_tx_cont *tx_cont = &req.op.rf.param.tx_cont;
513 	tx_cont->control_ch = chandef->chan->hw_value;
514 	tx_cont->center_ch = freq1;
515 	tx_cont->tx_ant = td->tx_antenna_mask;
516 	tx_cont->band = phy != &dev->phy;
	/* map the cfg80211 channel width onto the firmware CBW encoding */
518 	switch (chandef->width) {
519 	case NL80211_CHAN_WIDTH_40:
520 		tx_cont->bw = CMD_CBW_40MHZ;
522 	case NL80211_CHAN_WIDTH_80:
523 		tx_cont->bw = CMD_CBW_80MHZ;
525 	case NL80211_CHAN_WIDTH_80P80:
526 		tx_cont->bw = CMD_CBW_8080MHZ;
528 	case NL80211_CHAN_WIDTH_160:
529 		tx_cont->bw = CMD_CBW_160MHZ;
531 	case NL80211_CHAN_WIDTH_5:
532 		tx_cont->bw = CMD_CBW_5MHZ;
534 	case NL80211_CHAN_WIDTH_10:
535 		tx_cont->bw = CMD_CBW_10MHZ;
537 	case NL80211_CHAN_WIDTH_20:
538 		tx_cont->bw = CMD_CBW_20MHZ;
540 	case NL80211_CHAN_WIDTH_20_NOHT:
541 		tx_cont->bw = CMD_CBW_20MHZ;
	/* func_data carries the band index for the stop path */
548 	req.op.rf.param.func_data = cpu_to_le32(phy != &dev->phy);
	/* legacy rates: resolve the hardware rate index via the sband table */
552 	if (td->tx_rate_mode <= MT76_TM_TX_MODE_OFDM) {
553 		struct ieee80211_supported_band *sband;
556 		if (chandef->chan->band == NL80211_BAND_5GHZ)
557 			sband = &phy->mt76->sband_5g.sband;
559 			sband = &phy->mt76->sband_2g.sband;
561 		if (td->tx_rate_mode == MT76_TM_TX_MODE_OFDM)
563 		rate_idx = sband->bitrates[idx].hw_value & 0xff;
566 	switch (td->tx_rate_mode) {
567 	case MT76_TM_TX_MODE_CCK:
568 		mode = MT_PHY_TYPE_CCK;
570 	case MT76_TM_TX_MODE_OFDM:
571 		mode = MT_PHY_TYPE_OFDM;
573 	case MT76_TM_TX_MODE_HT:
574 		mode = MT_PHY_TYPE_HT;
576 	case MT76_TM_TX_MODE_VHT:
577 		mode = MT_PHY_TYPE_VHT;
579 	case MT76_TM_TX_MODE_HE_SU:
580 		mode = MT_PHY_TYPE_HE_SU;
582 	case MT76_TM_TX_MODE_HE_EXT_SU:
583 		mode = MT_PHY_TYPE_HE_EXT_SU;
585 	case MT76_TM_TX_MODE_HE_TB:
586 		mode = MT_PHY_TYPE_HE_TB;
588 	case MT76_TM_TX_MODE_HE_MU:
589 		mode = MT_PHY_TYPE_HE_MU;
	/* rateval layout: PHY mode in bits 6+, rate index in the low bits */
595 	rateval = mode << 6 | rate_idx;
596 	tx_cont->rateval = cpu_to_le16(rateval);
	/* stop path: send the command, then return the RF to normal mode */
602 		ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_RF_TEST, &req,
607 		return mt7915_tm_rf_switch_mode(dev, RF_OPER_NORMAL);
	/* start path: enter RF test mode and reprogram the channel first */
610 	mt7915_tm_rf_switch_mode(dev, RF_OPER_RF_TEST);
611 	mt7915_tm_update_channel(phy);
613 	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD_RF_TEST, &req,
/* Apply the testmode parameters flagged in @changed (TM_CHANGED_* bit
 * mask).  The frequency offset is cleared (0) when testmode is off.
 */
618 mt7915_tm_update_params(struct mt7915_phy *phy, u32 changed)
620 	struct mt76_testmode_data *td = &phy->mt76->test;
621 	bool en = phy->mt76->test.state != MT76_TM_STATE_OFF;
623 	if (changed & BIT(TM_CHANGED_FREQ_OFFSET))
624 		mt7915_tm_set_freq_offset(phy, en, en ? td->freq_offset : 0);
625 	if (changed & BIT(TM_CHANGED_TXPOWER))
626 		mt7915_tm_set_tx_power(phy);
/* mt76 testmode .set_state callback: record the new state and dispatch
 * the matching start/stop handler (TX frames, RX frames, continuous TX,
 * or full testmode init/teardown).  When crossing the OFF<->IDLE
 * boundary, re-apply every previously-set parameter so the hardware
 * state matches the accumulated testmode configuration.
 */
630 mt7915_tm_set_state(struct mt76_phy *mphy, enum mt76_testmode_state state)
632 	struct mt76_testmode_data *td = &mphy->test;
633 	struct mt7915_phy *phy = mphy->priv;
634 	enum mt76_testmode_state prev_state = td->state;
636 	mphy->test.state = state;
	/* a handler runs when its state is entered or left */
638 	if (prev_state == MT76_TM_STATE_TX_FRAMES ||
639 	    state == MT76_TM_STATE_TX_FRAMES)
640 		mt7915_tm_set_tx_frames(phy, state == MT76_TM_STATE_TX_FRAMES);
641 	else if (prev_state == MT76_TM_STATE_RX_FRAMES ||
642 		 state == MT76_TM_STATE_RX_FRAMES)
643 		mt7915_tm_set_rx_frames(phy, state == MT76_TM_STATE_RX_FRAMES);
644 	else if (prev_state == MT76_TM_STATE_TX_CONT ||
645 		 state == MT76_TM_STATE_TX_CONT)
646 		mt7915_tm_set_tx_cont(phy, state == MT76_TM_STATE_TX_CONT);
647 	else if (prev_state == MT76_TM_STATE_OFF ||
648 		 state == MT76_TM_STATE_OFF)
649 		mt7915_tm_init(phy, !(state == MT76_TM_STATE_OFF));
651 	if ((state == MT76_TM_STATE_IDLE &&
652 	     prev_state == MT76_TM_STATE_OFF) ||
653 	    (state == MT76_TM_STATE_OFF &&
654 	     prev_state == MT76_TM_STATE_IDLE)) {
	/* collect every attribute the user has set so far */
658 		for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
659 			u16 cur = tm_change_map[i];
661 			if (td->param_set[cur / 32] & BIT(cur % 32))
665 		mt7915_tm_update_params(phy, changed);
/* mt76 testmode .set_params callback: validate the requested antenna
 * mask against the phy's chainmask, build the changed-bits mask from
 * the netlink attributes present in @tb, and apply them.  Parameter
 * application is skipped while transitioning to/from OFF (mt7915_tm_init
 * handles that path).
 */
672 mt7915_tm_set_params(struct mt76_phy *mphy, struct nlattr **tb,
673 		     enum mt76_testmode_state new_state)
675 	struct mt76_testmode_data *td = &mphy->test;
676 	struct mt7915_phy *phy = mphy->priv;
	/* TM_CHANGED_* bits must fit the u32 changed mask */
680 	BUILD_BUG_ON(NUM_TM_CHANGED >= 32);
682 	if (new_state == MT76_TM_STATE_OFF ||
683 	    td->state == MT76_TM_STATE_OFF)
686 	if (td->tx_antenna_mask & ~mphy->chainmask)
689 	for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
690 		if (tb[tm_change_map[i]])
694 	mt7915_tm_update_params(phy, changed);
/* mt76 testmode .dump_stats callback: emit the last captured RX
 * statistics (frequency offset, per-chain RCPI and in-band/wide-band
 * RSSI arrays, SNR) as nested netlink attributes under
 * MT76_TM_STATS_ATTR_LAST_RX.
 */
700 mt7915_tm_dump_stats(struct mt76_phy *mphy, struct sk_buff *msg)
702 	struct mt7915_phy *phy = mphy->priv;
706 	rx = nla_nest_start(msg, MT76_TM_STATS_ATTR_LAST_RX);
710 	if (nla_put_s32(msg, MT76_TM_RX_ATTR_FREQ_OFFSET, phy->test.last_freq_offset))
	/* per-chain arrays are nested, one attribute per chain index */
713 	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_RCPI);
717 	for (i = 0; i < ARRAY_SIZE(phy->test.last_rcpi); i++)
718 		if (nla_put_u8(msg, i, phy->test.last_rcpi[i]))
721 	nla_nest_end(msg, rssi);
723 	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_IB_RSSI);
727 	for (i = 0; i < ARRAY_SIZE(phy->test.last_ib_rssi); i++)
728 		if (nla_put_s8(msg, i, phy->test.last_ib_rssi[i]))
731 	nla_nest_end(msg, rssi);
733 	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_WB_RSSI);
737 	for (i = 0; i < ARRAY_SIZE(phy->test.last_wb_rssi); i++)
738 		if (nla_put_s8(msg, i, phy->test.last_wb_rssi[i]))
741 	nla_nest_end(msg, rssi);
743 	if (nla_put_u8(msg, MT76_TM_RX_ATTR_SNR, phy->test.last_snr))
746 	nla_nest_end(msg, rx);
/* mt76 testmode operations exported to the core testmode framework. */
751 const struct mt76_testmode_ops mt7915_testmode_ops = {
752 	.set_state = mt7915_tm_set_state,
753 	.set_params = mt7915_tm_set_params,
754 	.dump_stats = mt7915_tm_dump_stats,