// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */

#include <linux/random.h>
#include "mt76.h"

const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
};
EXPORT_SYMBOL_GPL(mt76_tm_policy);
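
/* Queue up to the configured number of test frames. Clones of the prebuilt
 * td->tx_skb are pushed to the hardware queue until the pending count, the
 * queued-vs-done window (tx_queued_limit) or half of the queue descriptors
 * is exhausted. Runs from the tx worker that mt76_testmode_tx_start kicks.
 */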
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(phy, q, qid, skb_get(skb),
						   wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}
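
/* Free the current test frame template, if any. */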
static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}
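
/* Build the test frame template: an 802.11 data header followed by a random
 * payload. Frames longer than MT_TXP_MAX_LEN are split across a frag_list.
 */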
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, sizeof(*hdr));
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
	memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
	memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
	skb_set_queue_mapping(head, IEEE80211_AC_BE);
	get_random_bytes(__skb_put(head, head_len - sizeof(*hdr)),
			 head_len - sizeof(*hdr));

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		get_random_bytes(__skb_put(frag, frag_len), frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);
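
/* Allocate the test frame and translate the configured rate parameters into
 * mac80211 tx rate flags. Only legacy/HT/VHT are handled here; HE modes are
 * left to the driver callbacks.
 */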
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}
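
/* Reset the tx counters and kick the tx worker to start transmitting. */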
static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}
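
/* Stop an active tx run: clear the pending count with the worker disabled,
 * wait for already queued frames to complete, then free the template.
 */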
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}
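
/* Bitmap helpers tracking which testmode attributes were explicitly set. */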
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}
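
/* Apply default tx parameters on first use (tx_mpdu_len == 0). */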
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;

	memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
}
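
/* Perform a single state transition: stop a running tx test, set up the new
 * state if needed and hand the change to the driver's set_state callback.
 */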
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
	}

	phy->test.state = state;

	return 0;
}
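
/* Public entry point for testmode state changes. Requires a running phy in
 * monitor mode; transitions between two active states go through IDLE first.
 */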
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);
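
/* Read an optional u8 attribute and validate it against [min, max]. */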
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;

	return 0;
}
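
/* Handler for testmode commands: parse the netlink attributes, update the
 * test configuration under dev->mutex and optionally trigger a state change.
 */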
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
			   &td->tx_antenna_mask, 0, 0xff) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
			if (nla_len(cur) != ETH_ALEN || idx >= 3)
				goto out;

			memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
			idx++;
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);
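
/* Append the accumulated tx/rx counters to the netlink message. */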
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}
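
/* Dump the testmode configuration and state, or only the statistics when
 * MT76_TM_ATTR_STATS is requested.
 */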
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
		if (!a)
			goto out;

		for (i = 0; i < 3; i++)
			if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);