/* SPDX-License-Identifier: ISC */
/*
 * Copyright (c) 2005-2011 Atheros Communications Inc.
 * Copyright (c) 2011-2017 Qualcomm Atheros, Inc.
 * Copyright (c) 2018, The Linux Foundation. All rights reserved.
 */
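/*
 * wmi-ops: dispatch layer between the ath10k driver core and the
 * firmware-ABI specific WMI implementations.  The wmi_ops table below is a
 * set of per-ABI hooks, and the static inline ath10k_wmi_*() helpers later
 * in this file are the interface the rest of the driver calls.
 */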
struct ath10k;
struct sk_buff;

struct wmi_ops {
	void (*rx)(struct ath10k *ar, struct sk_buff *skb);
	void (*map_svc)(const __le32 *in, unsigned long *out, size_t len);
	void (*map_svc_ext)(const __le32 *in, unsigned long *out, size_t len);

	int (*pull_scan)(struct ath10k *ar, struct sk_buff *skb,
			 struct wmi_scan_ev_arg *arg);
	int (*pull_mgmt_rx)(struct ath10k *ar, struct sk_buff *skb,
			    struct wmi_mgmt_rx_ev_arg *arg);
	int (*pull_mgmt_tx_compl)(struct ath10k *ar, struct sk_buff *skb,
				  struct wmi_tlv_mgmt_tx_compl_ev_arg *arg);
	int (*pull_mgmt_tx_bundle_compl)(struct ath10k *ar, struct sk_buff *skb,
					 struct wmi_tlv_mgmt_tx_bundle_compl_ev_arg *arg);
	int (*pull_ch_info)(struct ath10k *ar, struct sk_buff *skb,
			    struct wmi_ch_info_ev_arg *arg);
	int (*pull_vdev_start)(struct ath10k *ar, struct sk_buff *skb,
			       struct wmi_vdev_start_ev_arg *arg);
	int (*pull_peer_kick)(struct ath10k *ar, struct sk_buff *skb,
			      struct wmi_peer_kick_ev_arg *arg);
	int (*pull_swba)(struct ath10k *ar, struct sk_buff *skb,
			 struct wmi_swba_ev_arg *arg);
	int (*pull_phyerr_hdr)(struct ath10k *ar, struct sk_buff *skb,
			       struct wmi_phyerr_hdr_arg *arg);
	int (*pull_phyerr)(struct ath10k *ar, const void *phyerr_buf,
			   int left_len, struct wmi_phyerr_ev_arg *arg);
	int (*pull_svc_rdy)(struct ath10k *ar, struct sk_buff *skb,
			    struct wmi_svc_rdy_ev_arg *arg);
	int (*pull_rdy)(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_rdy_ev_arg *arg);
	int (*pull_fw_stats)(struct ath10k *ar, struct sk_buff *skb,
			     struct ath10k_fw_stats *stats);
	int (*pull_roam_ev)(struct ath10k *ar, struct sk_buff *skb,
			    struct wmi_roam_ev_arg *arg);
	int (*pull_wow_event)(struct ath10k *ar, struct sk_buff *skb,
			      struct wmi_wow_ev_arg *arg);
	int (*pull_echo_ev)(struct ath10k *ar, struct sk_buff *skb,
			    struct wmi_echo_ev_arg *arg);
	int (*pull_dfs_status_ev)(struct ath10k *ar, struct sk_buff *skb,
				  struct wmi_dfs_status_ev_arg *arg);
	int (*pull_svc_avail)(struct ath10k *ar, struct sk_buff *skb,
			      struct wmi_svc_avail_ev_arg *arg);

	enum wmi_txbf_conf (*get_txbf_conf_scheme)(struct ath10k *ar);

	struct sk_buff *(*gen_pdev_suspend)(struct ath10k *ar, u32 suspend_opt);
	struct sk_buff *(*gen_pdev_resume)(struct ath10k *ar);
	struct sk_buff *(*gen_pdev_set_base_macaddr)(struct ath10k *ar,
						     const u8 macaddr[ETH_ALEN]);
	struct sk_buff *(*gen_pdev_set_rd)(struct ath10k *ar, u16 rd, u16 rd2g,
					   u16 rd5g, u16 ctl2g, u16 ctl5g,
					   enum wmi_dfs_region dfs_reg);
	struct sk_buff *(*gen_pdev_set_param)(struct ath10k *ar, u32 id,
					      u32 value);
	struct sk_buff *(*gen_init)(struct ath10k *ar);
	struct sk_buff *(*gen_start_scan)(struct ath10k *ar,
					  const struct wmi_start_scan_arg *arg);
	struct sk_buff *(*gen_stop_scan)(struct ath10k *ar,
					 const struct wmi_stop_scan_arg *arg);
	struct sk_buff *(*gen_vdev_create)(struct ath10k *ar, u32 vdev_id,
					   enum wmi_vdev_type type,
					   enum wmi_vdev_subtype subtype,
					   const u8 macaddr[ETH_ALEN]);
	struct sk_buff *(*gen_vdev_delete)(struct ath10k *ar, u32 vdev_id);
	struct sk_buff *(*gen_vdev_start)(struct ath10k *ar,
					  const struct wmi_vdev_start_request_arg *arg,
					  bool restart);
	struct sk_buff *(*gen_vdev_stop)(struct ath10k *ar, u32 vdev_id);
	struct sk_buff *(*gen_vdev_up)(struct ath10k *ar, u32 vdev_id, u32 aid,
				       const u8 *bssid);
	struct sk_buff *(*gen_vdev_down)(struct ath10k *ar, u32 vdev_id);
	struct sk_buff *(*gen_vdev_set_param)(struct ath10k *ar, u32 vdev_id,
					      u32 param_id, u32 param_value);
	struct sk_buff *(*gen_vdev_install_key)(struct ath10k *ar,
						const struct wmi_vdev_install_key_arg *arg);
	struct sk_buff *(*gen_vdev_spectral_conf)(struct ath10k *ar,
						  const struct wmi_vdev_spectral_conf_arg *arg);
	struct sk_buff *(*gen_vdev_spectral_enable)(struct ath10k *ar, u32 vdev_id,
						    u32 trigger, u32 enable);
	struct sk_buff *(*gen_vdev_wmm_conf)(struct ath10k *ar, u32 vdev_id,
					     const struct wmi_wmm_params_all_arg *arg);
	struct sk_buff *(*gen_peer_create)(struct ath10k *ar, u32 vdev_id,
					   const u8 peer_addr[ETH_ALEN],
					   enum wmi_peer_type peer_type);
	struct sk_buff *(*gen_peer_delete)(struct ath10k *ar, u32 vdev_id,
					   const u8 peer_addr[ETH_ALEN]);
	struct sk_buff *(*gen_peer_flush)(struct ath10k *ar, u32 vdev_id,
					  const u8 peer_addr[ETH_ALEN],
					  u32 tid_bitmap);
	struct sk_buff *(*gen_peer_set_param)(struct ath10k *ar, u32 vdev_id,
					      const u8 *peer_addr,
					      enum wmi_peer_param param_id,
					      u32 param_value);
	struct sk_buff *(*gen_peer_assoc)(struct ath10k *ar,
					  const struct wmi_peer_assoc_complete_arg *arg);
	struct sk_buff *(*gen_set_psmode)(struct ath10k *ar, u32 vdev_id,
					  enum wmi_sta_ps_mode psmode);
	struct sk_buff *(*gen_set_sta_ps)(struct ath10k *ar, u32 vdev_id,
					  enum wmi_sta_powersave_param param_id,
					  u32 value);
	struct sk_buff *(*gen_set_ap_ps)(struct ath10k *ar, u32 vdev_id,
					 const u8 *mac,
					 enum wmi_ap_ps_peer_param param_id,
					 u32 value);
	struct sk_buff *(*gen_scan_chan_list)(struct ath10k *ar,
					      const struct wmi_scan_chan_list_arg *arg);
	struct sk_buff *(*gen_scan_prob_req_oui)(struct ath10k *ar,
						 u32 prob_req_oui);
	struct sk_buff *(*gen_beacon_dma)(struct ath10k *ar, u32 vdev_id,
					  const void *bcn, size_t bcn_len,
					  u32 bcn_paddr, bool dtim_zero,
					  bool deliver_cab);
	struct sk_buff *(*gen_pdev_set_wmm)(struct ath10k *ar,
					    const struct wmi_wmm_params_all_arg *arg);
	struct sk_buff *(*gen_request_stats)(struct ath10k *ar, u32 stats_mask);
	struct sk_buff *(*gen_request_peer_stats_info)(struct ath10k *ar,
						       u32 vdev_id,
						       enum wmi_peer_stats_info_request_type type,
						       u8 *addr, u32 reset);
	struct sk_buff *(*gen_force_fw_hang)(struct ath10k *ar,
					     enum wmi_force_fw_hang_type type,
					     u32 delay_ms);
	struct sk_buff *(*gen_mgmt_tx)(struct ath10k *ar, struct sk_buff *skb);
	struct sk_buff *(*gen_mgmt_tx_send)(struct ath10k *ar,
					    struct sk_buff *skb,
					    dma_addr_t paddr);
	int (*cleanup_mgmt_tx_send)(struct ath10k *ar, struct sk_buff *msdu);
	struct sk_buff *(*gen_dbglog_cfg)(struct ath10k *ar, u64 module_enable,
					  u32 log_level);
	struct sk_buff *(*gen_pktlog_enable)(struct ath10k *ar, u32 filter);
	struct sk_buff *(*gen_pktlog_disable)(struct ath10k *ar);
	struct sk_buff *(*gen_pdev_set_quiet_mode)(struct ath10k *ar,
						   u32 period, u32 duration,
						   u32 next_offset, u32 enabled);
	struct sk_buff *(*gen_pdev_get_temperature)(struct ath10k *ar);
	struct sk_buff *(*gen_addba_clear_resp)(struct ath10k *ar, u32 vdev_id,
						const u8 *mac);
	struct sk_buff *(*gen_addba_send)(struct ath10k *ar, u32 vdev_id,
					  const u8 *mac, u32 tid, u32 buf_size);
	struct sk_buff *(*gen_addba_set_resp)(struct ath10k *ar, u32 vdev_id,
					      const u8 *mac, u32 tid, u32 status);
	struct sk_buff *(*gen_delba_send)(struct ath10k *ar, u32 vdev_id,
					  const u8 *mac, u32 tid, u32 initiator,
					  u32 reason);
	struct sk_buff *(*gen_bcn_tmpl)(struct ath10k *ar, u32 vdev_id,
					u32 tim_ie_offset, struct sk_buff *bcn,
					u32 prb_caps, u32 prb_erp,
					void *prb_ies, size_t prb_ies_len);
	struct sk_buff *(*gen_prb_tmpl)(struct ath10k *ar, u32 vdev_id,
					struct sk_buff *bcn);
	struct sk_buff *(*gen_p2p_go_bcn_ie)(struct ath10k *ar, u32 vdev_id,
					     const u8 *p2p_ie);
	struct sk_buff *(*gen_vdev_sta_uapsd)(struct ath10k *ar, u32 vdev_id,
					      const u8 peer_addr[ETH_ALEN],
					      const struct wmi_sta_uapsd_auto_trig_arg *args,
					      u32 num_ac);
	struct sk_buff *(*gen_sta_keepalive)(struct ath10k *ar,
					     const struct wmi_sta_keepalive_arg *arg);
	struct sk_buff *(*gen_wow_enable)(struct ath10k *ar);
	struct sk_buff *(*gen_wow_add_wakeup_event)(struct ath10k *ar, u32 vdev_id,
						    enum wmi_wow_wakeup_event event,
						    u32 enable);
	struct sk_buff *(*gen_wow_host_wakeup_ind)(struct ath10k *ar);
	struct sk_buff *(*gen_wow_add_pattern)(struct ath10k *ar, u32 vdev_id,
					       u32 pattern_id,
					       const u8 *pattern,
					       const u8 *mask,
					       int pattern_len,
					       int pattern_offset);
	struct sk_buff *(*gen_wow_del_pattern)(struct ath10k *ar, u32 vdev_id,
					       u32 pattern_id);
	struct sk_buff *(*gen_update_fw_tdls_state)(struct ath10k *ar,
						    u32 vdev_id,
						    enum wmi_tdls_state state);
	struct sk_buff *(*gen_tdls_peer_update)(struct ath10k *ar,
						const struct wmi_tdls_peer_update_cmd_arg *arg,
						const struct wmi_tdls_peer_capab_arg *cap,
						const struct wmi_channel_arg *chan);
	struct sk_buff *(*gen_radar_found)(struct ath10k *ar,
					   const struct ath10k_radar_found_info *arg);
	struct sk_buff *(*gen_adaptive_qcs)(struct ath10k *ar, bool enable);
	struct sk_buff *(*gen_pdev_get_tpc_config)(struct ath10k *ar, u32 param);
	void (*fw_stats_fill)(struct ath10k *ar,
			      struct ath10k_fw_stats *fw_stats,
			      char *buf);
	struct sk_buff *(*gen_pdev_enable_adaptive_cca)(struct ath10k *ar,
							u8 enable,
							u32 detect_level,
							u32 detect_margin);
	struct sk_buff *(*ext_resource_config)(struct ath10k *ar,
					       enum wmi_host_platform_type type,
					       u32 fw_feature_bitmap);
	int (*get_vdev_subtype)(struct ath10k *ar,
				enum wmi_vdev_subtype subtype);
	struct sk_buff *(*gen_wow_config_pno)(struct ath10k *ar, u32 vdev_id,
					      struct wmi_pno_scan_req *pno_scan);
	struct sk_buff *(*gen_pdev_bss_chan_info_req)(struct ath10k *ar,
						      enum wmi_bss_survey_req_type type);
	struct sk_buff *(*gen_echo)(struct ath10k *ar, u32 value);
	struct sk_buff *(*gen_pdev_get_tpc_table_cmdid)(struct ath10k *ar,
							u32 param);
	struct sk_buff *(*gen_bb_timing)(struct ath10k *ar,
					 const struct wmi_bb_timing_cfg_arg *arg);
	struct sk_buff *(*gen_per_peer_per_tid_cfg)(struct ath10k *ar,
						    const struct wmi_per_peer_per_tid_cfg_arg *arg);
	struct sk_buff *(*gen_gpio_config)(struct ath10k *ar, u32 gpio_num,
					   u32 input, u32 pull_type,
					   u32 intr_mode);
	struct sk_buff *(*gen_gpio_output)(struct ath10k *ar, u32 gpio_num,
					   u32 set);
};
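/*
 * Each wmi_ops instance is provided by the active WMI ABI implementation; a
 * NULL hook means the running firmware does not support that command or
 * event.  The ath10k_wmi_*() wrappers below follow a common pattern: check
 * the hook, build the command skb through the gen_* op, propagate any
 * ERR_PTR() with PTR_ERR(), and hand the skb to ath10k_wmi_cmd_send() with
 * the per-ABI command id.
 */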
int ath10k_wmi_cmd_send(struct ath10k *ar, struct sk_buff *skb, u32 cmd_id);
static inline int
ath10k_wmi_rx(struct ath10k *ar, struct sk_buff *skb)
{
	if (WARN_ON_ONCE(!ar->wmi.ops->rx))
		return -EOPNOTSUPP;

	ar->wmi.ops->rx(ar, skb);
	return 0;
}

static inline int
ath10k_wmi_map_svc(struct ath10k *ar, const __le32 *in, unsigned long *out,
		   size_t len)
{
	if (!ar->wmi.ops->map_svc)
		return -EOPNOTSUPP;

	ar->wmi.ops->map_svc(in, out, len);
	return 0;
}

static inline int
ath10k_wmi_map_svc_ext(struct ath10k *ar, const __le32 *in, unsigned long *out,
		       size_t len)
{
	if (!ar->wmi.ops->map_svc_ext)
		return -EOPNOTSUPP;

	ar->wmi.ops->map_svc_ext(in, out, len);
	return 0;
}

static inline int
ath10k_wmi_pull_scan(struct ath10k *ar, struct sk_buff *skb,
		     struct wmi_scan_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_scan)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_scan(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_mgmt_tx_compl(struct ath10k *ar, struct sk_buff *skb,
			      struct wmi_tlv_mgmt_tx_compl_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_mgmt_tx_compl)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_mgmt_tx_compl(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_mgmt_tx_bundle_compl(struct ath10k *ar, struct sk_buff *skb,
				     struct wmi_tlv_mgmt_tx_bundle_compl_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_mgmt_tx_bundle_compl)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_mgmt_tx_bundle_compl(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_mgmt_rx(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_mgmt_rx_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_mgmt_rx)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_mgmt_rx(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_ch_info(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_ch_info_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_ch_info)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_ch_info(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_vdev_start(struct ath10k *ar, struct sk_buff *skb,
			   struct wmi_vdev_start_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_vdev_start)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_vdev_start(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_peer_kick(struct ath10k *ar, struct sk_buff *skb,
			  struct wmi_peer_kick_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_peer_kick)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_peer_kick(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_swba(struct ath10k *ar, struct sk_buff *skb,
		     struct wmi_swba_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_swba)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_swba(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_phyerr_hdr(struct ath10k *ar, struct sk_buff *skb,
			   struct wmi_phyerr_hdr_arg *arg)
{
	if (!ar->wmi.ops->pull_phyerr_hdr)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_phyerr_hdr(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_phyerr(struct ath10k *ar, const void *phyerr_buf,
		       int left_len, struct wmi_phyerr_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_phyerr)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_phyerr(ar, phyerr_buf, left_len, arg);
}

static inline int
ath10k_wmi_pull_svc_rdy(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_svc_rdy_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_svc_rdy)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_svc_rdy(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_rdy(struct ath10k *ar, struct sk_buff *skb,
		    struct wmi_rdy_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_rdy)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_rdy(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_svc_avail(struct ath10k *ar, struct sk_buff *skb,
			  struct wmi_svc_avail_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_svc_avail)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_svc_avail(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_fw_stats(struct ath10k *ar, struct sk_buff *skb,
			 struct ath10k_fw_stats *stats)
{
	if (!ar->wmi.ops->pull_fw_stats)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_fw_stats(ar, skb, stats);
}

static inline int
ath10k_wmi_pull_roam_ev(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_roam_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_roam_ev)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_roam_ev(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_wow_event(struct ath10k *ar, struct sk_buff *skb,
			  struct wmi_wow_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_wow_event)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_wow_event(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_echo_ev(struct ath10k *ar, struct sk_buff *skb,
			struct wmi_echo_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_echo_ev)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_echo_ev(ar, skb, arg);
}

static inline int
ath10k_wmi_pull_dfs_status(struct ath10k *ar, struct sk_buff *skb,
			   struct wmi_dfs_status_ev_arg *arg)
{
	if (!ar->wmi.ops->pull_dfs_status_ev)
		return -EOPNOTSUPP;

	return ar->wmi.ops->pull_dfs_status_ev(ar, skb, arg);
}
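/*
 * The remaining wrappers generate and transmit WMI commands.  Unless noted
 * otherwise they return 0 on successful queueing, a negative errno from the
 * generator or the send path otherwise.
 */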
static inline enum wmi_txbf_conf
ath10k_wmi_get_txbf_conf_scheme(struct ath10k *ar)
{
	if (!ar->wmi.ops->get_txbf_conf_scheme)
		return WMI_TXBF_CONF_UNSUPPORTED;

	return ar->wmi.ops->get_txbf_conf_scheme(ar);
}

static inline int
ath10k_wmi_cleanup_mgmt_tx_send(struct ath10k *ar, struct sk_buff *msdu)
{
	if (!ar->wmi.ops->cleanup_mgmt_tx_send)
		return -EOPNOTSUPP;

	return ar->wmi.ops->cleanup_mgmt_tx_send(ar, msdu);
}

static inline int
ath10k_wmi_mgmt_tx_send(struct ath10k *ar, struct sk_buff *msdu,
			dma_addr_t paddr)
{
	struct sk_buff *skb;
	int ret;

	if (!ar->wmi.ops->gen_mgmt_tx_send)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_mgmt_tx_send(ar, msdu, paddr);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	ret = ath10k_wmi_cmd_send(ar, skb,
				  ar->wmi.cmd->mgmt_tx_send_cmdid);
	if (ret)
		return ret;

	return 0;
}

static inline int
ath10k_wmi_mgmt_tx(struct ath10k *ar, struct sk_buff *msdu)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(msdu);
	struct sk_buff *skb;
	int ret;

	if (!ar->wmi.ops->gen_mgmt_tx)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_mgmt_tx(ar, msdu);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	ret = ath10k_wmi_cmd_send(ar, skb,
				  ar->wmi.cmd->mgmt_tx_cmdid);
	if (ret)
		return ret;

	/* FIXME There's no ACK event for Management Tx. This probably
	 * shouldn't be called here either.
	 */
	info->flags |= IEEE80211_TX_STAT_ACK;
	ieee80211_tx_status_irqsafe(ar->hw, msdu);

	return 0;
}
static inline int
ath10k_wmi_pdev_set_regdomain(struct ath10k *ar, u16 rd, u16 rd2g, u16 rd5g,
			      u16 ctl2g, u16 ctl5g,
			      enum wmi_dfs_region dfs_reg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_set_rd)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_set_rd(ar, rd, rd2g, rd5g, ctl2g, ctl5g,
					   dfs_reg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_set_regdomain_cmdid);
}

static inline int
ath10k_wmi_pdev_set_base_macaddr(struct ath10k *ar, const u8 macaddr[ETH_ALEN])
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_set_base_macaddr)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_set_base_macaddr(ar, macaddr);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_set_base_macaddr_cmdid);
}

static inline int
ath10k_wmi_pdev_suspend_target(struct ath10k *ar, u32 suspend_opt)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_suspend)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_suspend(ar, suspend_opt);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->pdev_suspend_cmdid);
}

static inline int
ath10k_wmi_pdev_resume_target(struct ath10k *ar)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_resume)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_resume(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->pdev_resume_cmdid);
}

static inline int
ath10k_wmi_pdev_set_param(struct ath10k *ar, u32 id, u32 value)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_set_param)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_set_param(ar, id, value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->pdev_set_param_cmdid);
}

static inline int
ath10k_wmi_cmd_init(struct ath10k *ar)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_init)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_init(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->init_cmdid);
}

static inline int
ath10k_wmi_start_scan(struct ath10k *ar,
		      const struct wmi_start_scan_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_start_scan)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_start_scan(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->start_scan_cmdid);
}

static inline int
ath10k_wmi_stop_scan(struct ath10k *ar, const struct wmi_stop_scan_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_stop_scan)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_stop_scan(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->stop_scan_cmdid);
}

static inline int
ath10k_wmi_vdev_create(struct ath10k *ar, u32 vdev_id,
		       enum wmi_vdev_type type,
		       enum wmi_vdev_subtype subtype,
		       const u8 macaddr[ETH_ALEN])
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_create)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_create(ar, vdev_id, type, subtype, macaddr);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_create_cmdid);
}

static inline int
ath10k_wmi_vdev_delete(struct ath10k *ar, u32 vdev_id)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_delete)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_delete(ar, vdev_id);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_delete_cmdid);
}

static inline int
ath10k_wmi_vdev_start(struct ath10k *ar,
		      const struct wmi_vdev_start_request_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_start)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_start(ar, arg, false);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->vdev_start_request_cmdid);
}

static inline int
ath10k_wmi_vdev_restart(struct ath10k *ar,
			const struct wmi_vdev_start_request_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_start)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_start(ar, arg, true);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->vdev_restart_request_cmdid);
}

static inline int
ath10k_wmi_vdev_stop(struct ath10k *ar, u32 vdev_id)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_stop)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_stop(ar, vdev_id);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_stop_cmdid);
}

static inline int
ath10k_wmi_vdev_up(struct ath10k *ar, u32 vdev_id, u32 aid, const u8 *bssid)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_up)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_up(ar, vdev_id, aid, bssid);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_up_cmdid);
}

static inline int
ath10k_wmi_vdev_down(struct ath10k *ar, u32 vdev_id)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_down)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_down(ar, vdev_id);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_down_cmdid);
}
static inline int
ath10k_wmi_vdev_set_param(struct ath10k *ar, u32 vdev_id, u32 param_id,
			  u32 param_value)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_set_param)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_set_param(ar, vdev_id, param_id,
					      param_value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->vdev_set_param_cmdid);
}

static inline int
ath10k_wmi_vdev_install_key(struct ath10k *ar,
			    const struct wmi_vdev_install_key_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_vdev_install_key)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_install_key(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->vdev_install_key_cmdid);
}

static inline int
ath10k_wmi_vdev_spectral_conf(struct ath10k *ar,
			      const struct wmi_vdev_spectral_conf_arg *arg)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_vdev_spectral_conf)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_spectral_conf(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->vdev_spectral_scan_configure_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_vdev_spectral_enable(struct ath10k *ar, u32 vdev_id, u32 trigger,
				u32 enable)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_vdev_spectral_enable)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_spectral_enable(ar, vdev_id, trigger,
						    enable);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->vdev_spectral_scan_enable_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_vdev_sta_uapsd(struct ath10k *ar, u32 vdev_id,
			  const u8 peer_addr[ETH_ALEN],
			  const struct wmi_sta_uapsd_auto_trig_arg *args,
			  u32 num_ac)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_vdev_sta_uapsd)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_sta_uapsd(ar, vdev_id, peer_addr, args,
					      num_ac);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->sta_uapsd_auto_trig_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_vdev_wmm_conf(struct ath10k *ar, u32 vdev_id,
			 const struct wmi_wmm_params_all_arg *arg)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_vdev_wmm_conf)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_vdev_wmm_conf(ar, vdev_id, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->vdev_set_wmm_params_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_peer_create(struct ath10k *ar, u32 vdev_id,
		       const u8 peer_addr[ETH_ALEN],
		       enum wmi_peer_type peer_type)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_peer_create)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_peer_create(ar, vdev_id, peer_addr, peer_type);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->peer_create_cmdid);
}

static inline int
ath10k_wmi_peer_delete(struct ath10k *ar, u32 vdev_id,
		       const u8 peer_addr[ETH_ALEN])
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_peer_delete)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_peer_delete(ar, vdev_id, peer_addr);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->peer_delete_cmdid);
}

static inline int
ath10k_wmi_peer_flush(struct ath10k *ar, u32 vdev_id,
		      const u8 peer_addr[ETH_ALEN], u32 tid_bitmap)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_peer_flush)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_peer_flush(ar, vdev_id, peer_addr, tid_bitmap);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->peer_flush_tids_cmdid);
}

static inline int
ath10k_wmi_peer_set_param(struct ath10k *ar, u32 vdev_id, const u8 *peer_addr,
			  enum wmi_peer_param param_id, u32 param_value)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_peer_set_param)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_peer_set_param(ar, vdev_id, peer_addr, param_id,
					      param_value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->peer_set_param_cmdid);
}

static inline int
ath10k_wmi_set_psmode(struct ath10k *ar, u32 vdev_id,
		      enum wmi_sta_ps_mode psmode)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_set_psmode)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_set_psmode(ar, vdev_id, psmode);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->sta_powersave_mode_cmdid);
}

static inline int
ath10k_wmi_set_sta_ps_param(struct ath10k *ar, u32 vdev_id,
			    enum wmi_sta_powersave_param param_id, u32 value)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_set_sta_ps)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_set_sta_ps(ar, vdev_id, param_id, value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->sta_powersave_param_cmdid);
}

static inline int
ath10k_wmi_set_ap_ps_param(struct ath10k *ar, u32 vdev_id, const u8 *mac,
			   enum wmi_ap_ps_peer_param param_id, u32 value)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_set_ap_ps)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_set_ap_ps(ar, vdev_id, mac, param_id, value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->ap_ps_peer_param_cmdid);
}
static inline int
ath10k_wmi_scan_chan_list(struct ath10k *ar,
			  const struct wmi_scan_chan_list_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_scan_chan_list)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_scan_chan_list(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->scan_chan_list_cmdid);
}

static inline int
ath10k_wmi_scan_prob_req_oui(struct ath10k *ar, const u8 mac_addr[ETH_ALEN])
{
	struct sk_buff *skb;
	u32 prob_req_oui;

	prob_req_oui = (((u32)mac_addr[0]) << 16) |
		       (((u32)mac_addr[1]) << 8) | mac_addr[2];

	if (!ar->wmi.ops->gen_scan_prob_req_oui)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_scan_prob_req_oui(ar, prob_req_oui);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->scan_prob_req_oui_cmdid);
}

static inline int
ath10k_wmi_peer_assoc(struct ath10k *ar,
		      const struct wmi_peer_assoc_complete_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_peer_assoc)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_peer_assoc(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->peer_assoc_cmdid);
}

static inline int
ath10k_wmi_beacon_send_ref_nowait(struct ath10k *ar, u32 vdev_id,
				  const void *bcn, size_t bcn_len,
				  u32 bcn_paddr, bool dtim_zero,
				  bool deliver_cab)
{
	struct sk_buff *skb;
	int ret;

	if (!ar->wmi.ops->gen_beacon_dma)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_beacon_dma(ar, vdev_id, bcn, bcn_len, bcn_paddr,
					  dtim_zero, deliver_cab);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	ret = ath10k_wmi_cmd_send_nowait(ar, skb,
					 ar->wmi.cmd->pdev_send_bcn_cmdid);
	if (ret) {
		dev_kfree_skb(skb);
		return ret;
	}

	return 0;
}

static inline int
ath10k_wmi_pdev_set_wmm_params(struct ath10k *ar,
			       const struct wmi_wmm_params_all_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_set_wmm)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_set_wmm(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_set_wmm_params_cmdid);
}

static inline int
ath10k_wmi_request_stats(struct ath10k *ar, u32 stats_mask)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_request_stats)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_request_stats(ar, stats_mask);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->request_stats_cmdid);
}

static inline int
ath10k_wmi_request_peer_stats_info(struct ath10k *ar,
				   u32 vdev_id,
				   enum wmi_peer_stats_info_request_type type,
				   u8 *addr,
				   u32 reset)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_request_peer_stats_info)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_request_peer_stats_info(ar, vdev_id, type,
						       addr, reset);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->request_peer_stats_info_cmdid);
}

static inline int
ath10k_wmi_force_fw_hang(struct ath10k *ar,
			 enum wmi_force_fw_hang_type type, u32 delay_ms)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_force_fw_hang)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_force_fw_hang(ar, type, delay_ms);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->force_fw_hang_cmdid);
}
static inline int ath10k_wmi_gpio_config(struct ath10k *ar, u32 gpio_num,
					 u32 input, u32 pull_type,
					 u32 intr_mode)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_gpio_config)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_gpio_config(ar, gpio_num, input, pull_type,
					   intr_mode);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->gpio_config_cmdid);
}

static inline int ath10k_wmi_gpio_output(struct ath10k *ar, u32 gpio_num,
					 u32 set)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_gpio_output)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_gpio_output(ar, gpio_num, set);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->gpio_output_cmdid);
}

static inline int
ath10k_wmi_dbglog_cfg(struct ath10k *ar, u64 module_enable, u32 log_level)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_dbglog_cfg)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_dbglog_cfg(ar, module_enable, log_level);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->dbglog_cfg_cmdid);
}

static inline int
ath10k_wmi_pdev_pktlog_enable(struct ath10k *ar, u32 filter)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pktlog_enable)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pktlog_enable(ar, filter);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_pktlog_enable_cmdid);
}

static inline int
ath10k_wmi_pdev_pktlog_disable(struct ath10k *ar)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pktlog_disable)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pktlog_disable(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_pktlog_disable_cmdid);
}

static inline int
ath10k_wmi_pdev_set_quiet_mode(struct ath10k *ar, u32 period, u32 duration,
			       u32 next_offset, u32 enabled)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_set_quiet_mode)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_set_quiet_mode(ar, period, duration,
						   next_offset, enabled);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_set_quiet_mode_cmdid);
}
static inline int
ath10k_wmi_pdev_get_temperature(struct ath10k *ar)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_get_temperature)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_get_temperature(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_get_temperature_cmdid);
}

static inline int
ath10k_wmi_addba_clear_resp(struct ath10k *ar, u32 vdev_id, const u8 *mac)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_addba_clear_resp)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_addba_clear_resp(ar, vdev_id, mac);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->addba_clear_resp_cmdid);
}

static inline int
ath10k_wmi_addba_send(struct ath10k *ar, u32 vdev_id, const u8 *mac,
		      u32 tid, u32 buf_size)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_addba_send)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_addba_send(ar, vdev_id, mac, tid, buf_size);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->addba_send_cmdid);
}

static inline int
ath10k_wmi_addba_set_resp(struct ath10k *ar, u32 vdev_id, const u8 *mac,
			  u32 tid, u32 status)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_addba_set_resp)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_addba_set_resp(ar, vdev_id, mac, tid, status);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->addba_set_resp_cmdid);
}

static inline int
ath10k_wmi_delba_send(struct ath10k *ar, u32 vdev_id, const u8 *mac,
		      u32 tid, u32 initiator, u32 reason)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_delba_send)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_delba_send(ar, vdev_id, mac, tid, initiator,
					  reason);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->delba_send_cmdid);
}

static inline int
ath10k_wmi_bcn_tmpl(struct ath10k *ar, u32 vdev_id, u32 tim_ie_offset,
		    struct sk_buff *bcn, u32 prb_caps, u32 prb_erp,
		    void *prb_ies, size_t prb_ies_len)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_bcn_tmpl)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_bcn_tmpl(ar, vdev_id, tim_ie_offset, bcn,
					prb_caps, prb_erp, prb_ies,
					prb_ies_len);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->bcn_tmpl_cmdid);
}

static inline int
ath10k_wmi_prb_tmpl(struct ath10k *ar, u32 vdev_id, struct sk_buff *prb)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_prb_tmpl)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_prb_tmpl(ar, vdev_id, prb);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->prb_tmpl_cmdid);
}

static inline int
ath10k_wmi_p2p_go_bcn_ie(struct ath10k *ar, u32 vdev_id, const u8 *p2p_ie)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_p2p_go_bcn_ie)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_p2p_go_bcn_ie(ar, vdev_id, p2p_ie);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->p2p_go_set_beacon_ie);
}

static inline int
ath10k_wmi_sta_keepalive(struct ath10k *ar,
			 const struct wmi_sta_keepalive_arg *arg)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_sta_keepalive)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_sta_keepalive(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->sta_keepalive_cmd;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}
static inline int
ath10k_wmi_wow_enable(struct ath10k *ar)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_enable)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_enable(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->wow_enable_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_wow_add_wakeup_event(struct ath10k *ar, u32 vdev_id,
				enum wmi_wow_wakeup_event event,
				u32 enable)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_add_wakeup_event)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_add_wakeup_event(ar, vdev_id, event, enable);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->wow_enable_disable_wake_event_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_wow_host_wakeup_ind(struct ath10k *ar)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_host_wakeup_ind)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_host_wakeup_ind(ar);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->wow_hostwakeup_from_sleep_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_wow_add_pattern(struct ath10k *ar, u32 vdev_id, u32 pattern_id,
			   const u8 *pattern, const u8 *mask,
			   int pattern_len, int pattern_offset)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_add_pattern)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_add_pattern(ar, vdev_id, pattern_id,
					       pattern, mask, pattern_len,
					       pattern_offset);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->wow_add_wake_pattern_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_wow_del_pattern(struct ath10k *ar, u32 vdev_id, u32 pattern_id)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_del_pattern)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_del_pattern(ar, vdev_id, pattern_id);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->wow_del_wake_pattern_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_wow_config_pno(struct ath10k *ar, u32 vdev_id,
			  struct wmi_pno_scan_req *pno_scan)
{
	struct sk_buff *skb;
	u32 cmd_id;

	if (!ar->wmi.ops->gen_wow_config_pno)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_wow_config_pno(ar, vdev_id, pno_scan);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	cmd_id = ar->wmi.cmd->network_list_offload_config_cmdid;
	return ath10k_wmi_cmd_send(ar, skb, cmd_id);
}

static inline int
ath10k_wmi_update_fw_tdls_state(struct ath10k *ar, u32 vdev_id,
				enum wmi_tdls_state state)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_update_fw_tdls_state)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_update_fw_tdls_state(ar, vdev_id, state);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->tdls_set_state_cmdid);
}

static inline int
ath10k_wmi_tdls_peer_update(struct ath10k *ar,
			    const struct wmi_tdls_peer_update_cmd_arg *arg,
			    const struct wmi_tdls_peer_capab_arg *cap,
			    const struct wmi_channel_arg *chan)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_tdls_peer_update)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_tdls_peer_update(ar, arg, cap, chan);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->tdls_peer_update_cmdid);
}
static inline int
ath10k_wmi_adaptive_qcs(struct ath10k *ar, bool enable)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_adaptive_qcs)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_adaptive_qcs(ar, enable);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, ar->wmi.cmd->adaptive_qcs_cmdid);
}

static inline int
ath10k_wmi_pdev_get_tpc_config(struct ath10k *ar, u32 param)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_get_tpc_config)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_get_tpc_config(ar, param);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_get_tpc_config_cmdid);
}

static inline int
ath10k_wmi_fw_stats_fill(struct ath10k *ar, struct ath10k_fw_stats *fw_stats,
			 char *buf)
{
	if (!ar->wmi.ops->fw_stats_fill)
		return -EOPNOTSUPP;

	ar->wmi.ops->fw_stats_fill(ar, fw_stats, buf);
	return 0;
}

static inline int
ath10k_wmi_pdev_enable_adaptive_cca(struct ath10k *ar, u8 enable,
				    u32 detect_level, u32 detect_margin)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_enable_adaptive_cca)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_enable_adaptive_cca(ar, enable,
							detect_level,
							detect_margin);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_enable_adaptive_cca_cmdid);
}

static inline int
ath10k_wmi_ext_resource_config(struct ath10k *ar,
			       enum wmi_host_platform_type type,
			       u32 fw_feature_bitmap)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->ext_resource_config)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->ext_resource_config(ar, type,
					       fw_feature_bitmap);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->ext_resource_cfg_cmdid);
}

static inline int
ath10k_wmi_get_vdev_subtype(struct ath10k *ar, enum wmi_vdev_subtype subtype)
{
	if (!ar->wmi.ops->get_vdev_subtype)
		return -EOPNOTSUPP;

	return ar->wmi.ops->get_vdev_subtype(ar, subtype);
}

static inline int
ath10k_wmi_pdev_bss_chan_info_request(struct ath10k *ar,
				      enum wmi_bss_survey_req_type type)
{
	struct ath10k_wmi *wmi = &ar->wmi;
	struct sk_buff *skb;

	if (!wmi->ops->gen_pdev_bss_chan_info_req)
		return -EOPNOTSUPP;

	skb = wmi->ops->gen_pdev_bss_chan_info_req(ar, type);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   wmi->cmd->pdev_bss_chan_info_request_cmdid);
}

static inline int
ath10k_wmi_echo(struct ath10k *ar, u32 value)
{
	struct ath10k_wmi *wmi = &ar->wmi;
	struct sk_buff *skb;

	if (!wmi->ops->gen_echo)
		return -EOPNOTSUPP;

	skb = wmi->ops->gen_echo(ar, value);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb, wmi->cmd->echo_cmdid);
}

static inline int
ath10k_wmi_pdev_get_tpc_table_cmdid(struct ath10k *ar, u32 param)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_pdev_get_tpc_table_cmdid)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_pdev_get_tpc_table_cmdid(ar, param);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->pdev_get_tpc_table_cmdid);
}

static inline int
ath10k_wmi_report_radar_found(struct ath10k *ar,
			      const struct ath10k_radar_found_info *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_radar_found)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_radar_found(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->radar_found_cmdid);
}

static inline int
ath10k_wmi_pdev_bb_timing(struct ath10k *ar,
			  const struct wmi_bb_timing_cfg_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_bb_timing)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_bb_timing(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->set_bb_timing_cmdid);
}

static inline int
ath10k_wmi_set_per_peer_per_tid_cfg(struct ath10k *ar,
				    const struct wmi_per_peer_per_tid_cfg_arg *arg)
{
	struct sk_buff *skb;

	if (!ar->wmi.ops->gen_per_peer_per_tid_cfg)
		return -EOPNOTSUPP;

	skb = ar->wmi.ops->gen_per_peer_per_tid_cfg(ar, arg);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	return ath10k_wmi_cmd_send(ar, skb,
				   ar->wmi.cmd->per_peer_per_tid_config_cmdid);
}