drivers/net/wireless/ath/ath12k/hal.c
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3 * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4 * Copyright (c) 2021-2024 Qualcomm Innovation Center, Inc. All rights reserved.
5 */
6 #include <linux/dma-mapping.h>
7 #include "hal_tx.h"
8 #include "hal_rx.h"
9 #include "debug.h"
10 #include "hal_desc.h"
11 #include "hif.h"
13 static const struct hal_srng_config hw_srng_config_template[] = {
14 /* TODO: max_rings can be populated by querying HW capabilities */
15 [HAL_REO_DST] = {
16 .start_ring_id = HAL_SRNG_RING_ID_REO2SW1,
17 .max_rings = 8,
18 .entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
19 .mac_type = ATH12K_HAL_SRNG_UMAC,
20 .ring_dir = HAL_SRNG_DIR_DST,
21 .max_size = HAL_REO_REO2SW1_RING_BASE_MSB_RING_SIZE,
23 [HAL_REO_EXCEPTION] = {
24 /* Designating REO2SW0 ring as exception ring.
25 * Any of the REO2SW rings can be used as the exception ring.
26 */
27 .start_ring_id = HAL_SRNG_RING_ID_REO2SW0,
28 .max_rings = 1,
29 .entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
30 .mac_type = ATH12K_HAL_SRNG_UMAC,
31 .ring_dir = HAL_SRNG_DIR_DST,
32 .max_size = HAL_REO_REO2SW0_RING_BASE_MSB_RING_SIZE,
34 [HAL_REO_REINJECT] = {
35 .start_ring_id = HAL_SRNG_RING_ID_SW2REO,
36 .max_rings = 4,
37 .entry_size = sizeof(struct hal_reo_entrance_ring) >> 2,
38 .mac_type = ATH12K_HAL_SRNG_UMAC,
39 .ring_dir = HAL_SRNG_DIR_SRC,
40 .max_size = HAL_REO_SW2REO_RING_BASE_MSB_RING_SIZE,
42 [HAL_REO_CMD] = {
43 .start_ring_id = HAL_SRNG_RING_ID_REO_CMD,
44 .max_rings = 1,
45 .entry_size = (sizeof(struct hal_tlv_64_hdr) +
46 sizeof(struct hal_reo_get_queue_stats)) >> 2,
47 .mac_type = ATH12K_HAL_SRNG_UMAC,
48 .ring_dir = HAL_SRNG_DIR_SRC,
49 .max_size = HAL_REO_CMD_RING_BASE_MSB_RING_SIZE,
51 [HAL_REO_STATUS] = {
52 .start_ring_id = HAL_SRNG_RING_ID_REO_STATUS,
53 .max_rings = 1,
54 .entry_size = (sizeof(struct hal_tlv_64_hdr) +
55 sizeof(struct hal_reo_get_queue_stats_status)) >> 2,
56 .mac_type = ATH12K_HAL_SRNG_UMAC,
57 .ring_dir = HAL_SRNG_DIR_DST,
58 .max_size = HAL_REO_STATUS_RING_BASE_MSB_RING_SIZE,
60 [HAL_TCL_DATA] = {
61 .start_ring_id = HAL_SRNG_RING_ID_SW2TCL1,
62 .max_rings = 6,
63 .entry_size = sizeof(struct hal_tcl_data_cmd) >> 2,
64 .mac_type = ATH12K_HAL_SRNG_UMAC,
65 .ring_dir = HAL_SRNG_DIR_SRC,
66 .max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
68 [HAL_TCL_CMD] = {
69 .start_ring_id = HAL_SRNG_RING_ID_SW2TCL_CMD,
70 .max_rings = 1,
71 .entry_size = sizeof(struct hal_tcl_gse_cmd) >> 2,
72 .mac_type = ATH12K_HAL_SRNG_UMAC,
73 .ring_dir = HAL_SRNG_DIR_SRC,
74 .max_size = HAL_SW2TCL1_CMD_RING_BASE_MSB_RING_SIZE,
76 [HAL_TCL_STATUS] = {
77 .start_ring_id = HAL_SRNG_RING_ID_TCL_STATUS,
78 .max_rings = 1,
79 .entry_size = (sizeof(struct hal_tlv_hdr) +
80 sizeof(struct hal_tcl_status_ring)) >> 2,
81 .mac_type = ATH12K_HAL_SRNG_UMAC,
82 .ring_dir = HAL_SRNG_DIR_DST,
83 .max_size = HAL_TCL_STATUS_RING_BASE_MSB_RING_SIZE,
85 [HAL_CE_SRC] = {
86 .start_ring_id = HAL_SRNG_RING_ID_CE0_SRC,
87 .max_rings = 16,
88 .entry_size = sizeof(struct hal_ce_srng_src_desc) >> 2,
89 .mac_type = ATH12K_HAL_SRNG_UMAC,
90 .ring_dir = HAL_SRNG_DIR_SRC,
91 .max_size = HAL_CE_SRC_RING_BASE_MSB_RING_SIZE,
93 [HAL_CE_DST] = {
94 .start_ring_id = HAL_SRNG_RING_ID_CE0_DST,
95 .max_rings = 16,
96 .entry_size = sizeof(struct hal_ce_srng_dest_desc) >> 2,
97 .mac_type = ATH12K_HAL_SRNG_UMAC,
98 .ring_dir = HAL_SRNG_DIR_SRC,
99 .max_size = HAL_CE_DST_RING_BASE_MSB_RING_SIZE,
101 [HAL_CE_DST_STATUS] = {
102 .start_ring_id = HAL_SRNG_RING_ID_CE0_DST_STATUS,
103 .max_rings = 16,
104 .entry_size = sizeof(struct hal_ce_srng_dst_status_desc) >> 2,
105 .mac_type = ATH12K_HAL_SRNG_UMAC,
106 .ring_dir = HAL_SRNG_DIR_DST,
107 .max_size = HAL_CE_DST_STATUS_RING_BASE_MSB_RING_SIZE,
109 [HAL_WBM_IDLE_LINK] = {
110 .start_ring_id = HAL_SRNG_RING_ID_WBM_IDLE_LINK,
111 .max_rings = 1,
112 .entry_size = sizeof(struct hal_wbm_link_desc) >> 2,
113 .mac_type = ATH12K_HAL_SRNG_UMAC,
114 .ring_dir = HAL_SRNG_DIR_SRC,
115 .max_size = HAL_WBM_IDLE_LINK_RING_BASE_MSB_RING_SIZE,
117 [HAL_SW2WBM_RELEASE] = {
118 .start_ring_id = HAL_SRNG_RING_ID_WBM_SW0_RELEASE,
119 .max_rings = 2,
120 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
121 .mac_type = ATH12K_HAL_SRNG_UMAC,
122 .ring_dir = HAL_SRNG_DIR_SRC,
123 .max_size = HAL_SW2WBM_RELEASE_RING_BASE_MSB_RING_SIZE,
125 [HAL_WBM2SW_RELEASE] = {
126 .start_ring_id = HAL_SRNG_RING_ID_WBM2SW0_RELEASE,
127 .max_rings = 8,
128 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
129 .mac_type = ATH12K_HAL_SRNG_UMAC,
130 .ring_dir = HAL_SRNG_DIR_DST,
131 .max_size = HAL_WBM2SW_RELEASE_RING_BASE_MSB_RING_SIZE,
133 [HAL_RXDMA_BUF] = {
134 .start_ring_id = HAL_SRNG_SW2RXDMA_BUF0,
135 .max_rings = 1,
136 .entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
137 .mac_type = ATH12K_HAL_SRNG_DMAC,
138 .ring_dir = HAL_SRNG_DIR_SRC,
139 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
141 [HAL_RXDMA_DST] = {
142 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_RXDMA2SW0,
143 .max_rings = 0,
144 .entry_size = 0,
145 .mac_type = ATH12K_HAL_SRNG_PMAC,
146 .ring_dir = HAL_SRNG_DIR_DST,
147 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
149 [HAL_RXDMA_MONITOR_BUF] = {
150 .start_ring_id = HAL_SRNG_SW2RXMON_BUF0,
151 .max_rings = 1,
152 .entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
153 .mac_type = ATH12K_HAL_SRNG_PMAC,
154 .ring_dir = HAL_SRNG_DIR_SRC,
155 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
157 [HAL_RXDMA_MONITOR_STATUS] = { 0, },
158 [HAL_RXDMA_MONITOR_DESC] = { 0, },
159 [HAL_RXDMA_DIR_BUF] = {
160 .start_ring_id = HAL_SRNG_RING_ID_RXDMA_DIR_BUF,
161 .max_rings = 2,
162 .entry_size = 8 >> 2, /* TODO: Define the struct */
163 .mac_type = ATH12K_HAL_SRNG_PMAC,
164 .ring_dir = HAL_SRNG_DIR_SRC,
165 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
167 [HAL_PPE2TCL] = {
168 .start_ring_id = HAL_SRNG_RING_ID_PPE2TCL1,
169 .max_rings = 1,
170 .entry_size = sizeof(struct hal_tcl_entrance_from_ppe_ring) >> 2,
171 .mac_type = ATH12K_HAL_SRNG_PMAC,
172 .ring_dir = HAL_SRNG_DIR_SRC,
173 .max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
175 [HAL_PPE_RELEASE] = {
176 .start_ring_id = HAL_SRNG_RING_ID_WBM_PPE_RELEASE,
177 .max_rings = 1,
178 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
179 .mac_type = ATH12K_HAL_SRNG_PMAC,
180 .ring_dir = HAL_SRNG_DIR_SRC,
181 .max_size = HAL_WBM2PPE_RELEASE_RING_BASE_MSB_RING_SIZE,
183 [HAL_TX_MONITOR_BUF] = {
184 .start_ring_id = HAL_SRNG_SW2TXMON_BUF0,
185 .max_rings = 1,
186 .entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
187 .mac_type = ATH12K_HAL_SRNG_PMAC,
188 .ring_dir = HAL_SRNG_DIR_SRC,
189 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
191 [HAL_RXDMA_MONITOR_DST] = {
192 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXMON_BUF0,
193 .max_rings = 1,
194 .entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
195 .mac_type = ATH12K_HAL_SRNG_PMAC,
196 .ring_dir = HAL_SRNG_DIR_DST,
197 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
199 [HAL_TX_MONITOR_DST] = {
200 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_TXMON2SW0_BUF0,
201 .max_rings = 1,
202 .entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
203 .mac_type = ATH12K_HAL_SRNG_PMAC,
204 .ring_dir = HAL_SRNG_DIR_DST,
205 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
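/* Note on the template above: every .entry_size is the descriptor size in
 * 32-bit words (hence the sizeof() >> 2), and .max_size bounds the ring
 * size value written into that ring's BASE_MSB register. Illustrative
 * sketch of the convention, mirroring the entry_size * num_entries
 * programming near the end of this file:
 *
 *	ring_size_dwords = srng->entry_size * srng->num_entries;
 *	ring_size_bytes  = ring_size_dwords << 2;
 */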
209 static const struct ath12k_hal_tcl_to_wbm_rbm_map
210 ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
212 .wbm_ring_num = 0,
213 .rbm_id = HAL_RX_BUF_RBM_SW0_BM,
216 .wbm_ring_num = 1,
217 .rbm_id = HAL_RX_BUF_RBM_SW1_BM,
220 .wbm_ring_num = 2,
221 .rbm_id = HAL_RX_BUF_RBM_SW2_BM,
224 .wbm_ring_num = 4,
225 .rbm_id = HAL_RX_BUF_RBM_SW4_BM,
229 static const struct ath12k_hal_tcl_to_wbm_rbm_map
230 ath12k_hal_wcn7850_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
232 .wbm_ring_num = 0,
233 .rbm_id = HAL_RX_BUF_RBM_SW0_BM,
236 .wbm_ring_num = 2,
237 .rbm_id = HAL_RX_BUF_RBM_SW2_BM,
240 .wbm_ring_num = 4,
241 .rbm_id = HAL_RX_BUF_RBM_SW4_BM,
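/* Note: each entry above pairs a TCL data ring with the WBM2SW ring number
 * and return buffer manager (RBM) ID used for its TX completions. QCN9274
 * defines four pairs (WBM rings 0, 1, 2 and 4) while WCN7850 defines three
 * (0, 2 and 4); ring 3 is not used for host TX completions in either map.
 */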
245 static unsigned int ath12k_hal_reo1_ring_id_offset(struct ath12k_base *ab)
247 return HAL_REO1_RING_ID(ab) - HAL_REO1_RING_BASE_LSB(ab);
250 static unsigned int ath12k_hal_reo1_ring_msi1_base_lsb_offset(struct ath12k_base *ab)
252 return HAL_REO1_RING_MSI1_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
255 static unsigned int ath12k_hal_reo1_ring_msi1_base_msb_offset(struct ath12k_base *ab)
257 return HAL_REO1_RING_MSI1_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
260 static unsigned int ath12k_hal_reo1_ring_msi1_data_offset(struct ath12k_base *ab)
262 return HAL_REO1_RING_MSI1_DATA(ab) - HAL_REO1_RING_BASE_LSB(ab);
265 static unsigned int ath12k_hal_reo1_ring_base_msb_offset(struct ath12k_base *ab)
267 return HAL_REO1_RING_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
270 static unsigned int ath12k_hal_reo1_ring_producer_int_setup_offset(struct ath12k_base *ab)
272 return HAL_REO1_RING_PRODUCER_INT_SETUP(ab) - HAL_REO1_RING_BASE_LSB(ab);
275 static unsigned int ath12k_hal_reo1_ring_hp_addr_lsb_offset(struct ath12k_base *ab)
277 return HAL_REO1_RING_HP_ADDR_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
280 static unsigned int ath12k_hal_reo1_ring_hp_addr_msb_offset(struct ath12k_base *ab)
282 return HAL_REO1_RING_HP_ADDR_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
285 static unsigned int ath12k_hal_reo1_ring_misc_offset(struct ath12k_base *ab)
287 return HAL_REO1_RING_MISC(ab) - HAL_REO1_RING_BASE_LSB(ab);
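/* Note: the helpers above return each REO1 register's offset relative to
 * the ring's BASE_LSB register, so generic SRNG setup code can program any
 * REO destination ring from a single hwreg_base. Illustrative sketch,
 * mirroring ath12k_hal_srng_dst_hw_init() further below:
 *
 *	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
 *	ath12k_hif_write32(ab, reg_base +
 *			   ath12k_hal_reo1_ring_msi1_data_offset(ab),
 *			   srng->msi_data);
 */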
290 static bool ath12k_hw_qcn9274_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
292 return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
293 RX_MSDU_END_INFO5_FIRST_MSDU);
296 static bool ath12k_hw_qcn9274_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
298 return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
299 RX_MSDU_END_INFO5_LAST_MSDU);
302 static u8 ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
304 return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
305 RX_MSDU_END_INFO5_L3_HDR_PADDING);
308 static bool ath12k_hw_qcn9274_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
310 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
311 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
314 static u32 ath12k_hw_qcn9274_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
316 return le32_get_bits(desc->u.qcn9274.mpdu_start.info2,
317 RX_MPDU_START_INFO2_ENC_TYPE);
320 static u8 ath12k_hw_qcn9274_rx_desc_get_decap_type(struct hal_rx_desc *desc)
322 return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
323 RX_MSDU_END_INFO11_DECAP_FORMAT);
326 static u8 ath12k_hw_qcn9274_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
328 return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
329 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
332 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
334 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
335 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
338 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
340 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
341 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
344 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
346 return le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
347 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
350 static u16 ath12k_hw_qcn9274_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
352 return le32_get_bits(desc->u.qcn9274.msdu_end.info10,
353 RX_MSDU_END_INFO10_MSDU_LENGTH);
356 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
358 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
359 RX_MSDU_END_INFO12_SGI);
362 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
364 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
365 RX_MSDU_END_INFO12_RATE_MCS);
368 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
370 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
371 RX_MSDU_END_INFO12_RECV_BW);
374 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
376 return __le32_to_cpu(desc->u.qcn9274.msdu_end.phy_meta_data);
379 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
381 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
382 RX_MSDU_END_INFO12_PKT_TYPE);
385 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
387 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
388 RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
391 static u8 ath12k_hw_qcn9274_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
393 return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
394 RX_MSDU_END_INFO5_TID);
397 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
399 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.sw_peer_id);
402 static void ath12k_hw_qcn9274_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
403 struct hal_rx_desc *ldesc)
405 memcpy(&fdesc->u.qcn9274.msdu_end, &ldesc->u.qcn9274.msdu_end,
406 sizeof(struct rx_msdu_end_qcn9274));
409 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
411 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.phy_ppdu_id);
414 static void ath12k_hw_qcn9274_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
416 u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info10);
418 info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
419 info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
421 desc->u.qcn9274.msdu_end.info10 = __cpu_to_le32(info);
424 static u8 *ath12k_hw_qcn9274_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
426 return &desc->u.qcn9274.msdu_payload[0];
429 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset(void)
431 return offsetof(struct hal_rx_desc_qcn9274, mpdu_start);
434 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset(void)
436 return offsetof(struct hal_rx_desc_qcn9274, msdu_end);
439 static bool ath12k_hw_qcn9274_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
441 return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
442 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
445 static u8 *ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
447 return desc->u.qcn9274.mpdu_start.addr2;
450 static bool ath12k_hw_qcn9274_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
452 return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info6) &
453 RX_MPDU_START_INFO6_MCAST_BCAST;
456 static void ath12k_hw_qcn9274_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
457 struct ieee80211_hdr *hdr)
459 hdr->frame_control = desc->u.qcn9274.mpdu_start.frame_ctrl;
460 hdr->duration_id = desc->u.qcn9274.mpdu_start.duration;
461 ether_addr_copy(hdr->addr1, desc->u.qcn9274.mpdu_start.addr1);
462 ether_addr_copy(hdr->addr2, desc->u.qcn9274.mpdu_start.addr2);
463 ether_addr_copy(hdr->addr3, desc->u.qcn9274.mpdu_start.addr3);
464 if (__le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
465 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
466 ether_addr_copy(hdr->addr4, desc->u.qcn9274.mpdu_start.addr4);
468 hdr->seq_ctrl = desc->u.qcn9274.mpdu_start.seq_ctrl;
471 static void ath12k_hw_qcn9274_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
472 u8 *crypto_hdr,
473 enum hal_encrypt_type enctype)
475 unsigned int key_id;
477 switch (enctype) {
478 case HAL_ENCRYPT_TYPE_OPEN:
479 return;
480 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
481 case HAL_ENCRYPT_TYPE_TKIP_MIC:
482 crypto_hdr[0] =
483 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
484 crypto_hdr[1] = 0;
485 crypto_hdr[2] =
486 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
487 break;
488 case HAL_ENCRYPT_TYPE_CCMP_128:
489 case HAL_ENCRYPT_TYPE_CCMP_256:
490 case HAL_ENCRYPT_TYPE_GCMP_128:
491 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
492 crypto_hdr[0] =
493 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
494 crypto_hdr[1] =
495 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
496 crypto_hdr[2] = 0;
497 break;
498 case HAL_ENCRYPT_TYPE_WEP_40:
499 case HAL_ENCRYPT_TYPE_WEP_104:
500 case HAL_ENCRYPT_TYPE_WEP_128:
501 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
502 case HAL_ENCRYPT_TYPE_WAPI:
503 return;
505 key_id = le32_get_bits(desc->u.qcn9274.mpdu_start.info5,
506 RX_MPDU_START_INFO5_KEY_ID);
507 crypto_hdr[3] = 0x20 | (key_id << 6);
508 crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274.mpdu_start.pn[0]);
509 crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274.mpdu_start.pn[0]);
510 crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[1]);
511 crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[1]);
514 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
516 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.frame_ctrl);
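/* Note: the QCN9274 accessors above all follow one pattern: select the
 * qcn9274 member of the hal_rx_desc union and extract a single field from a
 * little-endian info word with le16_get_bits()/le32_get_bits(), so callers
 * get host-endian values without knowing the chip's descriptor layout.
 */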
519 static int ath12k_hal_srng_create_config_qcn9274(struct ath12k_base *ab)
521 struct ath12k_hal *hal = &ab->hal;
522 struct hal_srng_config *s;
524 hal->srng_config = kmemdup(hw_srng_config_template,
525 sizeof(hw_srng_config_template),
526 GFP_KERNEL);
527 if (!hal->srng_config)
528 return -ENOMEM;
530 s = &hal->srng_config[HAL_REO_DST];
531 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
532 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
533 s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
534 s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
536 s = &hal->srng_config[HAL_REO_EXCEPTION];
537 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
538 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
540 s = &hal->srng_config[HAL_REO_REINJECT];
541 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
542 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
543 s->reg_size[0] = HAL_SW2REO1_RING_BASE_LSB(ab) - HAL_SW2REO_RING_BASE_LSB(ab);
544 s->reg_size[1] = HAL_SW2REO1_RING_HP - HAL_SW2REO_RING_HP;
546 s = &hal->srng_config[HAL_REO_CMD];
547 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
548 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
550 s = &hal->srng_config[HAL_REO_STATUS];
551 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
552 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
554 s = &hal->srng_config[HAL_TCL_DATA];
555 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
556 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
557 s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
558 s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
560 s = &hal->srng_config[HAL_TCL_CMD];
561 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
562 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
564 s = &hal->srng_config[HAL_TCL_STATUS];
565 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
566 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
568 s = &hal->srng_config[HAL_CE_SRC];
569 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
570 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
571 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
572 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
573 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
574 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
576 s = &hal->srng_config[HAL_CE_DST];
577 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
578 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
579 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
580 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
581 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
582 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
584 s = &hal->srng_config[HAL_CE_DST_STATUS];
585 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
586 HAL_CE_DST_STATUS_RING_BASE_LSB;
587 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
588 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
589 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
590 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
591 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
593 s = &hal->srng_config[HAL_WBM_IDLE_LINK];
594 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
595 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
597 s = &hal->srng_config[HAL_SW2WBM_RELEASE];
598 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
599 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
600 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
601 s->reg_size[0] = HAL_WBM_SW1_RELEASE_RING_BASE_LSB(ab) -
602 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
603 s->reg_size[1] = HAL_WBM_SW1_RELEASE_RING_HP - HAL_WBM_SW_RELEASE_RING_HP;
605 s = &hal->srng_config[HAL_WBM2SW_RELEASE];
606 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
607 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
608 s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
609 HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
610 s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
612 /* Some LMAC rings are not accessed from the host:
613 * RXDMA_BUF, RXDMA_DST, RXDMA_MONITOR_BUF, RXDMA_MONITOR_STATUS,
614 * RXDMA_MONITOR_DST, RXDMA_MONITOR_DESC, RXDMA_DIR_BUF_SRC,
615 * RXDMA_RX_MONITOR_BUF, TX_MONITOR_BUF, TX_MONITOR_DST, SW2RXDMA
616 */
617 s = &hal->srng_config[HAL_PPE2TCL];
618 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_BASE_LSB;
619 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_HP;
621 s = &hal->srng_config[HAL_PPE_RELEASE];
622 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
623 HAL_WBM_PPE_RELEASE_RING_BASE_LSB(ab);
624 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_PPE_RELEASE_RING_HP;
626 return 0;
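/* Note: the function above duplicates the read-only template with kmemdup()
 * and then patches in the QCN9274 register addresses: reg_start[0]/[1] hold
 * the first ring's setup (BASE_LSB) and head pointer (HP) register
 * addresses for each ring type, and reg_size[0]/[1] the stride to the next
 * ring of that type. Illustrative sketch of how a per-ring register is
 * derived (variable names here are examples, not taken from this file):
 *
 *	u32 base = s->reg_start[0] + ring_num * s->reg_size[0];
 *	u32 hp   = s->reg_start[1] + ring_num * s->reg_size[1];
 */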
629 static u16 ath12k_hal_qcn9274_rx_mpdu_start_wmask_get(void)
631 return QCN9274_MPDU_START_WMASK;
634 static u32 ath12k_hal_qcn9274_rx_msdu_end_wmask_get(void)
636 return QCN9274_MSDU_END_WMASK;
639 static const struct hal_rx_ops *ath12k_hal_qcn9274_get_hal_rx_compact_ops(void)
641 return &hal_rx_qcn9274_compact_ops;
644 static bool ath12k_hw_qcn9274_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
646 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info14,
647 RX_MSDU_END_INFO14_MSDU_DONE);
650 static bool ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
652 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
653 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
656 static bool ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
658 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
659 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
662 static bool ath12k_hw_qcn9274_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
664 return (le32_get_bits(desc->u.qcn9274.msdu_end.info14,
665 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
666 RX_DESC_DECRYPT_STATUS_CODE_OK);
669 static u32 ath12k_hw_qcn9274_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
671 u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info13);
672 u32 errmap = 0;
674 if (info & RX_MSDU_END_INFO13_FCS_ERR)
675 errmap |= HAL_RX_MPDU_ERR_FCS;
677 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
678 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
680 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
681 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
683 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
684 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
686 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
687 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
689 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
690 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
692 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
693 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
695 return errmap;
698 static u32 ath12k_hw_qcn9274_get_rx_desc_size(void)
700 return sizeof(struct hal_rx_desc_qcn9274);
703 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
705 return 0;
708 const struct hal_rx_ops hal_rx_qcn9274_ops = {
709 .rx_desc_get_first_msdu = ath12k_hw_qcn9274_rx_desc_get_first_msdu,
710 .rx_desc_get_last_msdu = ath12k_hw_qcn9274_rx_desc_get_last_msdu,
711 .rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes,
712 .rx_desc_encrypt_valid = ath12k_hw_qcn9274_rx_desc_encrypt_valid,
713 .rx_desc_get_encrypt_type = ath12k_hw_qcn9274_rx_desc_get_encrypt_type,
714 .rx_desc_get_decap_type = ath12k_hw_qcn9274_rx_desc_get_decap_type,
715 .rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_rx_desc_get_mesh_ctl,
716 .rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld,
717 .rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid,
718 .rx_desc_get_mpdu_start_seq_no = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no,
719 .rx_desc_get_msdu_len = ath12k_hw_qcn9274_rx_desc_get_msdu_len,
720 .rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_rx_desc_get_msdu_sgi,
721 .rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs,
722 .rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw,
723 .rx_desc_get_msdu_freq = ath12k_hw_qcn9274_rx_desc_get_msdu_freq,
724 .rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type,
725 .rx_desc_get_msdu_nss = ath12k_hw_qcn9274_rx_desc_get_msdu_nss,
726 .rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_rx_desc_get_mpdu_tid,
727 .rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id,
728 .rx_desc_copy_end_tlv = ath12k_hw_qcn9274_rx_desc_copy_end_tlv,
729 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id,
730 .rx_desc_set_msdu_len = ath12k_hw_qcn9274_rx_desc_set_msdu_len,
731 .rx_desc_get_msdu_payload = ath12k_hw_qcn9274_rx_desc_get_msdu_payload,
732 .rx_desc_get_mpdu_start_offset = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset,
733 .rx_desc_get_msdu_end_offset = ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset,
734 .rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_rx_desc_mac_addr2_valid,
735 .rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2,
736 .rx_desc_is_da_mcbc = ath12k_hw_qcn9274_rx_desc_is_da_mcbc,
737 .rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_rx_desc_get_dot11_hdr,
738 .rx_desc_get_crypto_header = ath12k_hw_qcn9274_rx_desc_get_crypto_hdr,
739 .rx_desc_get_mpdu_frame_ctl = ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl,
740 .dp_rx_h_msdu_done = ath12k_hw_qcn9274_dp_rx_h_msdu_done,
741 .dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail,
742 .dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail,
743 .dp_rx_h_is_decrypted = ath12k_hw_qcn9274_dp_rx_h_is_decrypted,
744 .dp_rx_h_mpdu_err = ath12k_hw_qcn9274_dp_rx_h_mpdu_err,
745 .rx_desc_get_desc_size = ath12k_hw_qcn9274_get_rx_desc_size,
746 .rx_desc_get_msdu_src_link_id = ath12k_hw_qcn9274_rx_desc_get_msdu_src_link,
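/* Note: hal_rx_qcn9274_ops bundles the accessors above into a per-chip
 * vtable so the datapath can parse RX descriptors through function
 * pointers. Illustrative usage sketch (the hal_rx_ops pointer in
 * ath12k_base is assumed here; it is not defined in this file):
 *
 *	u16 msdu_len = ab->hal_rx_ops->rx_desc_get_msdu_len(rx_desc);
 */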
749 static bool ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
751 return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
752 RX_MSDU_END_INFO5_FIRST_MSDU);
755 static bool ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
757 return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
758 RX_MSDU_END_INFO5_LAST_MSDU);
761 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
763 return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
764 RX_MSDU_END_INFO5_L3_HDR_PADDING);
767 static bool ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
769 return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
770 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
773 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
775 return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info2,
776 RX_MPDU_START_INFO2_ENC_TYPE);
779 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_decap_type(struct hal_rx_desc *desc)
781 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info11,
782 RX_MSDU_END_INFO11_DECAP_FORMAT);
785 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
787 return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
788 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
791 static bool
792 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
794 return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
795 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
798 static bool ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
800 return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
801 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
804 static u16
805 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
807 return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
808 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
811 static u16 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
813 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info10,
814 RX_MSDU_END_INFO10_MSDU_LENGTH);
817 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
819 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
820 RX_MSDU_END_INFO12_SGI);
823 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
825 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
826 RX_MSDU_END_INFO12_RATE_MCS);
829 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
831 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
832 RX_MSDU_END_INFO12_RECV_BW);
835 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
837 return __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.phy_meta_data);
840 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
842 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
843 RX_MSDU_END_INFO12_PKT_TYPE);
846 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
848 return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
849 RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
852 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
854 return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
855 RX_MSDU_END_INFO5_TID);
858 static u16 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
860 return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.sw_peer_id);
863 static void ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
864 struct hal_rx_desc *ldesc)
866 fdesc->u.qcn9274_compact.msdu_end = ldesc->u.qcn9274_compact.msdu_end;
869 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
871 return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.phy_ppdu_id);
874 static void
875 ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
877 u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info10);
879 info = u32_replace_bits(info, len, RX_MSDU_END_INFO10_MSDU_LENGTH);
880 desc->u.qcn9274_compact.msdu_end.info10 = __cpu_to_le32(info);
883 static u8 *ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
885 return &desc->u.qcn9274_compact.msdu_payload[0];
888 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset(void)
890 return offsetof(struct hal_rx_desc_qcn9274_compact, mpdu_start);
893 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset(void)
895 return offsetof(struct hal_rx_desc_qcn9274_compact, msdu_end);
898 static bool ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
900 return __le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
901 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
904 static u8 *ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
906 return desc->u.qcn9274_compact.mpdu_start.addr2;
909 static bool ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
911 return __le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info6) &
912 RX_MPDU_START_INFO6_MCAST_BCAST;
915 static void ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
916 struct ieee80211_hdr *hdr)
918 hdr->frame_control = desc->u.qcn9274_compact.mpdu_start.frame_ctrl;
919 hdr->duration_id = desc->u.qcn9274_compact.mpdu_start.duration;
920 ether_addr_copy(hdr->addr1, desc->u.qcn9274_compact.mpdu_start.addr1);
921 ether_addr_copy(hdr->addr2, desc->u.qcn9274_compact.mpdu_start.addr2);
922 ether_addr_copy(hdr->addr3, desc->u.qcn9274_compact.mpdu_start.addr3);
923 if (__le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
924 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
925 ether_addr_copy(hdr->addr4, desc->u.qcn9274_compact.mpdu_start.addr4);
927 hdr->seq_ctrl = desc->u.qcn9274_compact.mpdu_start.seq_ctrl;
930 static void
931 ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
932 u8 *crypto_hdr,
933 enum hal_encrypt_type enctype)
935 unsigned int key_id;
937 switch (enctype) {
938 case HAL_ENCRYPT_TYPE_OPEN:
939 return;
940 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
941 case HAL_ENCRYPT_TYPE_TKIP_MIC:
942 crypto_hdr[0] =
943 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
944 crypto_hdr[1] = 0;
945 crypto_hdr[2] =
946 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
947 break;
948 case HAL_ENCRYPT_TYPE_CCMP_128:
949 case HAL_ENCRYPT_TYPE_CCMP_256:
950 case HAL_ENCRYPT_TYPE_GCMP_128:
951 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
952 crypto_hdr[0] =
953 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
954 crypto_hdr[1] =
955 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
956 crypto_hdr[2] = 0;
957 break;
958 case HAL_ENCRYPT_TYPE_WEP_40:
959 case HAL_ENCRYPT_TYPE_WEP_104:
960 case HAL_ENCRYPT_TYPE_WEP_128:
961 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
962 case HAL_ENCRYPT_TYPE_WAPI:
963 return;
965 key_id = le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info5,
966 RX_MPDU_START_INFO5_KEY_ID);
967 crypto_hdr[3] = 0x20 | (key_id << 6);
968 crypto_hdr[4] =
969 HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274_compact.mpdu_start.pn[0]);
970 crypto_hdr[5] =
971 HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274_compact.mpdu_start.pn[0]);
972 crypto_hdr[6] =
973 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[1]);
974 crypto_hdr[7] =
975 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[1]);
978 static u16 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
980 return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.frame_ctrl);
983 static bool ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
985 return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
986 RX_MSDU_END_INFO14_MSDU_DONE);
989 static bool ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
991 return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
992 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
995 static bool ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
997 return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
998 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1001 static bool ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1003 return (le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
1004 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1005 RX_DESC_DECRYPT_STATUS_CODE_OK);
1008 static u32 ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1010 u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info13);
1011 u32 errmap = 0;
1013 if (info & RX_MSDU_END_INFO13_FCS_ERR)
1014 errmap |= HAL_RX_MPDU_ERR_FCS;
1016 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1017 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1019 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1020 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1022 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1023 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1025 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1026 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1028 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1029 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1031 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1032 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1034 return errmap;
1037 static u32 ath12k_hw_qcn9274_compact_get_rx_desc_size(void)
1039 return sizeof(struct hal_rx_desc_qcn9274_compact);
1042 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
1044 return le64_get_bits(desc->u.qcn9274_compact.msdu_end.msdu_end_tag,
1045 RX_MSDU_END_64_TLV_SRC_LINK_ID);
1048 const struct hal_rx_ops hal_rx_qcn9274_compact_ops = {
1049 .rx_desc_get_first_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu,
1050 .rx_desc_get_last_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu,
1051 .rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes,
1052 .rx_desc_encrypt_valid = ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid,
1053 .rx_desc_get_encrypt_type = ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type,
1054 .rx_desc_get_decap_type = ath12k_hw_qcn9274_compact_rx_desc_get_decap_type,
1055 .rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl,
1056 .rx_desc_get_mpdu_seq_ctl_vld =
1057 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld,
1058 .rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid,
1059 .rx_desc_get_mpdu_start_seq_no =
1060 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no,
1061 .rx_desc_get_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len,
1062 .rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi,
1063 .rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs,
1064 .rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw,
1065 .rx_desc_get_msdu_freq = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq,
1066 .rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type,
1067 .rx_desc_get_msdu_nss = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss,
1068 .rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid,
1069 .rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id,
1070 .rx_desc_copy_end_tlv = ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv,
1071 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id,
1072 .rx_desc_set_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len,
1073 .rx_desc_get_msdu_payload = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload,
1074 .rx_desc_get_mpdu_start_offset =
1075 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset,
1076 .rx_desc_get_msdu_end_offset =
1077 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset,
1078 .rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid,
1079 .rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2,
1080 .rx_desc_is_da_mcbc = ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc,
1081 .rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr,
1082 .rx_desc_get_crypto_header = ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr,
1083 .rx_desc_get_mpdu_frame_ctl =
1084 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_frame_ctl,
1085 .dp_rx_h_msdu_done = ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done,
1086 .dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail,
1087 .dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail,
1088 .dp_rx_h_is_decrypted = ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted,
1089 .dp_rx_h_mpdu_err = ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err,
1090 .rx_desc_get_desc_size = ath12k_hw_qcn9274_compact_get_rx_desc_size,
1091 .rx_desc_get_msdu_src_link_id =
1092 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link,
1095 const struct hal_ops hal_qcn9274_ops = {
1096 .create_srng_config = ath12k_hal_srng_create_config_qcn9274,
1097 .tcl_to_wbm_rbm_map = ath12k_hal_qcn9274_tcl_to_wbm_rbm_map,
1098 .rxdma_ring_wmask_rx_mpdu_start = ath12k_hal_qcn9274_rx_mpdu_start_wmask_get,
1099 .rxdma_ring_wmask_rx_msdu_end = ath12k_hal_qcn9274_rx_msdu_end_wmask_get,
1100 .get_hal_rx_compact_ops = ath12k_hal_qcn9274_get_hal_rx_compact_ops,
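/* Note: hal_qcn9274_ops is the chip-level entry point: it supplies the SRNG
 * config constructor, the TCL->WBM map, and the RX word masks
 * (QCN9274_MPDU_START_WMASK / QCN9274_MSDU_END_WMASK) used to subscribe to
 * a subset of descriptor words; when that subscription is active the driver
 * switches to the compact accessors returned by get_hal_rx_compact_ops().
 */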
1103 static bool ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
1105 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1106 RX_MSDU_END_INFO5_FIRST_MSDU);
1109 static bool ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
1111 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1112 RX_MSDU_END_INFO5_LAST_MSDU);
1115 static u8 ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
1117 return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1118 RX_MSDU_END_INFO5_L3_HDR_PADDING);
1121 static bool ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
1123 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1124 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
1127 static u32 ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
1129 return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1130 RX_MPDU_START_INFO2_ENC_TYPE);
1133 static u8 ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc *desc)
1135 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1136 RX_MSDU_END_INFO11_DECAP_FORMAT);
1139 static u8 ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
1141 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1142 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
1145 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
1147 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1148 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
1151 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
1153 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1154 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
1157 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
1159 return le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1160 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
1163 static u16 ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
1165 return le32_get_bits(desc->u.wcn7850.msdu_end.info10,
1166 RX_MSDU_END_INFO10_MSDU_LENGTH);
1169 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
1171 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1172 RX_MSDU_END_INFO12_SGI);
1175 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
1177 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1178 RX_MSDU_END_INFO12_RATE_MCS);
1181 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
1183 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1184 RX_MSDU_END_INFO12_RECV_BW);
1187 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
1189 return __le32_to_cpu(desc->u.wcn7850.msdu_end.phy_meta_data);
1192 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
1194 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1195 RX_MSDU_END_INFO12_PKT_TYPE);
1198 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
1200 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1201 RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
1204 static u8 ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
1206 return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1207 RX_MPDU_START_INFO2_TID);
1210 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
1212 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.sw_peer_id);
1215 static void ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
1216 struct hal_rx_desc *ldesc)
1218 memcpy(&fdesc->u.wcn7850.msdu_end, &ldesc->u.wcn7850.msdu_end,
1219 sizeof(struct rx_msdu_end_qcn9274));
1222 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc *desc)
1224 return le64_get_bits(desc->u.wcn7850.mpdu_start_tag,
1225 HAL_TLV_HDR_TAG);
1228 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
1230 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.phy_ppdu_id);
1233 static void ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
1235 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info10);
1237 info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
1238 info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
1240 desc->u.wcn7850.msdu_end.info10 = __cpu_to_le32(info);
1243 static u8 *ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
1245 return &desc->u.wcn7850.msdu_payload[0];
1248 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)
1250 return offsetof(struct hal_rx_desc_wcn7850, mpdu_start_tag);
1253 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)
1255 return offsetof(struct hal_rx_desc_wcn7850, msdu_end_tag);
1258 static bool ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
1260 return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1261 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
1264 static u8 *ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
1266 return desc->u.wcn7850.mpdu_start.addr2;
1269 static bool ath12k_hw_wcn7850_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
1271 return __le32_to_cpu(desc->u.wcn7850.msdu_end.info13) &
1272 RX_MSDU_END_INFO13_MCAST_BCAST;
1275 static void ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
1276 struct ieee80211_hdr *hdr)
1278 hdr->frame_control = desc->u.wcn7850.mpdu_start.frame_ctrl;
1279 hdr->duration_id = desc->u.wcn7850.mpdu_start.duration;
1280 ether_addr_copy(hdr->addr1, desc->u.wcn7850.mpdu_start.addr1);
1281 ether_addr_copy(hdr->addr2, desc->u.wcn7850.mpdu_start.addr2);
1282 ether_addr_copy(hdr->addr3, desc->u.wcn7850.mpdu_start.addr3);
1283 if (__le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1284 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
1285 ether_addr_copy(hdr->addr4, desc->u.wcn7850.mpdu_start.addr4);
1287 hdr->seq_ctrl = desc->u.wcn7850.mpdu_start.seq_ctrl;
1290 static void ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
1291 u8 *crypto_hdr,
1292 enum hal_encrypt_type enctype)
1294 unsigned int key_id;
1296 switch (enctype) {
1297 case HAL_ENCRYPT_TYPE_OPEN:
1298 return;
1299 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
1300 case HAL_ENCRYPT_TYPE_TKIP_MIC:
1301 crypto_hdr[0] =
1302 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1303 crypto_hdr[1] = 0;
1304 crypto_hdr[2] =
1305 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1306 break;
1307 case HAL_ENCRYPT_TYPE_CCMP_128:
1308 case HAL_ENCRYPT_TYPE_CCMP_256:
1309 case HAL_ENCRYPT_TYPE_GCMP_128:
1310 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
1311 crypto_hdr[0] =
1312 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1313 crypto_hdr[1] =
1314 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1315 crypto_hdr[2] = 0;
1316 break;
1317 case HAL_ENCRYPT_TYPE_WEP_40:
1318 case HAL_ENCRYPT_TYPE_WEP_104:
1319 case HAL_ENCRYPT_TYPE_WEP_128:
1320 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
1321 case HAL_ENCRYPT_TYPE_WAPI:
1322 return;
1324 key_id = u32_get_bits(__le32_to_cpu(desc->u.wcn7850.mpdu_start.info5),
1325 RX_MPDU_START_INFO5_KEY_ID);
1326 crypto_hdr[3] = 0x20 | (key_id << 6);
1327 crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.wcn7850.mpdu_start.pn[0]);
1328 crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.wcn7850.mpdu_start.pn[0]);
1329 crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[1]);
1330 crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[1]);
1333 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
1335 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.frame_ctrl);
1338 static int ath12k_hal_srng_create_config_wcn7850(struct ath12k_base *ab)
1340 struct ath12k_hal *hal = &ab->hal;
1341 struct hal_srng_config *s;
1343 hal->srng_config = kmemdup(hw_srng_config_template,
1344 sizeof(hw_srng_config_template),
1345 GFP_KERNEL);
1346 if (!hal->srng_config)
1347 return -ENOMEM;
1349 s = &hal->srng_config[HAL_REO_DST];
1350 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
1351 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
1352 s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
1353 s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
1355 s = &hal->srng_config[HAL_REO_EXCEPTION];
1356 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
1357 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
1359 s = &hal->srng_config[HAL_REO_REINJECT];
1360 s->max_rings = 1;
1361 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
1362 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
1364 s = &hal->srng_config[HAL_REO_CMD];
1365 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
1366 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
1368 s = &hal->srng_config[HAL_REO_STATUS];
1369 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
1370 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
1372 s = &hal->srng_config[HAL_TCL_DATA];
1373 s->max_rings = 5;
1374 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
1375 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
1376 s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
1377 s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
1379 s = &hal->srng_config[HAL_TCL_CMD];
1380 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
1381 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
1383 s = &hal->srng_config[HAL_TCL_STATUS];
1384 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
1385 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
1387 s = &hal->srng_config[HAL_CE_SRC];
1388 s->max_rings = 12;
1389 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
1390 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
1391 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
1392 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
1393 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
1394 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
1396 s = &hal->srng_config[HAL_CE_DST];
1397 s->max_rings = 12;
1398 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
1399 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
1400 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1401 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1402 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1403 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1405 s = &hal->srng_config[HAL_CE_DST_STATUS];
1406 s->max_rings = 12;
1407 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
1408 HAL_CE_DST_STATUS_RING_BASE_LSB;
1409 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
1410 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1411 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1412 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1413 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1415 s = &hal->srng_config[HAL_WBM_IDLE_LINK];
1416 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
1417 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
1419 s = &hal->srng_config[HAL_SW2WBM_RELEASE];
1420 s->max_rings = 1;
1421 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
1422 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
1423 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
1425 s = &hal->srng_config[HAL_WBM2SW_RELEASE];
1426 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1427 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
1428 s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
1429 HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1430 s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
1432 s = &hal->srng_config[HAL_RXDMA_BUF];
1433 s->max_rings = 2;
1434 s->mac_type = ATH12K_HAL_SRNG_PMAC;
1436 s = &hal->srng_config[HAL_RXDMA_DST];
1437 s->max_rings = 1;
1438 s->entry_size = sizeof(struct hal_reo_entrance_ring) >> 2;
1440 /* the rings below are not used */
1441 s = &hal->srng_config[HAL_RXDMA_DIR_BUF];
1442 s->max_rings = 0;
1444 s = &hal->srng_config[HAL_PPE2TCL];
1445 s->max_rings = 0;
1447 s = &hal->srng_config[HAL_PPE_RELEASE];
1448 s->max_rings = 0;
1450 s = &hal->srng_config[HAL_TX_MONITOR_BUF];
1451 s->max_rings = 0;
1453 s = &hal->srng_config[HAL_TX_MONITOR_DST];
1454 s->max_rings = 0;
1456 s = &hal->srng_config[HAL_PPE2TCL];
1457 s->max_rings = 0;
1459 return 0;
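/* Note: the WCN7850 config starts from the same template but trims it for a
 * single-link PCIe chip: 5 TCL data rings, 12 CE rings, one REO reinject
 * and one SW2WBM release ring, RXDMA_BUF handled through PMAC addressing,
 * and the PPE2TCL, PPE_RELEASE, TX monitor and RXDMA_DIR_BUF rings disabled
 * by setting max_rings to 0.
 */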
1462 static bool ath12k_hw_wcn7850_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
1464 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1465 RX_MSDU_END_INFO14_MSDU_DONE);
1468 static bool ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
1470 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1471 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
1474 static bool ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
1476 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1477 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1480 static bool ath12k_hw_wcn7850_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1482 return (le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1483 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1484 RX_DESC_DECRYPT_STATUS_CODE_OK);
1487 static u32 ath12k_hw_wcn7850_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1489 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info13);
1490 u32 errmap = 0;
1492 if (info & RX_MSDU_END_INFO13_FCS_ERR)
1493 errmap |= HAL_RX_MPDU_ERR_FCS;
1495 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1496 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1498 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1499 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1501 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1502 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1504 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1505 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1507 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1508 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1510 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1511 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1513 return errmap;
1516 static u32 ath12k_hw_wcn7850_get_rx_desc_size(void)
1518 return sizeof(struct hal_rx_desc_wcn7850);
1521 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
1523 return 0;
1526 const struct hal_rx_ops hal_rx_wcn7850_ops = {
1527 .rx_desc_get_first_msdu = ath12k_hw_wcn7850_rx_desc_get_first_msdu,
1528 .rx_desc_get_last_msdu = ath12k_hw_wcn7850_rx_desc_get_last_msdu,
1529 .rx_desc_get_l3_pad_bytes = ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes,
1530 .rx_desc_encrypt_valid = ath12k_hw_wcn7850_rx_desc_encrypt_valid,
1531 .rx_desc_get_encrypt_type = ath12k_hw_wcn7850_rx_desc_get_encrypt_type,
1532 .rx_desc_get_decap_type = ath12k_hw_wcn7850_rx_desc_get_decap_type,
1533 .rx_desc_get_mesh_ctl = ath12k_hw_wcn7850_rx_desc_get_mesh_ctl,
1534 .rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld,
1535 .rx_desc_get_mpdu_fc_valid = ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid,
1536 .rx_desc_get_mpdu_start_seq_no = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no,
1537 .rx_desc_get_msdu_len = ath12k_hw_wcn7850_rx_desc_get_msdu_len,
1538 .rx_desc_get_msdu_sgi = ath12k_hw_wcn7850_rx_desc_get_msdu_sgi,
1539 .rx_desc_get_msdu_rate_mcs = ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs,
1540 .rx_desc_get_msdu_rx_bw = ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw,
1541 .rx_desc_get_msdu_freq = ath12k_hw_wcn7850_rx_desc_get_msdu_freq,
1542 .rx_desc_get_msdu_pkt_type = ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type,
1543 .rx_desc_get_msdu_nss = ath12k_hw_wcn7850_rx_desc_get_msdu_nss,
1544 .rx_desc_get_mpdu_tid = ath12k_hw_wcn7850_rx_desc_get_mpdu_tid,
1545 .rx_desc_get_mpdu_peer_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id,
1546 .rx_desc_copy_end_tlv = ath12k_hw_wcn7850_rx_desc_copy_end_tlv,
1547 .rx_desc_get_mpdu_start_tag = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag,
1548 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id,
1549 .rx_desc_set_msdu_len = ath12k_hw_wcn7850_rx_desc_set_msdu_len,
1550 .rx_desc_get_msdu_payload = ath12k_hw_wcn7850_rx_desc_get_msdu_payload,
1551 .rx_desc_get_mpdu_start_offset = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset,
1552 .rx_desc_get_msdu_end_offset = ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset,
1553 .rx_desc_mac_addr2_valid = ath12k_hw_wcn7850_rx_desc_mac_addr2_valid,
1554 .rx_desc_mpdu_start_addr2 = ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2,
1555 .rx_desc_is_da_mcbc = ath12k_hw_wcn7850_rx_desc_is_da_mcbc,
1556 .rx_desc_get_dot11_hdr = ath12k_hw_wcn7850_rx_desc_get_dot11_hdr,
1557 .rx_desc_get_crypto_header = ath12k_hw_wcn7850_rx_desc_get_crypto_hdr,
1558 .rx_desc_get_mpdu_frame_ctl = ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl,
1559 .dp_rx_h_msdu_done = ath12k_hw_wcn7850_dp_rx_h_msdu_done,
1560 .dp_rx_h_l4_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail,
1561 .dp_rx_h_ip_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail,
1562 .dp_rx_h_is_decrypted = ath12k_hw_wcn7850_dp_rx_h_is_decrypted,
1563 .dp_rx_h_mpdu_err = ath12k_hw_wcn7850_dp_rx_h_mpdu_err,
1564 .rx_desc_get_desc_size = ath12k_hw_wcn7850_get_rx_desc_size,
1565 .rx_desc_get_msdu_src_link_id = ath12k_hw_wcn7850_rx_desc_get_msdu_src_link,
1568 const struct hal_ops hal_wcn7850_ops = {
1569 .create_srng_config = ath12k_hal_srng_create_config_wcn7850,
1570 .tcl_to_wbm_rbm_map = ath12k_hal_wcn7850_tcl_to_wbm_rbm_map,
1571 .rxdma_ring_wmask_rx_mpdu_start = NULL,
1572 .rxdma_ring_wmask_rx_msdu_end = NULL,
1573 .get_hal_rx_compact_ops = NULL,
1576 static int ath12k_hal_alloc_cont_rdp(struct ath12k_base *ab)
1578 struct ath12k_hal *hal = &ab->hal;
1579 size_t size;
1581 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1582 hal->rdp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->rdp.paddr,
1583 GFP_KERNEL);
1584 if (!hal->rdp.vaddr)
1585 return -ENOMEM;
1587 return 0;
1590 static void ath12k_hal_free_cont_rdp(struct ath12k_base *ab)
1592 struct ath12k_hal *hal = &ab->hal;
1593 size_t size;
1595 if (!hal->rdp.vaddr)
1596 return;
1598 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1599 dma_free_coherent(ab->dev, size,
1600 hal->rdp.vaddr, hal->rdp.paddr);
1601 hal->rdp.vaddr = NULL;
1604 static int ath12k_hal_alloc_cont_wrp(struct ath12k_base *ab)
1606 struct ath12k_hal *hal = &ab->hal;
1607 size_t size;
1609 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1610 hal->wrp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->wrp.paddr,
1611 GFP_KERNEL);
1612 if (!hal->wrp.vaddr)
1613 return -ENOMEM;
1615 return 0;
1618 static void ath12k_hal_free_cont_wrp(struct ath12k_base *ab)
1620 struct ath12k_hal *hal = &ab->hal;
1621 size_t size;
1623 if (!hal->wrp.vaddr)
1624 return;
1626 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1627 dma_free_coherent(ab->dev, size,
1628 hal->wrp.vaddr, hal->wrp.paddr);
1629 hal->wrp.vaddr = NULL;
1632 static void ath12k_hal_ce_dst_setup(struct ath12k_base *ab,
1633 struct hal_srng *srng, int ring_num)
1635 struct hal_srng_config *srng_config = &ab->hal.srng_config[HAL_CE_DST];
1636 u32 addr;
1637 u32 val;
1639 addr = HAL_CE_DST_RING_CTRL +
1640 srng_config->reg_start[HAL_SRNG_REG_GRP_R0] +
1641 ring_num * srng_config->reg_size[HAL_SRNG_REG_GRP_R0];
1643 val = ath12k_hif_read32(ab, addr);
1644 val &= ~HAL_CE_DST_R0_DEST_CTRL_MAX_LEN;
1645 val |= u32_encode_bits(srng->u.dst_ring.max_buffer_length,
1646 HAL_CE_DST_R0_DEST_CTRL_MAX_LEN);
1647 ath12k_hif_write32(ab, addr, val);
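/* Descriptive overview (added editorial comment, not from the original
 * source): the two *_hw_init() helpers below program a ring into hardware in
 * the same order - optional MSI address/data, ring base address and size,
 * ring id and entry size, interrupt threshold setup, the DMA location in the
 * shared rdp area where hardware mirrors its head (dst) or tail (src)
 * pointer, a reset of HP and TP to mark the ring empty, and finally the MISC
 * register write that enables the SRNG.
 */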
1650 static void ath12k_hal_srng_dst_hw_init(struct ath12k_base *ab,
1651 struct hal_srng *srng)
1653 struct ath12k_hal *hal = &ab->hal;
1654 u32 val;
1655 u64 hp_addr;
1656 u32 reg_base;
1658 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1660 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1661 ath12k_hif_write32(ab, reg_base +
1662 ath12k_hal_reo1_ring_msi1_base_lsb_offset(ab),
1663 srng->msi_addr);
1665 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1666 HAL_REO1_RING_MSI1_BASE_MSB_ADDR) |
1667 HAL_REO1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1668 ath12k_hif_write32(ab, reg_base +
1669 ath12k_hal_reo1_ring_msi1_base_msb_offset(ab), val);
1671 ath12k_hif_write32(ab,
1672 reg_base + ath12k_hal_reo1_ring_msi1_data_offset(ab),
1673 srng->msi_data);
1676 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1678 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1679 HAL_REO1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1680 u32_encode_bits((srng->entry_size * srng->num_entries),
1681 HAL_REO1_RING_BASE_MSB_RING_SIZE);
1682 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_base_msb_offset(ab), val);
1684 val = u32_encode_bits(srng->ring_id, HAL_REO1_RING_ID_RING_ID) |
1685 u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1686 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_id_offset(ab), val);
1688 /* interrupt setup */
1689 val = u32_encode_bits((srng->intr_timer_thres_us >> 3),
1690 HAL_REO1_RING_PRDR_INT_SETUP_INTR_TMR_THOLD);
1692 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1693 HAL_REO1_RING_PRDR_INT_SETUP_BATCH_COUNTER_THOLD);
1695 ath12k_hif_write32(ab,
1696 reg_base + ath12k_hal_reo1_ring_producer_int_setup_offset(ab),
1697 val);
1699 hp_addr = hal->rdp.paddr +
1700 ((unsigned long)srng->u.dst_ring.hp_addr -
1701 (unsigned long)hal->rdp.vaddr);
1702 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_lsb_offset(ab),
1703 hp_addr & HAL_ADDR_LSB_REG_MASK);
1704 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_msb_offset(ab),
1705 hp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1707 /* Initialize head and tail pointers to indicate ring is empty */
1708 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1709 ath12k_hif_write32(ab, reg_base, 0);
1710 ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_TP_OFFSET, 0);
1711 *srng->u.dst_ring.hp_addr = 0;
1713 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1714 val = 0;
1715 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1716 val |= HAL_REO1_RING_MISC_DATA_TLV_SWAP;
1717 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1718 val |= HAL_REO1_RING_MISC_HOST_FW_SWAP;
1719 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1720 val |= HAL_REO1_RING_MISC_MSI_SWAP;
1721 val |= HAL_REO1_RING_MISC_SRNG_ENABLE;
1723 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_misc_offset(ab), val);
1726 static void ath12k_hal_srng_src_hw_init(struct ath12k_base *ab,
1727 struct hal_srng *srng)
1729 struct ath12k_hal *hal = &ab->hal;
1730 u32 val;
1731 u64 tp_addr;
1732 u32 reg_base;
1734 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1736 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1737 ath12k_hif_write32(ab, reg_base +
1738 HAL_TCL1_RING_MSI1_BASE_LSB_OFFSET(ab),
1739 srng->msi_addr);
1741 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1742 HAL_TCL1_RING_MSI1_BASE_MSB_ADDR) |
1743 HAL_TCL1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1744 ath12k_hif_write32(ab, reg_base +
1745 HAL_TCL1_RING_MSI1_BASE_MSB_OFFSET(ab),
1746 val);
1748 ath12k_hif_write32(ab, reg_base +
1749 HAL_TCL1_RING_MSI1_DATA_OFFSET(ab),
1750 srng->msi_data);
1753 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1755 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1756 HAL_TCL1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1757 u32_encode_bits((srng->entry_size * srng->num_entries),
1758 HAL_TCL1_RING_BASE_MSB_RING_SIZE);
1759 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_BASE_MSB_OFFSET, val);
1761 val = u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1762 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_ID_OFFSET(ab), val);
1764 val = u32_encode_bits(srng->intr_timer_thres_us,
1765 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_INTR_TMR_THOLD);
1767 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1768 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_BATCH_COUNTER_THOLD);
1770 ath12k_hif_write32(ab,
1771 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX0_OFFSET(ab),
1772 val);
1774 val = 0;
1775 if (srng->flags & HAL_SRNG_FLAGS_LOW_THRESH_INTR_EN) {
1776 val |= u32_encode_bits(srng->u.src_ring.low_threshold,
1777 HAL_TCL1_RING_CONSR_INT_SETUP_IX1_LOW_THOLD);
1779 ath12k_hif_write32(ab,
1780 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX1_OFFSET(ab),
1781 val);
1783 if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) {
1784 tp_addr = hal->rdp.paddr +
1785 ((unsigned long)srng->u.src_ring.tp_addr -
1786 (unsigned long)hal->rdp.vaddr);
1787 ath12k_hif_write32(ab,
1788 reg_base + HAL_TCL1_RING_TP_ADDR_LSB_OFFSET(ab),
1789 tp_addr & HAL_ADDR_LSB_REG_MASK);
1790 ath12k_hif_write32(ab,
1791 reg_base + HAL_TCL1_RING_TP_ADDR_MSB_OFFSET(ab),
1792 tp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1795 /* Initialize head and tail pointers to indicate ring is empty */
1796 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1797 ath12k_hif_write32(ab, reg_base, 0);
1798 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_TP_OFFSET, 0);
1799 *srng->u.src_ring.tp_addr = 0;
1801 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1802 val = 0;
1803 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1804 val |= HAL_TCL1_RING_MISC_DATA_TLV_SWAP;
1805 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1806 val |= HAL_TCL1_RING_MISC_HOST_FW_SWAP;
1807 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1808 val |= HAL_TCL1_RING_MISC_MSI_SWAP;
1810 /* Loop count is not used for SRC rings */
1811 val |= HAL_TCL1_RING_MISC_MSI_LOOPCNT_DISABLE;
1813 val |= HAL_TCL1_RING_MISC_SRNG_ENABLE;
1815 if (srng->ring_id == HAL_SRNG_RING_ID_WBM_IDLE_LINK)
1816 val |= HAL_TCL1_RING_MISC_MSI_RING_ID_DISABLE;
1818 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_MISC_OFFSET(ab), val);
1821 static void ath12k_hal_srng_hw_init(struct ath12k_base *ab,
1822 struct hal_srng *srng)
1824 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1825 ath12k_hal_srng_src_hw_init(ab, srng);
1826 else
1827 ath12k_hal_srng_dst_hw_init(ab, srng);
1830 static int ath12k_hal_srng_get_ring_id(struct ath12k_base *ab,
1831 enum hal_ring_type type,
1832 int ring_num, int mac_id)
1834 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1835 int ring_id;
1837 if (ring_num >= srng_config->max_rings) {
1838 ath12k_warn(ab, "invalid ring number: %d\n", ring_num);
1839 return -EINVAL;
1842 ring_id = srng_config->start_ring_id + ring_num;
1843 if (srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
1844 ring_id += mac_id * HAL_SRNG_RINGS_PER_PMAC;
1846 if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX))
1847 return -EINVAL;
1849 return ring_id;
1852 int ath12k_hal_srng_get_entrysize(struct ath12k_base *ab, u32 ring_type)
1854 struct hal_srng_config *srng_config;
1856 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1857 return -EINVAL;
1859 srng_config = &ab->hal.srng_config[ring_type];
1861 return (srng_config->entry_size << 2);
1864 int ath12k_hal_srng_get_max_entries(struct ath12k_base *ab, u32 ring_type)
1866 struct hal_srng_config *srng_config;
1868 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1869 return -EINVAL;
1871 srng_config = &ab->hal.srng_config[ring_type];
1873 return (srng_config->max_size / srng_config->entry_size);
1876 void ath12k_hal_srng_get_params(struct ath12k_base *ab, struct hal_srng *srng,
1877 struct hal_srng_params *params)
1879 params->ring_base_paddr = srng->ring_base_paddr;
1880 params->ring_base_vaddr = srng->ring_base_vaddr;
1881 params->num_entries = srng->num_entries;
1882 params->intr_timer_thres_us = srng->intr_timer_thres_us;
1883 params->intr_batch_cntr_thres_entries =
1884 srng->intr_batch_cntr_thres_entries;
1885 params->low_threshold = srng->u.src_ring.low_threshold;
1886 params->msi_addr = srng->msi_addr;
1887 params->msi2_addr = srng->msi2_addr;
1888 params->msi_data = srng->msi_data;
1889 params->msi2_data = srng->msi2_data;
1890 params->flags = srng->flags;
1893 dma_addr_t ath12k_hal_srng_get_hp_addr(struct ath12k_base *ab,
1894 struct hal_srng *srng)
1896 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1897 return 0;
1899 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1900 return ab->hal.wrp.paddr +
1901 ((unsigned long)srng->u.src_ring.hp_addr -
1902 (unsigned long)ab->hal.wrp.vaddr);
1903 else
1904 return ab->hal.rdp.paddr +
1905 ((unsigned long)srng->u.dst_ring.hp_addr -
1906 (unsigned long)ab->hal.rdp.vaddr);
1909 dma_addr_t ath12k_hal_srng_get_tp_addr(struct ath12k_base *ab,
1910 struct hal_srng *srng)
1912 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1913 return 0;
1915 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1916 return ab->hal.rdp.paddr +
1917 ((unsigned long)srng->u.src_ring.tp_addr -
1918 (unsigned long)ab->hal.rdp.vaddr);
1919 else
1920 return ab->hal.wrp.paddr +
1921 ((unsigned long)srng->u.dst_ring.tp_addr -
1922 (unsigned long)ab->hal.wrp.vaddr);
1925 u32 ath12k_hal_ce_get_desc_size(enum hal_ce_desc type)
1927 switch (type) {
1928 case HAL_CE_DESC_SRC:
1929 return sizeof(struct hal_ce_srng_src_desc);
1930 case HAL_CE_DESC_DST:
1931 return sizeof(struct hal_ce_srng_dest_desc);
1932 case HAL_CE_DESC_DST_STATUS:
1933 return sizeof(struct hal_ce_srng_dst_status_desc);
1936 return 0;
1939 void ath12k_hal_ce_src_set_desc(struct hal_ce_srng_src_desc *desc, dma_addr_t paddr,
1940 u32 len, u32 id, u8 byte_swap_data)
1942 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1943 desc->buffer_addr_info =
1944 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1945 HAL_CE_SRC_DESC_ADDR_INFO_ADDR_HI) |
1946 le32_encode_bits(byte_swap_data,
1947 HAL_CE_SRC_DESC_ADDR_INFO_BYTE_SWAP) |
1948 le32_encode_bits(0, HAL_CE_SRC_DESC_ADDR_INFO_GATHER) |
1949 le32_encode_bits(len, HAL_CE_SRC_DESC_ADDR_INFO_LEN);
1950 desc->meta_info = le32_encode_bits(id, HAL_CE_SRC_DESC_META_INFO_DATA);
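/* Worked example of the address split above (editorial note), assuming
 * HAL_ADDR_LSB_REG_MASK covers the low 32 bits and HAL_ADDR_MSB_REG_SHIFT is
 * 32: a 36-bit paddr of 0x123456789 yields buffer_addr_low = 0x23456789 and
 * an ADDR_HI field of 0x1, with the length and byte-swap flag packed into the
 * remaining bits of buffer_addr_info.
 */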
1953 void ath12k_hal_ce_dst_set_desc(struct hal_ce_srng_dest_desc *desc, dma_addr_t paddr)
1955 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1956 desc->buffer_addr_info =
1957 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1958 HAL_CE_DEST_DESC_ADDR_INFO_ADDR_HI);
1961 u32 ath12k_hal_ce_dst_status_get_length(struct hal_ce_srng_dst_status_desc *desc)
1963 u32 len;
1965 len = le32_get_bits(desc->flags, HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
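/* Editorial note: the length field is cleared right after it is read,
 * presumably so that a recycled status descriptor cannot be mistaken for a
 * fresh completion on a later pass.
 */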
1966 desc->flags &= ~cpu_to_le32(HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1968 return len;
1971 void ath12k_hal_set_link_desc_addr(struct hal_wbm_link_desc *desc, u32 cookie,
1972 dma_addr_t paddr,
1973 enum hal_rx_buf_return_buf_manager rbm)
1975 desc->buf_addr_info.info0 = le32_encode_bits((paddr & HAL_ADDR_LSB_REG_MASK),
1976 BUFFER_ADDR_INFO0_ADDR);
1977 desc->buf_addr_info.info1 =
1978 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1979 BUFFER_ADDR_INFO1_ADDR) |
1980 le32_encode_bits(rbm, BUFFER_ADDR_INFO1_RET_BUF_MGR) |
1981 le32_encode_bits(cookie, BUFFER_ADDR_INFO1_SW_COOKIE);
1984 void *ath12k_hal_srng_dst_peek(struct ath12k_base *ab, struct hal_srng *srng)
1986 lockdep_assert_held(&srng->lock);
1988 if (srng->u.dst_ring.tp != srng->u.dst_ring.cached_hp)
1989 return (srng->ring_base_vaddr + srng->u.dst_ring.tp);
1991 return NULL;
1994 void *ath12k_hal_srng_dst_get_next_entry(struct ath12k_base *ab,
1995 struct hal_srng *srng)
1997 void *desc;
1999 lockdep_assert_held(&srng->lock);
2001 if (srng->u.dst_ring.tp == srng->u.dst_ring.cached_hp)
2002 return NULL;
2004 desc = srng->ring_base_vaddr + srng->u.dst_ring.tp;
2006 srng->u.dst_ring.tp = (srng->u.dst_ring.tp + srng->entry_size) %
2007 srng->ring_size;
2009 return desc;
2012 int ath12k_hal_srng_dst_num_free(struct ath12k_base *ab, struct hal_srng *srng,
2013 bool sync_hw_ptr)
2015 u32 tp, hp;
2017 lockdep_assert_held(&srng->lock);
2019 tp = srng->u.dst_ring.tp;
2021 if (sync_hw_ptr) {
2022 hp = *srng->u.dst_ring.hp_addr;
2023 srng->u.dst_ring.cached_hp = hp;
2024 } else {
2025 hp = srng->u.dst_ring.cached_hp;
2028 if (hp >= tp)
2029 return (hp - tp) / srng->entry_size;
2030 else
2031 return (srng->ring_size - tp + hp) / srng->entry_size;
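/* Worked example of the wrap-around branch above (illustrative numbers only):
 * with ring_size = 64 words, entry_size = 8, tp = 56 and hp = 16, the
 * computation yields (64 - 56 + 16) / 8 = 3 entries between tp and hp.
 */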
2034 /* Returns number of available entries in src ring */
2035 int ath12k_hal_srng_src_num_free(struct ath12k_base *ab, struct hal_srng *srng,
2036 bool sync_hw_ptr)
2038 u32 tp, hp;
2040 lockdep_assert_held(&srng->lock);
2042 hp = srng->u.src_ring.hp;
2044 if (sync_hw_ptr) {
2045 tp = *srng->u.src_ring.tp_addr;
2046 srng->u.src_ring.cached_tp = tp;
2047 } else {
2048 tp = srng->u.src_ring.cached_tp;
2051 if (tp > hp)
2052 return ((tp - hp) / srng->entry_size) - 1;
2053 else
2054 return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;
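/* Editorial note on the "- 1" above: one slot is always left unused so that a
 * full ring (hp one entry behind tp) can be told apart from an empty ring
 * (hp == tp). Illustrative numbers: ring_size = 64 words, entry_size = 8,
 * hp = 40, tp = 16 gives (64 - 40 + 16) / 8 - 1 = 4 free entries.
 */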
2057 void *ath12k_hal_srng_src_get_next_entry(struct ath12k_base *ab,
2058 struct hal_srng *srng)
2060 void *desc;
2061 u32 next_hp;
2063 lockdep_assert_held(&srng->lock);
2065 /* TODO: Using % is expensive, but we have to do this since size of some
2066 * SRNG rings is not power of 2 (due to descriptor sizes). Need to see
2067 * if separate function is defined for rings having power of 2 ring size
2068 * (TCL2SW, REO2SW, SW2RXDMA and CE rings) so that we can avoid the
2069 * overhead of % by using mask (with &).
2070 */
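/* Hypothetical mask-based variant referred to in the TODO above: if
 * ring_size were guaranteed to be a power of two, the wrap could instead be
 *
 *   next_hp = (srng->u.src_ring.hp + srng->entry_size) &
 *             (srng->ring_size - 1);
 *
 * This is only a sketch; the generic path below keeps the modulo.
 */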
2071 next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
2073 if (next_hp == srng->u.src_ring.cached_tp)
2074 return NULL;
2076 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2077 srng->u.src_ring.hp = next_hp;
2079 /* TODO: Reap functionality is not used by all rings. If particular
2080 * ring does not use reap functionality, we need not update reap_hp
2081 * with next_hp pointer. Need to make sure a separate function is used
2082 * before doing any optimization by removing below code updating
2083 * reap_hp.
2084 */
2085 srng->u.src_ring.reap_hp = next_hp;
2087 return desc;
2090 void *ath12k_hal_srng_src_reap_next(struct ath12k_base *ab,
2091 struct hal_srng *srng)
2093 void *desc;
2094 u32 next_reap_hp;
2096 lockdep_assert_held(&srng->lock);
2098 next_reap_hp = (srng->u.src_ring.reap_hp + srng->entry_size) %
2099 srng->ring_size;
2101 if (next_reap_hp == srng->u.src_ring.cached_tp)
2102 return NULL;
2104 desc = srng->ring_base_vaddr + next_reap_hp;
2105 srng->u.src_ring.reap_hp = next_reap_hp;
2107 return desc;
2110 void *ath12k_hal_srng_src_get_next_reaped(struct ath12k_base *ab,
2111 struct hal_srng *srng)
2113 void *desc;
2115 lockdep_assert_held(&srng->lock);
2117 if (srng->u.src_ring.hp == srng->u.src_ring.reap_hp)
2118 return NULL;
2120 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2121 srng->u.src_ring.hp = (srng->u.src_ring.hp + srng->entry_size) %
2122 srng->ring_size;
2124 return desc;
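/* Usage sketch (editorial, not taken from a specific caller): the two helpers
 * above support a two-phase hand-off. ath12k_hal_srng_src_reap_next()
 * reserves entries by advancing only reap_hp, so they are not yet counted
 * toward hp; ath12k_hal_srng_src_get_next_reaped() then walks hp forward over
 * the already-reaped range, and the hardware-visible pointer update still
 * happens in ath12k_hal_srng_access_end(). For example:
 *
 *   while (budget-- && (desc = ath12k_hal_srng_src_reap_next(ab, srng)))
 *           prefill(desc);           // hypothetical fill step, budget assumed
 *   while ((desc = ath12k_hal_srng_src_get_next_reaped(ab, srng)))
 *           commit(desc);            // hypothetical commit step
 */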
2127 void ath12k_hal_srng_access_begin(struct ath12k_base *ab, struct hal_srng *srng)
2129 lockdep_assert_held(&srng->lock);
2131 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2132 srng->u.src_ring.cached_tp =
2133 *(volatile u32 *)srng->u.src_ring.tp_addr;
2134 else
2135 srng->u.dst_ring.cached_hp = *srng->u.dst_ring.hp_addr;
2138 /* Update cached ring head/tail pointers to HW. ath12k_hal_srng_access_begin()
2139 * should have been called before this.
2140 */
2141 void ath12k_hal_srng_access_end(struct ath12k_base *ab, struct hal_srng *srng)
2143 lockdep_assert_held(&srng->lock);
2145 /* TODO: See if we need a write memory barrier here */
2146 if (srng->flags & HAL_SRNG_FLAGS_LMAC_RING) {
2147 /* For LMAC rings, ring pointer updates are done through FW and
2148 * hence written to a shared memory location that is read by FW
2149 */
2150 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2151 srng->u.src_ring.last_tp =
2152 *(volatile u32 *)srng->u.src_ring.tp_addr;
2153 *srng->u.src_ring.hp_addr = srng->u.src_ring.hp;
2154 } else {
2155 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2156 *srng->u.dst_ring.tp_addr = srng->u.dst_ring.tp;
2158 } else {
2159 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2160 srng->u.src_ring.last_tp =
2161 *(volatile u32 *)srng->u.src_ring.tp_addr;
2162 ath12k_hif_write32(ab,
2163 (unsigned long)srng->u.src_ring.hp_addr -
2164 (unsigned long)ab->mem,
2165 srng->u.src_ring.hp);
2166 } else {
2167 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2168 ath12k_hif_write32(ab,
2169 (unsigned long)srng->u.dst_ring.tp_addr -
2170 (unsigned long)ab->mem,
2171 srng->u.dst_ring.tp);
2175 srng->timestamp = jiffies;
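/* Typical access pattern (editorial usage sketch; the driver's dp code adds
 * more around it), shown for a destination ring:
 *
 *   spin_lock_bh(&srng->lock);
 *   ath12k_hal_srng_access_begin(ab, srng);
 *   while ((desc = ath12k_hal_srng_dst_get_next_entry(ab, srng)))
 *           process(desc);           // hypothetical per-entry handler
 *   ath12k_hal_srng_access_end(ab, srng);
 *   spin_unlock_bh(&srng->lock);
 */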
2178 void ath12k_hal_setup_link_idle_list(struct ath12k_base *ab,
2179 struct hal_wbm_idle_scatter_list *sbuf,
2180 u32 nsbufs, u32 tot_link_desc,
2181 u32 end_offset)
2183 struct ath12k_buffer_addr *link_addr;
2184 int i;
2185 u32 reg_scatter_buf_sz = HAL_WBM_IDLE_SCATTER_BUF_SIZE / 64;
2186 u32 val;
2188 link_addr = (void *)sbuf[0].vaddr + HAL_WBM_IDLE_SCATTER_BUF_SIZE;
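/* Editorial note: each scatter buffer carries a link entry at offset
 * HAL_WBM_IDLE_SCATTER_BUF_SIZE holding the DMA address of the next buffer,
 * so the loop below chains the nsbufs buffers into one idle-link list that
 * WBM can traverse as a single logical ring.
 */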
2190 for (i = 1; i < nsbufs; i++) {
2191 link_addr->info0 = cpu_to_le32(sbuf[i].paddr & HAL_ADDR_LSB_REG_MASK);
2193 link_addr->info1 =
2194 le32_encode_bits((u64)sbuf[i].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2195 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2196 le32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2197 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG);
2199 link_addr = (void *)sbuf[i].vaddr +
2200 HAL_WBM_IDLE_SCATTER_BUF_SIZE;
2203 val = u32_encode_bits(reg_scatter_buf_sz, HAL_WBM_SCATTER_BUFFER_SIZE) |
2204 u32_encode_bits(0x1, HAL_WBM_LINK_DESC_IDLE_LIST_MODE);
2206 ath12k_hif_write32(ab,
2207 HAL_SEQ_WCSS_UMAC_WBM_REG +
2208 HAL_WBM_R0_IDLE_LIST_CONTROL_ADDR(ab),
2209 val);
2211 val = u32_encode_bits(reg_scatter_buf_sz * nsbufs,
2212 HAL_WBM_SCATTER_RING_SIZE_OF_IDLE_LINK_DESC_LIST);
2213 ath12k_hif_write32(ab,
2214 HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_R0_IDLE_LIST_SIZE_ADDR(ab),
2215 val);
2217 val = u32_encode_bits(sbuf[0].paddr & HAL_ADDR_LSB_REG_MASK,
2218 BUFFER_ADDR_INFO0_ADDR);
2219 ath12k_hif_write32(ab,
2220 HAL_SEQ_WCSS_UMAC_WBM_REG +
2221 HAL_WBM_SCATTERED_RING_BASE_LSB(ab),
2222 val);
2224 val = u32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2225 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG) |
2226 u32_encode_bits((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2227 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32);
2228 ath12k_hif_write32(ab,
2229 HAL_SEQ_WCSS_UMAC_WBM_REG +
2230 HAL_WBM_SCATTERED_RING_BASE_MSB(ab),
2231 val);
2233 /* Setup head and tail pointers for the idle list */
2234 val = u32_encode_bits(sbuf[nsbufs - 1].paddr, BUFFER_ADDR_INFO0_ADDR);
2235 ath12k_hif_write32(ab,
2236 HAL_SEQ_WCSS_UMAC_WBM_REG +
2237 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2238 val);
2240 val = u32_encode_bits(((u64)sbuf[nsbufs - 1].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2241 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2242 u32_encode_bits((end_offset >> 2),
2243 HAL_WBM_SCATTERED_DESC_HEAD_P_OFFSET_IX1);
2244 ath12k_hif_write32(ab,
2245 HAL_SEQ_WCSS_UMAC_WBM_REG +
2246 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX1(ab),
2247 val);
2249 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2250 ath12k_hif_write32(ab,
2251 HAL_SEQ_WCSS_UMAC_WBM_REG +
2252 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2253 val);
2255 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2256 ath12k_hif_write32(ab,
2257 HAL_SEQ_WCSS_UMAC_WBM_REG +
2258 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX0(ab),
2259 val);
2261 val = u32_encode_bits(((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2262 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2263 u32_encode_bits(0, HAL_WBM_SCATTERED_DESC_TAIL_P_OFFSET_IX1);
2264 ath12k_hif_write32(ab,
2265 HAL_SEQ_WCSS_UMAC_WBM_REG +
2266 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX1(ab),
2267 val);
2269 val = 2 * tot_link_desc;
2270 ath12k_hif_write32(ab,
2271 HAL_SEQ_WCSS_UMAC_WBM_REG +
2272 HAL_WBM_SCATTERED_DESC_PTR_HP_ADDR(ab),
2273 val);
2275 /* Enable the SRNG */
2276 val = u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_SRNG_ENABLE) |
2277 u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_RIND_ID_DISABLE);
2278 ath12k_hif_write32(ab,
2279 HAL_SEQ_WCSS_UMAC_WBM_REG +
2280 HAL_WBM_IDLE_LINK_RING_MISC_ADDR(ab),
2281 val);
2284 int ath12k_hal_srng_setup(struct ath12k_base *ab, enum hal_ring_type type,
2285 int ring_num, int mac_id,
2286 struct hal_srng_params *params)
2288 struct ath12k_hal *hal = &ab->hal;
2289 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
2290 struct hal_srng *srng;
2291 int ring_id;
2292 u32 idx;
2293 int i;
2294 u32 reg_base;
2296 ring_id = ath12k_hal_srng_get_ring_id(ab, type, ring_num, mac_id);
2297 if (ring_id < 0)
2298 return ring_id;
2300 srng = &hal->srng_list[ring_id];
2302 srng->ring_id = ring_id;
2303 srng->ring_dir = srng_config->ring_dir;
2304 srng->ring_base_paddr = params->ring_base_paddr;
2305 srng->ring_base_vaddr = params->ring_base_vaddr;
2306 srng->entry_size = srng_config->entry_size;
2307 srng->num_entries = params->num_entries;
2308 srng->ring_size = srng->entry_size * srng->num_entries;
2309 srng->intr_batch_cntr_thres_entries =
2310 params->intr_batch_cntr_thres_entries;
2311 srng->intr_timer_thres_us = params->intr_timer_thres_us;
2312 srng->flags = params->flags;
2313 srng->msi_addr = params->msi_addr;
2314 srng->msi2_addr = params->msi2_addr;
2315 srng->msi_data = params->msi_data;
2316 srng->msi2_data = params->msi2_data;
2317 srng->initialized = 1;
2318 spin_lock_init(&srng->lock);
2319 lockdep_set_class(&srng->lock, &srng->lock_key);
2321 for (i = 0; i < HAL_SRNG_NUM_REG_GRP; i++) {
2322 srng->hwreg_base[i] = srng_config->reg_start[i] +
2323 (ring_num * srng_config->reg_size[i]);
2326 memset(srng->ring_base_vaddr, 0,
2327 (srng->entry_size * srng->num_entries) << 2);
2329 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
2331 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2332 srng->u.src_ring.hp = 0;
2333 srng->u.src_ring.cached_tp = 0;
2334 srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;
2335 srng->u.src_ring.tp_addr = (void *)(hal->rdp.vaddr + ring_id);
2336 srng->u.src_ring.low_threshold = params->low_threshold *
2337 srng->entry_size;
2338 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2339 if (!ab->hw_params->supports_shadow_regs)
2340 srng->u.src_ring.hp_addr =
2341 (u32 *)((unsigned long)ab->mem + reg_base);
2342 else
2343 ath12k_dbg(ab, ATH12K_DBG_HAL,
2344 "hal type %d ring_num %d reg_base 0x%x shadow 0x%lx\n",
2345 type, ring_num,
2346 reg_base,
2347 (unsigned long)srng->u.src_ring.hp_addr -
2348 (unsigned long)ab->mem);
2349 } else {
2350 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2351 srng->u.src_ring.hp_addr = (void *)(hal->wrp.vaddr +
2352 idx);
2353 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2355 } else {
2356 /* During initialization loop count in all the descriptors
2357 * will be set to zero, and HW will set it to 1 on completing
2358 * descriptor update in first loop, and increments it by 1 on
2359 * subsequent loops (loop count wraps around after reaching
2360 * 0xffff). The 'loop_cnt' in SW ring state is the expected
2361 * loop count in descriptors updated by HW (to be processed
2362 * by SW).
2363 */
2364 srng->u.dst_ring.loop_cnt = 1;
2365 srng->u.dst_ring.tp = 0;
2366 srng->u.dst_ring.cached_hp = 0;
2367 srng->u.dst_ring.hp_addr = (void *)(hal->rdp.vaddr + ring_id);
2368 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2369 if (!ab->hw_params->supports_shadow_regs)
2370 srng->u.dst_ring.tp_addr =
2371 (u32 *)((unsigned long)ab->mem + reg_base +
2372 (HAL_REO1_RING_TP - HAL_REO1_RING_HP));
2373 else
2374 ath12k_dbg(ab, ATH12K_DBG_HAL,
2375 "type %d ring_num %d target_reg 0x%x shadow 0x%lx\n",
2376 type, ring_num,
2377 reg_base + HAL_REO1_RING_TP - HAL_REO1_RING_HP,
2378 (unsigned long)srng->u.dst_ring.tp_addr -
2379 (unsigned long)ab->mem);
2380 } else {
2381 /* For PMAC & DMAC rings, tail pointer updates will be done
2382 * through FW by writing to a shared memory location
2383 */
2384 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2385 srng->u.dst_ring.tp_addr = (void *)(hal->wrp.vaddr +
2386 idx);
2387 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2391 if (srng_config->mac_type != ATH12K_HAL_SRNG_UMAC)
2392 return ring_id;
2394 ath12k_hal_srng_hw_init(ab, srng);
2396 if (type == HAL_CE_DST) {
2397 srng->u.dst_ring.max_buffer_length = params->max_buffer_len;
2398 ath12k_hal_ce_dst_setup(ab, srng, ring_num);
2401 return ring_id;
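/* Minimal caller sketch for ath12k_hal_srng_setup() (editorial, illustrative
 * only; the driver's dp layer performs this with additional bookkeeping):
 *
 *   struct hal_srng_params params = { };
 *   u32 entry_sz = ath12k_hal_srng_get_entrysize(ab, HAL_REO_DST);
 *   int num_entries = 1024;                  // hypothetical ring depth
 *   dma_addr_t paddr;
 *   void *vaddr = dma_alloc_coherent(ab->dev, num_entries * entry_sz,
 *                                    &paddr, GFP_KERNEL);
 *
 *   params.ring_base_vaddr = vaddr;
 *   params.ring_base_paddr = paddr;
 *   params.num_entries = num_entries;
 *   ring_id = ath12k_hal_srng_setup(ab, HAL_REO_DST, 0, 0, &params);
 */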
2404 static void ath12k_hal_srng_update_hp_tp_addr(struct ath12k_base *ab,
2405 int shadow_cfg_idx,
2406 enum hal_ring_type ring_type,
2407 int ring_num)
2409 struct hal_srng *srng;
2410 struct ath12k_hal *hal = &ab->hal;
2411 int ring_id;
2412 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2414 ring_id = ath12k_hal_srng_get_ring_id(ab, ring_type, ring_num, 0);
2415 if (ring_id < 0)
2416 return;
2418 srng = &hal->srng_list[ring_id];
2420 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2421 srng->u.dst_ring.tp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2422 (unsigned long)ab->mem);
2423 else
2424 srng->u.src_ring.hp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2425 (unsigned long)ab->mem);
2428 int ath12k_hal_srng_update_shadow_config(struct ath12k_base *ab,
2429 enum hal_ring_type ring_type,
2430 int ring_num)
2432 struct ath12k_hal *hal = &ab->hal;
2433 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2434 int shadow_cfg_idx = hal->num_shadow_reg_configured;
2435 u32 target_reg;
2437 if (shadow_cfg_idx >= HAL_SHADOW_NUM_REGS)
2438 return -EINVAL;
2440 hal->num_shadow_reg_configured++;
2442 target_reg = srng_config->reg_start[HAL_HP_OFFSET_IN_REG_START];
2443 target_reg += srng_config->reg_size[HAL_HP_OFFSET_IN_REG_START] *
2444 ring_num;
2446 /* For destination ring, shadow the TP */
2447 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2448 target_reg += HAL_OFFSET_FROM_HP_TO_TP;
2450 hal->shadow_reg_addr[shadow_cfg_idx] = target_reg;
2452 /* update hp/tp addr to hal structure */
2453 ath12k_hal_srng_update_hp_tp_addr(ab, shadow_cfg_idx, ring_type,
2454 ring_num);
2456 ath12k_dbg(ab, ATH12K_DBG_HAL,
2457 "target_reg %x, shadow reg 0x%x shadow_idx 0x%x, ring_type %d, ring num %d",
2458 target_reg,
2459 HAL_SHADOW_REG(shadow_cfg_idx),
2460 shadow_cfg_idx,
2461 ring_type, ring_num);
2463 return 0;
2466 void ath12k_hal_srng_shadow_config(struct ath12k_base *ab)
2468 struct ath12k_hal *hal = &ab->hal;
2469 int ring_type, ring_num;
2471 /* update all the non-CE srngs. */
2472 for (ring_type = 0; ring_type < HAL_MAX_RING_TYPES; ring_type++) {
2473 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2475 if (ring_type == HAL_CE_SRC ||
2476 ring_type == HAL_CE_DST ||
2477 ring_type == HAL_CE_DST_STATUS)
2478 continue;
2480 if (srng_config->mac_type == ATH12K_HAL_SRNG_DMAC ||
2481 srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
2482 continue;
2484 for (ring_num = 0; ring_num < srng_config->max_rings; ring_num++)
2485 ath12k_hal_srng_update_shadow_config(ab, ring_type, ring_num);
2489 void ath12k_hal_srng_get_shadow_config(struct ath12k_base *ab,
2490 u32 **cfg, u32 *len)
2492 struct ath12k_hal *hal = &ab->hal;
2494 *len = hal->num_shadow_reg_configured;
2495 *cfg = hal->shadow_reg_addr;
2498 void ath12k_hal_srng_shadow_update_hp_tp(struct ath12k_base *ab,
2499 struct hal_srng *srng)
2501 lockdep_assert_held(&srng->lock);
2503 /* check whether the ring is empty. Update the shadow
2504 * HP only when the ring isn't empty.
2505 */
2506 if (srng->ring_dir == HAL_SRNG_DIR_SRC &&
2507 *srng->u.src_ring.tp_addr != srng->u.src_ring.hp)
2508 ath12k_hal_srng_access_end(ab, srng);
2511 static void ath12k_hal_register_srng_lock_keys(struct ath12k_base *ab)
2513 struct ath12k_hal *hal = &ab->hal;
2514 u32 ring_id;
2516 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2517 lockdep_register_key(&hal->srng_list[ring_id].lock_key);
2520 static void ath12k_hal_unregister_srng_lock_keys(struct ath12k_base *ab)
2522 struct ath12k_hal *hal = &ab->hal;
2523 u32 ring_id;
2525 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2526 lockdep_unregister_key(&hal->srng_list[ring_id].lock_key);
2529 int ath12k_hal_srng_init(struct ath12k_base *ab)
2531 struct ath12k_hal *hal = &ab->hal;
2532 int ret;
2534 memset(hal, 0, sizeof(*hal));
2536 ret = ab->hw_params->hal_ops->create_srng_config(ab);
2537 if (ret)
2538 goto err_hal;
2540 ret = ath12k_hal_alloc_cont_rdp(ab);
2541 if (ret)
2542 goto err_hal;
2544 ret = ath12k_hal_alloc_cont_wrp(ab);
2545 if (ret)
2546 goto err_free_cont_rdp;
2548 ath12k_hal_register_srng_lock_keys(ab);
2550 return 0;
2552 err_free_cont_rdp:
2553 ath12k_hal_free_cont_rdp(ab);
2555 err_hal:
2556 return ret;
2559 void ath12k_hal_srng_deinit(struct ath12k_base *ab)
2561 struct ath12k_hal *hal = &ab->hal;
2563 ath12k_hal_unregister_srng_lock_keys(ab);
2564 ath12k_hal_free_cont_rdp(ab);
2565 ath12k_hal_free_cont_wrp(ab);
2566 kfree(hal->srng_config);
2567 hal->srng_config = NULL;
2570 void ath12k_hal_dump_srng_stats(struct ath12k_base *ab)
2572 struct hal_srng *srng;
2573 struct ath12k_ext_irq_grp *irq_grp;
2574 struct ath12k_ce_pipe *ce_pipe;
2575 int i;
2577 ath12k_err(ab, "Last interrupt received for each CE:\n");
2578 for (i = 0; i < ab->hw_params->ce_count; i++) {
2579 ce_pipe = &ab->ce.ce_pipe[i];
2581 if (ath12k_ce_get_attr_flags(ab, i) & CE_ATTR_DIS_INTR)
2582 continue;
2584 ath12k_err(ab, "CE_id %d pipe_num %d %ums before\n",
2585 i, ce_pipe->pipe_num,
2586 jiffies_to_msecs(jiffies - ce_pipe->timestamp));
2589 ath12k_err(ab, "\nLast interrupt received for each group:\n");
2590 for (i = 0; i < ATH12K_EXT_IRQ_GRP_NUM_MAX; i++) {
2591 irq_grp = &ab->ext_irq_grp[i];
2592 ath12k_err(ab, "group_id %d %ums before\n",
2593 irq_grp->grp_id,
2594 jiffies_to_msecs(jiffies - irq_grp->timestamp));
2597 for (i = 0; i < HAL_SRNG_RING_ID_MAX; i++) {
2598 srng = &ab->hal.srng_list[i];
2600 if (!srng->initialized)
2601 continue;
2603 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2604 ath12k_err(ab,
2605 "src srng id %u hp %u, reap_hp %u, cur tp %u, cached tp %u last tp %u napi processed before %ums\n",
2606 srng->ring_id, srng->u.src_ring.hp,
2607 srng->u.src_ring.reap_hp,
2608 *srng->u.src_ring.tp_addr, srng->u.src_ring.cached_tp,
2609 srng->u.src_ring.last_tp,
2610 jiffies_to_msecs(jiffies - srng->timestamp));
2611 else if (srng->ring_dir == HAL_SRNG_DIR_DST)
2612 ath12k_err(ab,
2613 "dst srng id %u tp %u, cur hp %u, cached hp %u last hp %u napi processed before %ums\n",
2614 srng->ring_id, srng->u.dst_ring.tp,
2615 *srng->u.dst_ring.hp_addr,
2616 srng->u.dst_ring.cached_hp,
2617 srng->u.dst_ring.last_hp,
2618 jiffies_to_msecs(jiffies - srng->timestamp));