// SPDX-License-Identifier: BSD-3-Clause-Clear
/*
 * Copyright (c) 2019-2021 The Linux Foundation. All rights reserved.
 * Copyright (c) 2021-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 */

#include "core.h"
#include "debug.h"
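/* Direct buffer (DB) rings let firmware DMA bulk data (currently only
 * spectral scan samples, see WMI_DIRECT_BUF_SPECTRAL below) into
 * host-posted buffers. The host keeps the refill SRNG stocked with
 * DMA-mapped buffers; firmware reports consumed buffers back through a
 * WMI buffer-release event, after which they are processed and
 * re-posted.
 */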
static int ath12k_dbring_bufs_replenish(struct ath12k *ar,
					struct ath12k_dbring *ring,
					struct ath12k_dbring_element *buff,
					gfp_t gfp)
{
	struct ath12k_base *ab = ar->ab;
	struct hal_srng *srng;
	dma_addr_t paddr;
	void *ptr_aligned, *ptr_unaligned, *desc;
	int ret;
	int buf_id;
	u32 cookie;

	srng = &ab->hal.srng_list[ring->refill_srng.ring_id];

	lockdep_assert_held(&srng->lock);

	ath12k_hal_srng_access_begin(ab, srng);

	/* Align the payload within the over-allocated element so the
	 * mapped address honours the hardware alignment requirement.
	 */
	ptr_unaligned = buff->payload;
	ptr_aligned = PTR_ALIGN(ptr_unaligned, ring->buf_align);
	paddr = dma_map_single(ab->dev, ptr_aligned, ring->buf_sz,
			       DMA_FROM_DEVICE);

	ret = dma_mapping_error(ab->dev, paddr);
	if (ret)
		goto err;

	spin_lock_bh(&ring->idr_lock);
	buf_id = idr_alloc(&ring->bufs_idr, buff, 0, ring->bufs_max, gfp);
	spin_unlock_bh(&ring->idr_lock);
	if (buf_id < 0) {
		ret = -ENOBUFS;
		goto err_dma_unmap;
	}

	desc = ath12k_hal_srng_src_get_next_entry(ab, srng);
	if (!desc) {
		ret = -ENOENT;
		goto err_idr_remove;
	}

	buff->paddr = paddr;

	cookie = u32_encode_bits(ar->pdev_idx, DP_RXDMA_BUF_COOKIE_PDEV_ID) |
		 u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);

	ath12k_hal_rx_buf_addr_info_set(desc, paddr, cookie, 0);

	ath12k_hal_srng_access_end(ab, srng);

	return 0;

err_idr_remove:
	spin_lock_bh(&ring->idr_lock);
	idr_remove(&ring->bufs_idr, buf_id);
	spin_unlock_bh(&ring->idr_lock);
err_dma_unmap:
	dma_unmap_single(ab->dev, paddr, ring->buf_sz,
			 DMA_FROM_DEVICE);
err:
	ath12k_hal_srng_access_end(ab, srng);
	return ret;
}
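/* Top up the refill ring: allocate one element per free SRNG slot
 * (payload over-allocated by buf_align - 1 so the mapped address can
 * be aligned) and post each via ath12k_dbring_bufs_replenish().
 * Returns the number of entries that could not be posted.
 */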
static int ath12k_dbring_fill_bufs(struct ath12k *ar,
				   struct ath12k_dbring *ring,
				   gfp_t gfp)
{
	struct ath12k_dbring_element *buff;
	struct hal_srng *srng;
	struct ath12k_base *ab = ar->ab;
	int num_remain, req_entries, num_free;
	u32 align;
	int size, ret;

	srng = &ab->hal.srng_list[ring->refill_srng.ring_id];

	spin_lock_bh(&srng->lock);

	num_free = ath12k_hal_srng_src_num_free(ab, srng, true);
	req_entries = min(num_free, ring->bufs_max);
	num_remain = req_entries;
	align = ring->buf_align;
	size = sizeof(*buff) + ring->buf_sz + align - 1;

	while (num_remain > 0) {
		buff = kzalloc(size, gfp);
		if (!buff)
			break;

		ret = ath12k_dbring_bufs_replenish(ar, ring, buff, gfp);
		if (ret) {
			ath12k_warn(ab, "failed to replenish db ring num_remain %d req_ent %d\n",
				    num_remain, req_entries);
			kfree(buff);
			break;
		}
		num_remain--;
	}

	spin_unlock_bh(&srng->lock);

	return num_remain;
}
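/* Describe the ring to firmware over WMI: physical base of the refill
 * SRNG, head/tail pointer addresses, element count and buffer size,
 * plus the event moderation parameters chosen via
 * ath12k_dbring_set_cfg().
 */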
int ath12k_dbring_wmi_cfg_setup(struct ath12k *ar,
				struct ath12k_dbring *ring,
				enum wmi_direct_buffer_module id)
{
	struct ath12k_wmi_pdev_dma_ring_cfg_arg arg = {0};
	int ret;

	if (id >= WMI_DIRECT_BUF_MAX)
		return -EINVAL;

	arg.pdev_id = DP_SW2HW_MACID(ring->pdev_id);
	arg.module_id = id;
	arg.base_paddr_lo = lower_32_bits(ring->refill_srng.paddr);
	arg.base_paddr_hi = upper_32_bits(ring->refill_srng.paddr);
	arg.head_idx_paddr_lo = lower_32_bits(ring->hp_addr);
	arg.head_idx_paddr_hi = upper_32_bits(ring->hp_addr);
	arg.tail_idx_paddr_lo = lower_32_bits(ring->tp_addr);
	arg.tail_idx_paddr_hi = upper_32_bits(ring->tp_addr);
	arg.num_elems = ring->bufs_max;
	arg.buf_size = ring->buf_sz;
	arg.num_resp_per_event = ring->num_resp_per_event;
	arg.event_timeout_ms = ring->event_timeout_ms;

	ret = ath12k_wmi_pdev_dma_ring_cfg(ar, &arg);
	if (ret) {
		ath12k_warn(ar->ab, "failed to setup db ring cfg\n");
		return ret;
	}

	return 0;
}
int ath12k_dbring_set_cfg(struct ath12k *ar, struct ath12k_dbring *ring,
			  u32 num_resp_per_event, u32 event_timeout_ms,
			  int (*handler)(struct ath12k *,
					 struct ath12k_dbring_data *))
{
	if (WARN_ON(!ring))
		return -EINVAL;

	ring->num_resp_per_event = num_resp_per_event;
	ring->event_timeout_ms = event_timeout_ms;
	ring->handler = handler;

	return 0;
}
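/* Size the ring from the SRNG geometry and the firmware-advertised
 * capability (minimum buffer size and alignment), cache the head/tail
 * pointer addresses for the WMI config, then prime the ring with
 * buffers.
 */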
int ath12k_dbring_buf_setup(struct ath12k *ar,
			    struct ath12k_dbring *ring,
			    struct ath12k_dbring_cap *db_cap)
{
	struct ath12k_base *ab = ar->ab;
	struct hal_srng *srng;
	int ret;

	srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
	ring->bufs_max = ring->refill_srng.size /
		ath12k_hal_srng_get_entrysize(ab, HAL_RXDMA_DIR_BUF);

	ring->buf_sz = db_cap->min_buf_sz;
	ring->buf_align = db_cap->min_buf_align;
	ring->pdev_id = db_cap->pdev_id;
	ring->hp_addr = ath12k_hal_srng_get_hp_addr(ab, srng);
	ring->tp_addr = ath12k_hal_srng_get_tp_addr(ab, srng);

	ret = ath12k_dbring_fill_bufs(ar, ring, GFP_KERNEL);

	return ret;
}
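/* Allocate the refill SRNG itself (HAL_RXDMA_DIR_BUF type) for this
 * pdev; on failure any partially set up ring memory is released.
 */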
int ath12k_dbring_srng_setup(struct ath12k *ar, struct ath12k_dbring *ring,
			     int ring_num, int num_entries)
{
	int ret;

	ret = ath12k_dp_srng_setup(ar->ab, &ring->refill_srng, HAL_RXDMA_DIR_BUF,
				   ring_num, ar->pdev_idx, num_entries);
	if (ret < 0) {
		ath12k_warn(ar->ab, "failed to setup srng: %d ring_id %d\n",
			    ret, ring_num);
		goto err;
	}

	return 0;
err:
	ath12k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
	return ret;
}
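/* Look up the DB ring capability that firmware advertised for this
 * pdev/module combination.
 */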
int ath12k_dbring_get_cap(struct ath12k_base *ab,
			  u8 pdev_idx,
			  enum wmi_direct_buffer_module id,
			  struct ath12k_dbring_cap *db_cap)
{
	int i;

	if (!ab->num_db_cap || !ab->db_caps)
		return -ENOENT;

	if (id >= WMI_DIRECT_BUF_MAX)
		return -EINVAL;

	for (i = 0; i < ab->num_db_cap; i++) {
		if (pdev_idx == ab->db_caps[i].pdev_id &&
		    id == ab->db_caps[i].id) {
			*db_cap = ab->db_caps[i];

			return 0;
		}
	}

	return -ENOENT;
}
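/* WMI completion path: firmware reports a batch of consumed buffers.
 * For each entry, recover the host buffer from the cookie's IDR id,
 * unmap it, run the module handler on the aligned payload, then zero
 * and re-post the element (GFP_ATOMIC, since the SRNG lock is held
 * across the replenish).
 */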
int ath12k_dbring_buffer_release_event(struct ath12k_base *ab,
				       struct ath12k_dbring_buf_release_event *ev)
{
	struct ath12k_dbring *ring = NULL;
	struct hal_srng *srng;
	struct ath12k *ar;
	struct ath12k_dbring_element *buff;
	struct ath12k_dbring_data handler_data;
	struct ath12k_buffer_addr desc;
	u8 *vaddr_unalign;
	u32 num_entry, num_buff_reaped;
	u8 pdev_idx, rbm;
	u32 cookie;
	int buf_id;
	int size;
	dma_addr_t paddr;
	int ret = 0;

	pdev_idx = le32_to_cpu(ev->fixed.pdev_id);

	if (pdev_idx >= ab->num_radios) {
		ath12k_warn(ab, "Invalid pdev id %d\n", pdev_idx);
		return -EINVAL;
	}

	if (ev->fixed.num_buf_release_entry !=
	    ev->fixed.num_meta_data_entry) {
		ath12k_warn(ab, "Buffer entry %d mismatch meta entry %d\n",
			    ev->fixed.num_buf_release_entry,
			    ev->fixed.num_meta_data_entry);
		return -EINVAL;
	}

	ar = ab->pdevs[pdev_idx].ar;

	rcu_read_lock();
	if (!rcu_dereference(ab->pdevs_active[pdev_idx])) {
		ret = -EINVAL;
		goto rcu_unlock;
	}

	switch (ev->fixed.module_id) {
	case WMI_DIRECT_BUF_SPECTRAL:
		break;
	default:
		ring = NULL;
		ath12k_warn(ab, "Recv dma buffer release ev on unsupp module %d\n",
			    ev->fixed.module_id);
		break;
	}

	if (!ring) {
		ret = -EINVAL;
		goto rcu_unlock;
	}

	srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
	num_entry = le32_to_cpu(ev->fixed.num_buf_release_entry);
	size = sizeof(*buff) + ring->buf_sz + ring->buf_align - 1;
	num_buff_reaped = 0;

	spin_lock_bh(&srng->lock);

	while (num_buff_reaped < num_entry) {
		desc.info0 = ev->buf_entry[num_buff_reaped].paddr_lo;
		desc.info1 = ev->buf_entry[num_buff_reaped].paddr_hi;
		handler_data.meta = ev->meta_data[num_buff_reaped];

		num_buff_reaped++;

		ath12k_hal_rx_buf_addr_info_get(&desc, &paddr, &cookie, &rbm);

		buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

		spin_lock_bh(&ring->idr_lock);
		buff = idr_find(&ring->bufs_idr, buf_id);
		if (!buff) {
			spin_unlock_bh(&ring->idr_lock);
			continue;
		}
		idr_remove(&ring->bufs_idr, buf_id);
		spin_unlock_bh(&ring->idr_lock);

		dma_unmap_single(ab->dev, buff->paddr, ring->buf_sz,
				 DMA_FROM_DEVICE);

		if (ring->handler) {
			vaddr_unalign = buff->payload;
			handler_data.data = PTR_ALIGN(vaddr_unalign,
						      ring->buf_align);
			handler_data.data_sz = ring->buf_sz;

			ring->handler(ar, &handler_data);
		}

		memset(buff, 0, size);
		ath12k_dbring_bufs_replenish(ar, ring, buff, GFP_ATOMIC);
	}

	spin_unlock_bh(&srng->lock);

rcu_unlock:
	rcu_read_unlock();

	return ret;
}
void ath12k_dbring_srng_cleanup(struct ath12k *ar, struct ath12k_dbring *ring)
{
	ath12k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
}
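/* Release every buffer still tracked in the IDR: unmap its DMA
 * mapping and free the element, then destroy the IDR itself.
 */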
void ath12k_dbring_buf_cleanup(struct ath12k *ar, struct ath12k_dbring *ring)
{
	struct ath12k_dbring_element *buff;
	int buf_id;

	spin_lock_bh(&ring->idr_lock);
	idr_for_each_entry(&ring->bufs_idr, buff, buf_id) {
		idr_remove(&ring->bufs_idr, buf_id);
		dma_unmap_single(ar->ab->dev, buff->paddr,
				 ring->buf_sz, DMA_FROM_DEVICE);
		kfree(buff);
	}

	idr_destroy(&ring->bufs_idr);
	spin_unlock_bh(&ring->idr_lock);
}