/* SPDX-License-Identifier: GPL-2.0 */
/* Interface for implementing AF_XDP zero-copy support in drivers.
 * Copyright(c) 2020 Intel Corporation.
 */

#ifndef _LINUX_XDP_SOCK_DRV_H
#define _LINUX_XDP_SOCK_DRV_H

#include <net/xdp_sock.h>
#include <net/xsk_buff_pool.h>

#define XDP_UMEM_MIN_CHUNK_SHIFT 11
#define XDP_UMEM_MIN_CHUNK_SIZE (1 << XDP_UMEM_MIN_CHUNK_SHIFT)

struct xsk_cb_desc {
	void *src;
	u8 off;
	u8 bytes;
};

#ifdef CONFIG_XDP_SOCKETS

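/* Driver-facing entry points, implemented in net/xdp/xsk.c. The need_wakeup
 * helpers drive the XDP_USE_NEED_WAKEUP protocol: a driver sets the flag on
 * a ring when it needs user space to kick it with sendto()/poll(), and
 * clears it again while it is actively processing that ring.
 */
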
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);

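/* The remaining API is inline; most helpers simply forward to their xp_*()
 * counterparts from the xsk_buff_pool implementation.
 */
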
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}

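/* Worked example: with 2048-byte chunks (XDP_UMEM_MIN_CHUNK_SIZE) and no
 * extra umem headroom, the usable RX frame is 2048 - XDP_PACKET_HEADROOM
 * bytes.
 */
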
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}

static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
				    struct xsk_cb_desc *desc)
{
	xp_fill_cb(pool, desc);
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
#ifdef CONFIG_NET_RX_BUSY_POLL
	return pool->heads[0].xdp.rxq->napi_id;
#else
	return 0;
#endif
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	struct xdp_umem *umem = pool->umem;

	return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}

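/* DMA mapping is a setup/teardown path: the umem pages are mapped once per
 * pool and unmapped on release, so the fast-path helpers below only read
 * back precomputed addresses.
 */
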
static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_dma(xskb);
}

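/* The helper above returns the DMA address of the packet data, while the
 * frame variant below returns the address of the start of the whole frame,
 * headroom included.
 */
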
static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_frame_dma(xskb);
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}

static inline bool xsk_is_eop_desc(struct xdp_desc *desc)
{
	return !xp_mb_desc(desc);
}

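/* xp_mb_desc() tests the descriptor's XDP_PKT_CONTD option bit, so a
 * descriptor is end-of-packet exactly when no further frags follow.
 */
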
/* Returns as many entries as possible up to max. 0 <= N <= max. */
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return xp_alloc_batch(pool, xdp, max);
}

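/* A minimal RX refill sketch; rx_descs, bufs and budget are hypothetical
 * driver-side names, not part of this API:
 *
 *	n = xsk_buff_alloc_batch(pool, bufs, budget);
 *	for (i = 0; i < n; i++)
 *		rx_descs[i].addr = xsk_buff_xdp_get_dma(bufs[i]);
 *
 * A short count is not an error; retry once more buffers are returned to
 * the fill ring.
 */
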
static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
	struct list_head *xskb_list = &xskb->pool->xskb_list;
	struct xdp_buff_xsk *pos, *tmp;

	if (likely(!xdp_buff_has_frags(xdp)))
		goto out;

	list_for_each_entry_safe(pos, tmp, xskb_list, list_node) {
		list_del(&pos->list_node);
		xp_free(pos);
	}

	xdp_get_shared_info_from_buff(xdp)->nr_frags = 0;
out:
	xp_free(xskb);
}

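/* The helpers below maintain pool->xskb_list, which strings together the
 * frags of a multi-buffer packet between xsk_buff_add_frag() on receive and
 * their later consumption via xsk_buff_get_frag().
 */
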
static inline void xsk_buff_add_frag(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *frag = container_of(xdp, struct xdp_buff_xsk, xdp);

	list_add_tail(&frag->list_node, &frag->pool->xskb_list);
}

static inline struct xdp_buff *xsk_buff_get_frag(struct xdp_buff *first)
{
	struct xdp_buff_xsk *xskb = container_of(first, struct xdp_buff_xsk, xdp);
	struct xdp_buff *ret = NULL;
	struct xdp_buff_xsk *frag;

	frag = list_first_entry_or_null(&xskb->pool->xskb_list,
					struct xdp_buff_xsk, list_node);
	if (frag) {
		list_del(&frag->list_node);
		ret = &frag->xdp;
	}

	return ret;
}

static inline void xsk_buff_del_tail(struct xdp_buff *tail)
{
	struct xdp_buff_xsk *xskb = container_of(tail, struct xdp_buff_xsk, xdp);

	list_del(&xskb->list_node);
}

static inline struct xdp_buff *xsk_buff_get_tail(struct xdp_buff *first)
{
	struct xdp_buff_xsk *xskb = container_of(first, struct xdp_buff_xsk, xdp);
	struct xdp_buff_xsk *frag;

	frag = list_last_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
			       list_node);
	return &frag->xdp;
}

static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
	xdp->data = xdp->data_hard_start + XDP_PACKET_HEADROOM;
	xdp->data_meta = xdp->data;
	xdp->data_end = xdp->data + size;
	xdp->flags = 0;
}

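/* Note that data/data_meta are recomputed from data_hard_start each time,
 * since a recycled buffer may still carry offsets from its previous use.
 */
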
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}

#define XDP_TXMD_FLAGS_VALID ( \
		XDP_TXMD_FLAGS_TIMESTAMP | \
		XDP_TXMD_FLAGS_CHECKSUM | \
	0)

static inline bool xsk_buff_valid_tx_metadata(struct xsk_tx_metadata *meta)
{
	return !(meta->flags & ~XDP_TXMD_FLAGS_VALID);
}

static inline struct xsk_tx_metadata *xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
{
	struct xsk_tx_metadata *meta;

	if (!pool->tx_metadata_len)
		return NULL;

	meta = xp_raw_get_data(pool, addr) - pool->tx_metadata_len;
	if (unlikely(!xsk_buff_valid_tx_metadata(meta)))
		return NULL; /* no way to signal the error to the user */

	return meta;
}

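/* The metadata area, when tx_metadata_len is set on the umem, sits directly
 * in front of the packet data referenced by addr; a NULL return means the
 * metadata is absent or its flags failed validation.
 */
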
static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	xp_dma_sync_for_cpu(xskb);
}

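/* RX buffers are synced for the CPU before the XDP program touches them;
 * the _for_device variant below is the TX-side counterpart, used before
 * handing data at a raw address to the NIC.
 */
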
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}

#else

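/* Stubs for CONFIG_XDP_SOCKETS=n, so drivers can call the API
 * unconditionally.
 */
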
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
{
	return 0;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}

static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
				    struct xsk_cb_desc *desc)
{
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline bool xsk_is_eop_desc(struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return 0;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline void xsk_buff_add_frag(struct xdp_buff *xdp)
{
}

static inline struct xdp_buff *xsk_buff_get_frag(struct xdp_buff *first)
{
	return NULL;
}

static inline void xsk_buff_del_tail(struct xdp_buff *tail)
{
}

static inline struct xdp_buff *xsk_buff_get_tail(struct xdp_buff *first)
{
	return NULL;
}

static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline bool xsk_buff_valid_tx_metadata(struct xsk_tx_metadata *meta)
{
	return false;
}

static inline struct xsk_tx_metadata *xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}

#endif /* CONFIG_XDP_SOCKETS */

#endif /* _LINUX_XDP_SOCK_DRV_H */