/* SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause) */
/* Copyright (c) 2024 Meta Platforms, Inc. and affiliates. */
#define WRITE_ONCE(x, val) ((*(volatile typeof(x) *) &(x)) = (val))

#define NUMA_NO_NODE (-1)
#ifndef arena_container_of
#define arena_container_of(ptr, type, member)			\
	({							\
		void __arena *__mptr = (void __arena *)(ptr);	\
		((type *)(__mptr - offsetof(type, member)));	\
	})
#endif
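
/*
 * Usage sketch (illustrative only; 'struct node', 'struct elem' and the
 * variable names are hypothetical, not part of this header): like the
 * kernel's container_of(), arena_container_of() recovers the enclosing
 * arena-resident object from a pointer to one of its members.
 *
 *	struct node { struct node __arena *next; };
 *	struct elem { struct node node; __u64 value; };
 *
 *	struct node __arena *n = ...;
 *	struct elem __arena *e = arena_container_of(n, struct elem, node);
 */
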
#ifdef __BPF__ /* when compiled as bpf program */
#define PAGE_SIZE __PAGE_SIZE
/*
 * for older kernels try sizeof(struct genradix_node)
 * or flexible:
 * static inline long __bpf_page_size(void) {
 *   return bpf_core_enum_value(enum page_size_enum___l, __PAGE_SIZE___l) ?: sizeof(struct genradix_node);
 * }
 * but generated code is not great.
 */
#if defined(__BPF_FEATURE_ADDR_SPACE_CAST) && !defined(BPF_ARENA_FORCE_ASM)
#define __arena __attribute__((address_space(1)))
#define __arena_global __attribute__((address_space(1)))
#define cast_kern(ptr) /* nop for bpf prog. emitted by LLVM */
#define cast_user(ptr) /* nop for bpf prog. emitted by LLVM */
#else
#define __arena
#define __arena_global SEC(".addr_space.1")
#define cast_kern(ptr) bpf_addr_space_cast(ptr, 0, 1)
#define cast_user(ptr) bpf_addr_space_cast(ptr, 1, 0)
#endif
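
/*
 * Illustrative sketch (hypothetical variable names, not part of this
 * header): a bpf program dereferences arena memory through the kernel
 * view and publishes pointers for user space through the user view.
 * With __BPF_FEATURE_ADDR_SPACE_CAST both casts are nops and LLVM emits
 * the address space casts itself; otherwise they expand to
 * bpf_addr_space_cast().
 *
 *	struct node __arena *n = ...;
 *
 *	cast_kern(n);			// kernel view: safe to dereference
 *	n->value = 1;
 *	cast_user(n);			// user view: a pointer user space can follow
 *	WRITE_ONCE(head->first, n);
 */
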
void __arena *bpf_arena_alloc_pages(void *map, void __arena *addr, __u32 page_cnt,
				    int node_id, __u64 flags) __ksym __weak;
void bpf_arena_free_pages(void *map, void __arena *ptr, __u32 page_cnt) __ksym __weak;
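
/*
 * Usage sketch (illustrative; the arena map definition and page counts are
 * assumptions of the example, not requirements of the kfuncs): allocate one
 * page from a BPF_MAP_TYPE_ARENA map at any free address on any NUMA node,
 * then free it. bpf_arena_alloc_pages() returns NULL on failure.
 *
 *	struct {
 *		__uint(type, BPF_MAP_TYPE_ARENA);
 *		__uint(map_flags, BPF_F_MMAPABLE);
 *		__uint(max_entries, 10);	// size of the arena in pages
 *	} arena SEC(".maps");
 *
 *	void __arena *page;
 *
 *	page = bpf_arena_alloc_pages(&arena, NULL, 1, NUMA_NO_NODE, 0);
 *	if (!page)
 *		return 0;
 *	...
 *	bpf_arena_free_pages(&arena, page, 1);
 */
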
#else /* when compiled as user space code */
#define __arena
#define cast_kern(ptr) /* nop for user space */
#define cast_user(ptr) /* nop for user space */

#ifndef offsetof
#define offsetof(type, member) ((unsigned long)&((type *)0)->member)
#endif
static inline void __arena *bpf_arena_alloc_pages(void *map, void *addr, __u32 page_cnt,
						  int node_id, __u64 flags)
{
	return NULL;
}
static inline void bpf_arena_free_pages(void *map, void __arena *ptr, __u32 page_cnt)
{
}

#endif