arch/arm64/include/asm/kasan.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

#ifndef __ASSEMBLY__

#include <linux/linkage.h>
#include <asm/memory.h>
#include <asm/pgtable-types.h>

#define arch_kasan_set_tag(addr, tag)	__tag_set(addr, tag)
#define arch_kasan_reset_tag(addr)	__tag_reset(addr)
#define arch_kasan_get_tag(addr)	__tag_get(addr)
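
/*
 * Illustrative note, not part of the original header: with
 * CONFIG_KASAN_SW_TAGS these hooks are assumed to resolve to the tag
 * helpers from <asm/memory.h>, which keep the KASAN tag in the top
 * byte (bits 63:56) of a TBI-enabled kernel pointer. Roughly:
 *
 *	void *tagged = arch_kasan_set_tag(ptr, 0xf2);
 *	u8 tag = arch_kasan_get_tag(tagged);		// tag == 0xf2
 *	void *plain = arch_kasan_reset_tag(tagged);	// top byte restored
 */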

#ifdef CONFIG_KASAN

/*
 * KASAN_SHADOW_START: beginning of the kernel virtual addresses.
 * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of kernel virtual addresses,
 * where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
 */
#define KASAN_SHADOW_START	(VA_START)
#define KASAN_SHADOW_END	(KASAN_SHADOW_START + KASAN_SHADOW_SIZE)
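
/*
 * Note added for illustration; the actual value of
 * KASAN_SHADOW_SCALE_SHIFT is set by the arch Makefile, not by this
 * header. With generic KASAN it is expected to be 3, i.e. N = 8, so
 * one shadow byte tracks eight bytes of kernel memory and the shadow
 * region is 1/8th of the covered address range; software tag-based
 * KASAN is expected to use a shift of 4 (N = 16).
 */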

/*
 * This value is used to map an address to the corresponding shadow
 * address by the following formula:
 *	shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *
 * The (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in
 * the range [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64 bits of
 * virtual address space. So KASAN_SHADOW_OFFSET must satisfy the following
 * equation:
 *	KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
 *				(1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
 */
#define KASAN_SHADOW_OFFSET	(KASAN_SHADOW_END - (1ULL << \
					(64 - KASAN_SHADOW_SCALE_SHIFT)))
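
/*
 * Worked example added for illustration; it follows directly from the
 * definitions above. The highest virtual address, ~0ULL, maps to:
 *
 *	shadow(~0ULL) = (~0ULL >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *		      = ((1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT)) - 1)
 *			+ KASAN_SHADOW_END
 *			- (1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
 *		      = KASAN_SHADOW_END - 1
 *
 * so the last byte of the 64-bit address space maps to the last shadow
 * byte, which is exactly what the choice of KASAN_SHADOW_OFFSET above
 * is meant to guarantee.
 */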

void kasan_init(void);
void kasan_copy_shadow(pgd_t *pgdir);
asmlinkage void kasan_early_init(void);

#else
static inline void kasan_init(void) { }
static inline void kasan_copy_shadow(pgd_t *pgdir) { }
#endif /* CONFIG_KASAN */

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_KASAN_H */