/* SPDX-License-Identifier: GPL-2.0 */
/*
 * arch/arm/include/asm/cache.h
 */
#ifndef __ASMARM_CACHE_H
#define __ASMARM_CACHE_H

#define L1_CACHE_SHIFT CONFIG_ARM_L1_CACHE_SHIFT
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
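
/*
 * Illustrative note (not from the original header): with a shift of 6,
 * which is typical for ARMv7 (CONFIG_ARM_L1_CACHE_SHIFT=6),
 * L1_CACHE_BYTES evaluates to 1 << 6 = 64, i.e. 64-byte cache lines.
 */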

/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
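
/*
 * Illustrative sketch (a hypothetical helper, not part of the original
 * header): rounding a transfer length up to ARCH_DMA_MINALIGN ensures
 * a DMA buffer never shares its final cache line with unrelated data.
 */
static inline unsigned long example_dma_buf_size(unsigned long len)
{
	/* Align up to the next ARCH_DMA_MINALIGN boundary (power of two). */
	return (len + ARCH_DMA_MINALIGN - 1) &
	       ~((unsigned long)ARCH_DMA_MINALIGN - 1);
}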

/*
 * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
 */
#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define ARCH_SLAB_MINALIGN 8
#endif
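
/*
 * __read_mostly places a variable in the .data..read_mostly section,
 * grouping rarely-written data together so it does not share cache
 * lines with frequently-written data. A typical (hypothetical) use:
 *
 *	static int debug_level __read_mostly;
 */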
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#endif