/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#include <asm/alternative.h>

#ifndef __ASSEMBLY__
/* The synchronize caches instruction (sync) executes as a nop on
   systems in which all memory references are performed in order. */
#define synchronize_caches() asm volatile("sync" \
	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
	: : : "memory")
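
/*
 * ALT_COND_NO_SMP comes from <asm/alternative.h>; on kernels that find
 * themselves booting on a single-CPU machine, the boot-time alternatives
 * patching is expected to rewrite the "sync" above to INSN_NOP, so UP
 * systems do not pay for the cache synchronization.  The "memory"
 * clobber remains a compiler barrier in either case.
 */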
#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif
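
/*
 * Net effect of the split above (illustrative): with CONFIG_SMP every
 * full barrier costs one "sync" (patched to a nop on UP hardware, per
 * the alternative above), while !CONFIG_SMP builds fall back to
 * barrier(), the pure compiler barrier from <linux/compiler.h>, since
 * a single CPU always observes its own accesses in program order.
 */
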
#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()
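
/*
 * The three __smp_*() hooks above are consumed by <asm-generic/barrier.h>,
 * included below, which turns them into smp_mb()/smp_rmb()/smp_wmb() on
 * CONFIG_SMP builds and maps those to plain barrier() otherwise.
 */
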
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */