arch/parisc/include/asm/barrier.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#include <asm/alternative.h>

#ifndef __ASSEMBLY__

/* The synchronize caches instruction executes as a nop on systems in
   which all memory references are performed in order. */
#define synchronize_caches() asm volatile("sync" \
	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
	: : : "memory")
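
/*
 * A rough sketch of the net effect, assuming the usual behaviour of the
 * parisc alternatives in <asm/alternative.h>: when the kernel ends up
 * running without SMP, the boot-time alternative patching replaces the
 * "sync" above with a plain nop (ALT_COND_NO_SMP / INSN_NOP), so
 * synchronize_caches() costs nothing on uniprocessor machines while
 * still emitting a real barrier on SMP.
 */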

#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif
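
/*
 * A minimal usage sketch; shared_data, shared_flag, compute() and
 * consume() are illustrative names.  The classic producer/consumer
 * pairing of a write barrier with a read barrier maps, on SMP parisc,
 * onto the "sync"-based mb() above:
 *
 *	// producer
 *	shared_data = compute();
 *	wmb();				// order the data before the flag
 *	WRITE_ONCE(shared_flag, 1);
 *
 *	// consumer
 *	while (!READ_ONCE(shared_flag))
 *		cpu_relax();
 *	rmb();				// order the flag before the data
 *	consume(shared_data);
 *
 * Without CONFIG_SMP both barriers collapse to barrier(), a pure
 * compiler barrier.
 */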

#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)
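
/*
 * Note on the pattern above: the union bounces v through __u so that the
 * asm always receives a plain integer register operand of the right width,
 * whatever the declared type of *p; the switch on sizeof(*p) then picks the
 * matching store instruction.  The ",ma" completer with a zero displacement
 * leaves the address unmodified; presumably it is used here because that
 * encoding corresponds to the ordered-store form on PA-RISC 2.0, which is
 * what gives the store its release semantics without a full "sync".
 */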

#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
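
/*
 * A minimal usage sketch; node, head, init_node(), use() and payload are
 * illustrative names.  The __smp_* helpers above back the generic
 * smp_store_release() and smp_load_acquire() wrappers pulled in from
 * <asm-generic/barrier.h> below, giving one-sided ordering without a
 * full "sync" on either side:
 *
 *	// publisher
 *	struct node *n = kmalloc(sizeof(*n), GFP_KERNEL);
 *	init_node(n);
 *	smp_store_release(&head, n);	// ordered store: "stw,ma"/"std,ma"
 *
 *	// reader
 *	struct node *n = smp_load_acquire(&head);	// ordered load
 *	if (n)
 *		use(n->payload);
 */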
#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */