/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/build_bug.h>
#include <linux/types.h>
#include <linux/cmpxchg-emu.h>

#include <asm/barrier.h>
#include <asm/smp.h>
#ifdef CONFIG_ARC_HAS_LLSC
/*
 * if (*ptr == @old)
 *	*ptr = @new
 */
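/*
 * How the loop below achieves that atomically: LLOCK loads *ptr and arms
 * a reservation on its address; SCOND stores @new only if the reservation
 * is still intact, setting the Z flag on success. BRNE bails out early on
 * a value mismatch; BNZ retries the whole sequence if SCOND failed.
 */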
#define __cmpxchg(ptr, old, new)				\
({								\
	__typeof__(*(ptr)) _prev;				\
								\
	__asm__ __volatile__(					\
	"1:	llock  %0, [%1]	\n"				\
	"	brne   %0, %2, 2f	\n"			\
	"	scond  %3, [%1]	\n"				\
	"	bnz     1b	\n"				\
	"2:			\n"				\
	: "=&r"(_prev)	/* Early clobber prevent reg reuse */	\
	: "r"(ptr),	/* Not "m": llock only supports reg */	\
	  "ir"(old),						\
	  "r"(new)	/* Not "ir": scond can't take LIMM */	\
	: "cc",							\
	  "memory");	/* gcc knows memory is clobbered */	\
								\
	_prev;							\
})
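/*
 * Illustrative use (a sketch, not part of the original header; "slot" is
 * a hypothetical variable): claim a word-sized slot exactly once.
 *
 *	u32 slot = 0;
 *	if (__cmpxchg(&slot, 0, 1) == 0)
 *		;	// this caller saw 0 and installed 1 atomically
 */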
#define arch_cmpxchg_relaxed(ptr, old, new)			\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _o_ = (old);				\
	__typeof__(*(ptr)) _n_ = (new);				\
	__typeof__(*(ptr)) _prev_;				\
								\
	switch(sizeof(*(_p_))) {	/* pointee size, not pointer size */ \
	case 1:							\
		_prev_ = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)_p_, (uintptr_t)_o_, (uintptr_t)_n_); \
		break;						\
	case 4:							\
		_prev_ = __cmpxchg(_p_, _o_, _n_);		\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	_prev_;							\
})
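/*
 * Note on the size dispatch above: ARC LLOCK/SCOND only operate on words,
 * so the 1-byte case is routed to the generic cmpxchg_emu_u8() helper
 * from <linux/cmpxchg-emu.h>, which emulates a byte-wide cmpxchg with a
 * fullword cmpxchg loop on the aligned word containing the byte. Any size
 * other than 1 or 4 is rejected at compile time via BUILD_BUG().
 */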
#else
#define arch_cmpxchg(ptr, old, new)				\
({								\
	volatile __typeof__(ptr) _p_ = (ptr);			\
	__typeof__(*(ptr)) _o_ = (old);				\
	__typeof__(*(ptr)) _n_ = (new);				\
	__typeof__(*(ptr)) _prev_;				\
	unsigned long __flags;					\
								\
	/*							\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */							\
	atomic_ops_lock(__flags);				\
	_prev_ = *_p_;						\
	if (_prev_ == _o_)					\
		*_p_ = _n_;					\
	atomic_ops_unlock(__flags);				\
	_prev_;							\
})
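/*
 * atomic_ops_lock() comes from <asm/smp.h>: on SMP it takes a global
 * spinlock, while on UP builds it reduces to local_irq_save(), which is
 * enough since the only concurrency to exclude there is a local interrupt.
 */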
#endif

/*
 * xchg
 */

#ifdef CONFIG_ARC_HAS_LLSC
#define __arch_xchg(ptr, val)					\
({								\
	__asm__ __volatile__(					\
	"	ex  %0, [%1]	\n"	/* set new value */	\
	: "+r"(val)		/* on return: old value */	\
	: "r"(ptr)						\
	: "memory");						\
	val;			/* get old value */		\
})
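/*
 * EX is a single instruction that atomically swaps a register with a
 * memory location, so unlike cmpxchg no LLOCK/SCOND retry loop is needed.
 */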
#define arch_xchg_relaxed(ptr, val)				\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _val_ = (val);			\
								\
	switch(sizeof(*(_p_))) {				\
	case 4:							\
		_val_ = __arch_xchg(_p_, _val_);		\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	_val_;							\
})
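/*
 * Illustrative use (a sketch, not part of the original header; "pending"
 * is a hypothetical variable): atomically grab and reset a word of flags.
 *
 *	u32 seen = arch_xchg_relaxed(&pending, 0);
 */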
#else /* !CONFIG_ARC_HAS_LLSC */
/*
 * The EX instruction is baseline, so it is present in !LLSC configs too.
 * But in this regime it still needs to take the @atomic_ops_lock spinlock
 * to allow interop with cmpxchg(), which uses the spinlock in !LLSC
 * (llist.h uses xchg and cmpxchg on the same data).
 */
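/*
 * Sketch of the hazard this avoids (not from the original header): CPU A,
 * inside cmpxchg(), holds atomic_ops_lock and has read *p but not yet
 * written it back; if CPU B's xchg() hit *p with a bare EX in that window,
 * cmpxchg()'s read-modify-write would no longer be atomic against it.
 * Taking the same lock serializes the two.
 */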
#define arch_xchg(ptr, val)					\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _val_ = (val);			\
								\
	unsigned long __flags;					\
								\
	atomic_ops_lock(__flags);				\
								\
	__asm__ __volatile__(					\
	"	ex  %0, [%1]	\n"				\
	: "+r"(_val_)						\
	: "r"(_p_)						\
	: "memory");						\
								\
	atomic_ops_unlock(__flags);				\
	_val_;							\
})
#endif

#endif /* __ASM_ARC_CMPXCHG_H */