/* SPDX-License-Identifier: GPL-2.0
 *
 * include/asm-sh/spinlock-llsc.h
 *
 * Copyright (C) 2002, 2003 Paul Mundt
 * Copyright (C) 2006, 2007 Akio Idehara
 */
#ifndef __ASM_SH_SPINLOCK_LLSC_H
#define __ASM_SH_SPINLOCK_LLSC_H

#include <asm/barrier.h>
#include <asm/processor.h>

/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere
 */

#define arch_spin_is_locked(x)		((x)->lock <= 0)
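
/*
 * A lock value of 1 means unlocked; arch_spin_lock() stores 0 to take the
 * lock and arch_spin_unlock() stores 1 to drop it, which is why the test
 * above treats anything <= 0 as held.
 */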

/*
 * Simple spin lock operations.  There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */
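
/*
 * All of the locks below are built on the movli.l/movco.l (LL/SC) pair:
 * movli.l loads the lock word and opens a link to it, and movco.l stores
 * the updated value only if nothing else has written it in the meantime,
 * setting the T bit on success.  The "bf 1b" following each movco.l
 * retries the whole sequence when the store-conditional fails.
 */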
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	unsigned long tmp;
	unsigned long oldval;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! arch_spin_lock	\n\t"
		"mov		%0, %1				\n\t"
		"mov		#0, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"cmp/pl		%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp), "=&r" (oldval)
		: "r" (&lock->lock)
		: "t", "memory"
	);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	unsigned long tmp;

	/* This could be optimised with ARCH_HAS_MMIOWB */
	mmiowb();
	__asm__ __volatile__ (
		"mov		#1, %0 ! arch_spin_unlock	\n\t"
		"mov.l		%0, @%1				\n\t"
		: "=&z" (tmp)
		: "r" (&lock->lock)
		: "t", "memory"
	);
}

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	unsigned long tmp, oldval;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! arch_spin_trylock	\n\t"
		"mov		%0, %1				\n\t"
		"mov		#0, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (oldval)
		: "r" (&lock->lock)
		: "t", "memory"
	);

	return oldval;
}

/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * NOTE! it is quite common to have readers in interrupts but no interrupt
 * writers. For those circumstances we can "mix" irq-safe locks - any writer
 * needs to get an irq-safe write-lock, but readers can get non-irqsafe
 * read-locks.
 */
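
/*
 * The rwlock counter starts at RW_LOCK_BIAS.  Each reader takes the lock by
 * decrementing it by one, and a writer by subtracting the whole bias, so a
 * writer can only get in while the counter is still exactly RW_LOCK_BIAS and
 * readers only while it is positive.
 */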

static inline void arch_read_lock(arch_rwlock_t *rw)
{
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! arch_read_lock	\n\t"
		"cmp/pl		%0				\n\t"
		"bf		1b				\n\t"
		"add		#-1, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (&rw->lock)
		: "t", "memory"
	);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! arch_read_unlock	\n\t"
		"add		#1, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (&rw->lock)
		: "t", "memory"
	);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! arch_write_lock	\n\t"
		"cmp/hs		%2, %0				\n\t"
		"bf		1b				\n\t"
		"sub		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (&rw->lock), "r" (RW_LOCK_BIAS)
		: "t", "memory"
	);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	__asm__ __volatile__ (
		"mov.l		%1, @%0 ! arch_write_unlock	\n\t"
		:
		: "r" (&rw->lock), "r" (RW_LOCK_BIAS)
		: "t", "memory"
	);
}
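
/*
 * Non-blocking reader: take a read reference only while no writer holds the
 * lock.  Returns non-zero on success.
 */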

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
	unsigned long tmp, oldval;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! arch_read_trylock	\n\t"
		"mov		%0, %1				\n\t"
		"cmp/pl		%0				\n\t"
		"bf		2f				\n\t"
		"add		#-1, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"2:						\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (oldval)
		: "r" (&rw->lock)
		: "t", "memory"
	);

	return (oldval > 0);
}
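
/*
 * Non-blocking writer: claim the lock only while no readers or writers hold
 * it, i.e. while the counter is still at RW_LOCK_BIAS.  Returns non-zero on
 * success.
 */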

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
	unsigned long tmp, oldval;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! arch_write_trylock	\n\t"
		"mov		%0, %1				\n\t"
		"cmp/hs		%3, %0				\n\t"
		"bf		2f				\n\t"
		"sub		%3, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (oldval)
		: "r" (&rw->lock), "r" (RW_LOCK_BIAS)
		: "t", "memory"
	);

	return (oldval > (RW_LOCK_BIAS - 1));
}

#endif /* __ASM_SH_SPINLOCK_LLSC_H */