/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Queued spinlock
 *
 * (C) Copyright 2013-2015 Hewlett-Packard Development Company, L.P.
 * (C) Copyright 2015 Hewlett-Packard Enterprise Development LP
 *
 * Authors: Waiman Long <waiman.long@hpe.com>
 */
#ifndef __ASM_GENERIC_QSPINLOCK_H
#define __ASM_GENERIC_QSPINLOCK_H

#include <asm-generic/qspinlock_types.h>

/**
 * queued_spin_is_locked - is the spinlock locked?
 * @lock: Pointer to queued spinlock structure
 * Return: 1 if it is locked, 0 otherwise
 */
static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
{
	/*
	 * Any !0 state indicates it is locked, even if _Q_LOCKED_VAL
	 * isn't immediately observable.
	 */
	return atomic_read(&lock->val);
}
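
/*
 * Reading aid (added commentary, not from the original header): any
 * nonzero state, including a pending bit or queued tail set before the
 * owner's locked byte becomes visible, reports the lock as held. A
 * hypothetical sanity check built on it:
 *
 *	static void assert_qspinlock_held(struct qspinlock *lock)
 *	{
 *		WARN_ON_ONCE(!queued_spin_is_locked(lock));
 *	}
 */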

/**
 * queued_spin_value_unlocked - is the spinlock structure unlocked?
 * @lock: queued spinlock structure
 * Return: 1 if it is unlocked, 0 otherwise
 *
 * N.B. Whenever there are tasks waiting for the lock, it is considered
 *      locked wrt the lockref code, to keep the lockref code from
 *      stealing the lock and changing things underneath it. This also
 *      allows some optimizations to be applied without conflict with
 *      lockref.
 */
static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
{
	return !atomic_read(&lock.val);
}
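
/*
 * Illustrative sketch (hypothetical types and function, added for
 * clarity): lockref-style code takes a by-value snapshot and only
 * attempts a lockless update while the embedded lock reads as unlocked,
 * which is why this helper takes the structure by value:
 *
 *	struct counted { struct qspinlock lock; int count; };
 *
 *	static bool lockless_update_possible(const struct counted *c)
 *	{
 *		struct counted old = *c;	// by-value snapshot
 *		return queued_spin_value_unlocked(old.lock);
 *	}
 */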

/**
 * queued_spin_is_contended - check if the lock is contended
 * @lock : Pointer to queued spinlock structure
 * Return: 1 if lock contended, 0 otherwise
 */
static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
{
	return atomic_read(&lock->val) & ~_Q_LOCKED_MASK;
}
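
/*
 * Reading aid (layout summarized from asm-generic/qspinlock_types.h for
 * the common NR_CPUS < 16K configuration; check there for the exact
 * encoding):
 *
 *	bits  0- 7: locked byte (_Q_LOCKED_MASK)
 *	bits  8-15: pending
 *	bits 16-31: tail of the MCS waiter queue
 *
 * So (val & ~_Q_LOCKED_MASK) is nonzero exactly when a pending or queued
 * waiter exists, i.e. the lock is contended even if currently held.
 */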

/**
 * queued_spin_trylock - try to acquire the queued spinlock
 * @lock : Pointer to queued spinlock structure
 * Return: 1 if lock acquired, 0 if failed
 */
static __always_inline int queued_spin_trylock(struct qspinlock *lock)
{
	u32 val = atomic_read(&lock->val);

	if (unlikely(val))
		return 0;

	return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL));
}
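
/*
 * Illustrative usage (hypothetical function, added for clarity): the
 * trylock form lets a caller back off instead of spinning, e.g. when
 * taking a second lock could otherwise deadlock:
 *
 *	static bool try_lock_pair(struct qspinlock *a, struct qspinlock *b)
 *	{
 *		queued_spin_lock(a);
 *		if (!queued_spin_trylock(b)) {
 *			queued_spin_unlock(a);
 *			return false;	// caller can retry or reorder
 *		}
 *		return true;		// caller unlocks b, then a
 *	}
 */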

extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);

/**
 * queued_spin_lock - acquire a queued spinlock
 * @lock: Pointer to queued spinlock structure
 */
static __always_inline void queued_spin_lock(struct qspinlock *lock)
{
	u32 val = 0;

	if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
		return;

	queued_spin_lock_slowpath(lock, val);
}
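
/*
 * Reading aid (added commentary): the cmpxchg above is the uncontended
 * fast path and only succeeds when the whole lock word was 0. On
 * failure, val holds the lock word that was observed, and the
 * out-of-line slowpath takes over (pending bit first, then MCS
 * queueing). Roughly:
 *
 *	if (lock word was 0)			// atomic, acquire ordering
 *		lock word = _Q_LOCKED_VAL;	// acquired on the fast path
 *	else
 *		queued_spin_lock_slowpath(lock, val);
 */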

#ifndef queued_spin_unlock
/**
 * queued_spin_unlock - release a queued spinlock
 * @lock : Pointer to queued spinlock structure
 */
static __always_inline void queued_spin_unlock(struct qspinlock *lock)
{
	/*
	 * unlock() needs release semantics:
	 */
	smp_store_release(&lock->locked, 0);
}
#endif
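
/*
 * Reading aid (added commentary): only the lock owner writes ->locked at
 * unlock time, so a release store of 0 to that one byte is sufficient;
 * the pending and tail bits owned by waiters are left untouched. The
 * #ifndef lets an architecture supply a cheaper unlock sequence.
 */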

#ifndef virt_spin_lock
static __always_inline bool virt_spin_lock(struct qspinlock *lock)
{
	return false;
}
#endif
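
/*
 * Illustrative override (a sketch modeled on x86's virt_spin_lock; the
 * static key is that architecture's, not defined here): strict queueing
 * can perform badly when vCPUs are preempted, so a paravirtualized arch
 * may fall back to a simple test-and-set lock:
 *
 *	#define virt_spin_lock virt_spin_lock
 *	static inline bool virt_spin_lock(struct qspinlock *lock)
 *	{
 *		if (!static_branch_likely(&virt_spin_lock_key))
 *			return false;
 *		do {
 *			while (atomic_read(&lock->val) != 0)
 *				cpu_relax();
 *		} while (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) != 0);
 *		return true;
 *	}
 */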

/*
 * Remap the architecture-specific spinlock functions to the
 * corresponding queued spinlock functions.
 */
#define arch_spin_is_locked(l)		queued_spin_is_locked(l)
#define arch_spin_is_contended(l)	queued_spin_is_contended(l)
#define arch_spin_value_unlocked(l)	queued_spin_value_unlocked(l)
#define arch_spin_lock(l)		queued_spin_lock(l)
#define arch_spin_trylock(l)		queued_spin_trylock(l)
#define arch_spin_unlock(l)		queued_spin_unlock(l)
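
/*
 * Illustrative effect of the remapping (hypothetical caller; the
 * initializer comes from asm-generic/qspinlock_types.h):
 *
 *	arch_spinlock_t s = __ARCH_SPIN_LOCK_UNLOCKED;
 *
 *	arch_spin_lock(&s);	// expands to queued_spin_lock(&s)
 *	arch_spin_unlock(&s);	// expands to queued_spin_unlock(&s)
 */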

#endif /* __ASM_GENERIC_QSPINLOCK_H */