arch/arm-native/kernel/spinlock.c
/*
    Copyright © 2015, The AROS Development Team. All rights reserved.
    $Id$
*/
#include <aros/types/spinlock_s.h>
#include <aros/kernel.h>
#include <aros/libcall.h>
#include <utility/hooks.h>

#include <asm/arm/cpu.h>

#include <kernel_base.h>

#include <proto/kernel.h>
AROS_LH3(spinlock_t *, KrnSpinLock,
         AROS_LHA(spinlock_t *, lock, A1),
         AROS_LHA(struct Hook *, failhook, A0),
         AROS_LHA(ULONG, mode, D0),
         struct KernelBase *, KernelBase, 52, Kernel)
{
    AROS_LIBFUNC_INIT

    unsigned long lock_value, result;
    if (mode == SPINLOCK_MODE_WRITE)
    {
        asm volatile(
        "1:     ldrex   %0, [%1]     \n\t"   // Load the lock value, gaining exclusive access
        "       teq     %0, #0       \n\t"   // Is the lock free?
        "       wfene                \n\t"   // Wait for an event if the lock is already taken
        "       strexeq %0, %2, [%1] \n\t"   // Try to exclusively write the lock value to memory
        "       teq     %0, #0       \n\t"   // Did the store succeed?
        "       bne     1b           \n\t"   // If not, try to obtain the lock again
        : "=&r"(lock_value)
        : "r"(&lock->lock), "r"(0x80000000)
        : "cc"
        );
    }
    else
    {
        asm volatile(
        "1:     ldrex   %0, [%2]     \n\t"   // Load the lock value, gaining exclusive access
        "       adds    %0, %0, #1   \n\t"   // Increase the lock value and update the condition flags
        "       wfemi                \n\t"   // Wait for an event if the lock value is negative (write-locked)
        "       strexpl %1, %0, [%2] \n\t"   // Try to exclusively write the lock value to memory if non-negative
        "       rsbpls  %0, %1, #0   \n\t"   // Reverse subtract and update the condition flags:
                                             // - if the strex write succeeded, %1 contains 0; #0 - 0 clears the N flag
                                             // - if it failed, %1 contains 1; #0 - 1 gives 0xffffffff and sets the N flag
        "       bmi     1b           \n\t"   // Try again if N is set (negative lock value, or write failure)
        : "=&r"(lock_value), "=&r"(result)
        : "r"(&lock->lock)
        : "cc"
        );
    }
    dmb();

    return lock;

    AROS_LIBFUNC_EXIT
}
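/*
 * Usage sketch (illustrative; the names beyond KrnSpinLock are assumed to
 * come from the same arch/arm-native kernel module and their exact
 * signatures may differ):
 *
 *     spinlock_t sl;
 *     KrnSpinInit(&sl);                              // assumed initializer
 *     KrnSpinLock(&sl, NULL, SPINLOCK_MODE_WRITE);   // exclusive access
 *     ... critical section ...
 *     KrnSpinUnLock(&sl);                            // assumed release; on ARM a
 *                                                    // release typically issues SEV
 *                                                    // to wake cores waiting in WFE
 */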