1 //===-- sanitizer_atomic_clang_mips.h ---------------------------*- C++ -*-===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 // This file is a part of ThreadSanitizer/AddressSanitizer runtime.
10 // Not intended for direct inclusion. Include sanitizer_atomic.h.
12 //===----------------------------------------------------------------------===//
14 #ifndef SANITIZER_ATOMIC_CLANG_MIPS_H
15 #define SANITIZER_ATOMIC_CLANG_MIPS_H
17 namespace __sanitizer
{
// MIPS32 does not support atomics > 4 bytes. To address this lack of
// functionality, the sanitizer library provides helper methods which use an
// internal spin lock mechanism to emulate atomic operations when the size is
// 8 bytes.
// Acquire the spin lock.  Atomically test-and-set *lock to 1; if it was
// already held, busy-wait with plain reads (cheaper than repeated
// test-and-set, which would bounce the cache line) until it drops to 0,
// then retry the acquisition.
static void __spin_lock(volatile int *lock) {
  while (__sync_lock_test_and_set(lock, 1))
    while (*lock) {
    }
}
// Release the spin lock acquired by __spin_lock, allowing another
// waiter to take it.
static void __spin_unlock(volatile int *lock) {
  __sync_lock_release(lock);
}
// Make sure the lock is on its own cache line to prevent false sharing.
// Put it inside a struct that is aligned and padded to the typical MIPS
// cacheline which is 32 bytes.
static struct {
  int lock;                    // 0 = free, 1 = held (see __spin_lock).
  char pad[32 - sizeof(int)];  // Pad the struct out to a full cache line.
} __attribute__((aligned(32))) lock = {0, {0}};
40 inline atomic_uint64_t::Type
atomic_fetch_add(volatile atomic_uint64_t
*ptr
,
41 atomic_uint64_t::Type val
,
44 (memory_order_relaxed
| memory_order_release
| memory_order_seq_cst
));
45 DCHECK(!((uptr
)ptr
% sizeof(*ptr
)));
47 atomic_uint64_t::Type ret
;
49 __spin_lock(&lock
.lock
);
50 ret
= *(const_cast<atomic_uint64_t::Type
volatile *>(&ptr
->val_dont_use
));
51 ptr
->val_dont_use
= ret
+ val
;
52 __spin_unlock(&lock
.lock
);
58 inline atomic_uint64_t::Type
atomic_fetch_sub(volatile atomic_uint64_t
*ptr
,
59 atomic_uint64_t::Type val
,
61 return atomic_fetch_add(ptr
, -val
, mo
);
65 inline bool atomic_compare_exchange_strong(volatile atomic_uint64_t
*ptr
,
66 atomic_uint64_t::Type
*cmp
,
67 atomic_uint64_t::Type xchg
,
70 (memory_order_relaxed
| memory_order_release
| memory_order_seq_cst
));
71 DCHECK(!((uptr
)ptr
% sizeof(*ptr
)));
73 typedef atomic_uint64_t::Type Type
;
78 __spin_lock(&lock
.lock
);
79 prev
= *(const_cast<Type
volatile *>(&ptr
->val_dont_use
));
82 ptr
->val_dont_use
= xchg
;
84 __spin_unlock(&lock
.lock
);
90 inline atomic_uint64_t::Type
atomic_load(const volatile atomic_uint64_t
*ptr
,
93 (memory_order_relaxed
| memory_order_release
| memory_order_seq_cst
));
94 DCHECK(!((uptr
)ptr
% sizeof(*ptr
)));
96 atomic_uint64_t::Type zero
= 0;
97 volatile atomic_uint64_t
*Newptr
=
98 const_cast<volatile atomic_uint64_t
*>(ptr
);
99 return atomic_fetch_add(Newptr
, zero
, mo
);
103 inline void atomic_store(volatile atomic_uint64_t
*ptr
, atomic_uint64_t::Type v
,
106 (memory_order_relaxed
| memory_order_release
| memory_order_seq_cst
));
107 DCHECK(!((uptr
)ptr
% sizeof(*ptr
)));
109 __spin_lock(&lock
.lock
);
110 ptr
->val_dont_use
= v
;
111 __spin_unlock(&lock
.lock
);
114 } // namespace __sanitizer
116 #endif // SANITIZER_ATOMIC_CLANG_MIPS_H