// Source: chromium-blink-merge.git / third_party/tcmalloc/chromium/src/base/spinlock.h
/* Copyright (c) 2006, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * ---
 * Author: Sanjay Ghemawat
 */

// Fast spinlocks (at least on x86, a lock/unlock pair is approximately
// half the cost of a Mutex because the unlock just does a store instead
// of a compare-and-swap which is expensive).
//
// SpinLock is async signal safe.
// If used within a signal handler, all lock holders
// should block the signal even outside the signal handler.
#ifndef BASE_SPINLOCK_H_
#define BASE_SPINLOCK_H_

#include <config.h>
#include "base/atomicops.h"
#include "base/basictypes.h"
#include "base/dynamic_annotations.h"
#include "base/thread_annotations.h"
52 class LOCKABLE SpinLock {
53 public:
54 SpinLock() : lockword_(kSpinLockFree) { }
56 // Special constructor for use with static SpinLock objects. E.g.,
58 // static SpinLock lock(base::LINKER_INITIALIZED);
60 // When intialized using this constructor, we depend on the fact
61 // that the linker has already initialized the memory appropriately.
62 // A SpinLock constructed like this can be freely used from global
63 // initializers without worrying about the order in which global
64 // initializers run.
65 explicit SpinLock(base::LinkerInitialized /*x*/) {
66 // Does nothing; lockword_ is already initialized
69 // Acquire this SpinLock.
70 // TODO(csilvers): uncomment the annotation when we figure out how to
71 // support this macro with 0 args (see thread_annotations.h)
72 inline void Lock() /*EXCLUSIVE_LOCK_FUNCTION()*/ {
73 if (base::subtle::Acquire_CompareAndSwap(&lockword_, kSpinLockFree,
74 kSpinLockHeld) != kSpinLockFree) {
75 SlowLock();
77 ANNOTATE_RWLOCK_ACQUIRED(this, 1);
80 // Try to acquire this SpinLock without blocking and return true if the
81 // acquisition was successful. If the lock was not acquired, false is
82 // returned. If this SpinLock is free at the time of the call, TryLock
83 // will return true with high probability.
84 inline bool TryLock() EXCLUSIVE_TRYLOCK_FUNCTION(true) {
85 bool res =
86 (base::subtle::Acquire_CompareAndSwap(&lockword_, kSpinLockFree,
87 kSpinLockHeld) == kSpinLockFree);
88 if (res) {
89 ANNOTATE_RWLOCK_ACQUIRED(this, 1);
91 return res;
94 // Release this SpinLock, which must be held by the calling thread.
95 // TODO(csilvers): uncomment the annotation when we figure out how to
96 // support this macro with 0 args (see thread_annotations.h)
97 inline void Unlock() /*UNLOCK_FUNCTION()*/ {
98 uint64 wait_cycles =
99 static_cast<uint64>(base::subtle::NoBarrier_Load(&lockword_));
100 ANNOTATE_RWLOCK_RELEASED(this, 1);
101 base::subtle::Release_Store(&lockword_, kSpinLockFree);
102 if (wait_cycles != kSpinLockHeld) {
103 // Collect contentionz profile info, and speed the wakeup of any waiter.
104 // The wait_cycles value indicates how long this thread spent waiting
105 // for the lock.
106 SlowUnlock(wait_cycles);
110 // Determine if the lock is held. When the lock is held by the invoking
111 // thread, true will always be returned. Intended to be used as
112 // CHECK(lock.IsHeld()).
113 inline bool IsHeld() const {
114 return base::subtle::NoBarrier_Load(&lockword_) != kSpinLockFree;
117 static const base::LinkerInitialized LINKER_INITIALIZED; // backwards compat
118 private:
119 enum { kSpinLockFree = 0 };
120 enum { kSpinLockHeld = 1 };
121 enum { kSpinLockSleeper = 2 };
123 volatile Atomic32 lockword_;
125 void SlowLock();
126 void SlowUnlock(uint64 wait_cycles);
127 Atomic32 SpinLoop(int64 initial_wait_timestamp, Atomic32* wait_cycles);
128 inline int32 CalculateWaitCycles(int64 wait_start_time);
130 DISALLOW_COPY_AND_ASSIGN(SpinLock);
133 // Corresponding locker object that arranges to acquire a spinlock for
134 // the duration of a C++ scope.
135 class SCOPED_LOCKABLE SpinLockHolder {
136 private:
137 SpinLock* lock_;
138 public:
139 inline explicit SpinLockHolder(SpinLock* l) EXCLUSIVE_LOCK_FUNCTION(l)
140 : lock_(l) {
141 l->Lock();
143 // TODO(csilvers): uncomment the annotation when we figure out how to
144 // support this macro with 0 args (see thread_annotations.h)
145 inline ~SpinLockHolder() /*UNLOCK_FUNCTION()*/ { lock_->Unlock(); }
147 // Catch bug where variable name is omitted, e.g. SpinLockHolder (&lock);
148 #define SpinLockHolder(x) COMPILE_ASSERT(0, spin_lock_decl_missing_var_name)
#endif  // BASE_SPINLOCK_H_