// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include "base/memory/discardable_shared_memory.h"

namespace content {
namespace {

// Note: this returns true for x == 0; callers must reject zero separately
// (as the heap constructor does with DCHECK_NE below).
bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}

// A span is linked into the free list iff its list pointers are set.
bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

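// A Span is a contiguous run of |length_| blocks starting at block index
// |start_| inside |shared_memory_|. Block indices are global rather than
// per-segment: they are derived from the mapped address divided by the
// heap's block size (see Grow() below).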
DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  for (auto shared_memory : shared_memory_segments_)
    ReleaseMemory(shared_memory);

  DCHECK(free_spans_.empty());
}

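// Takes ownership of |shared_memory|, which must be mapped, aligned to the
// block size and a whole number of blocks in size, and returns a span
// covering the entire segment. A minimal sketch of a caller, assuming the
// segment is created with base::DiscardableSharedMemory::CreateAndMap()
// (how callers actually obtain segments is outside this file):
//
//   scoped_ptr<base::DiscardableSharedMemory> memory(
//       new base::DiscardableSharedMemory);
//   size_t size = 4 * block_size;  // Any multiple of the heap's block size.
//   if (memory->CreateAndMap(size)) {
//     scoped_ptr<DiscardableSharedMemoryHeap::Span> span =
//         heap.Grow(memory.Pass(), size);
//   }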
scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  // Start tracking whether this segment is resident by adding it to
  // |shared_memory_segments_|.
  shared_memory_segments_.push_back(shared_memory.release());

  return span.Pass();
}

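// Returns |span| to the heap, first coalescing it with any directly adjacent
// spans that are already free. The adjacency lookups below work because
// |spans_| maps both the first and the last block of every span to its Span
// object (see RegisterSpan()).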
void DiscardableSharedMemoryHeap::MergeIntoFreeList(scoped_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  free_spans_.Append(span.release());
}

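// Shrinks |span| to its first |blocks| blocks and returns the remainder as a
// new span. The caller keeps ownership of both; note that the leftover is not
// put on the free list here.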
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}

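// Returns the best free span for an allocation of |blocks| blocks, carved to
// exactly that size, or nullptr if no free span is large enough. Ties between
// equally sized spans go to the lower start address. For example (with
// made-up numbers): given free spans of 4 blocks at block 100, 2 blocks at
// block 300 and 2 blocks at block 200, a request for 2 blocks is served from
// the span at block 200.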
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeList(size_t blocks) {
  DCHECK(blocks);

  // Search through the free list to find the best span.
  Span* best = nullptr;

  // This implements address-ordered best-fit.
  for (Span* span = free_spans_.head()->value(); span != free_spans_.end();
       span = span->next()->value()) {
    // Skip span if it's not large enough.
    if (span->length_ < blocks)
      continue;

    if (best) {
      // Skip span if |best| is a better fit.
      if (span->length_ > best->length_)
        continue;

      // Skip span if |best| starts at a lower address.
      if ((span->length_ == best->length_) && (span->start_ > best->start_))
        continue;
    }

    best = span;
  }

  return best ? Carve(best, blocks) : nullptr;
}

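// Releases every segment whose memory is no longer resident (its contents
// have been purged) and unregisters the associated spans. Returns the total
// number of mapped bytes released.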
size_t DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  size_t bytes_released = 0;
  size_t i = 0;

  // Release memory for all non-resident segments.
  while (i < shared_memory_segments_.size()) {
    base::DiscardableSharedMemory* shared_memory = shared_memory_segments_[i];

    // Skip segment if still resident.
    if (shared_memory->IsMemoryResident()) {
      ++i;
      continue;
    }

    bytes_released += shared_memory->mapped_size();

    // Release the memory and unregister all associated spans.
    ReleaseMemory(shared_memory);

    std::swap(shared_memory_segments_[i], shared_memory_segments_.back());
    shared_memory_segments_.pop_back();
  }

  return bytes_released;
}

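// Unlinks |span| from the free list and hands ownership of it back to the
// caller.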
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  span->RemoveFromList();
  return make_scoped_ptr(span);
}

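// Removes |span| from the free list and trims it to exactly |blocks| blocks,
// returning the tail, if any, to the free list as a new span. |span| must be
// free and at least |blocks| blocks long.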
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  // Callers guarantee |blocks| <= |serving->length_|, so this cannot
  // underflow.
  const size_t extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    free_spans_.Append(leftover.release());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  return serving.Pass();
}

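// Records |span| in |spans_| under both its first and its last block, so that
// neighbor lookups from either side (see MergeIntoFreeList()) can find it.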
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}

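// Unregisters all spans backed by |shared_memory| and clears their
// back-pointers. Spans sitting on the free list are destroyed here; spans
// still handed out to clients are presumed to be owned (and later destroyed)
// by those clients.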
void DiscardableSharedMemoryHeap::ReleaseMemory(
    base::DiscardableSharedMemory* shared_memory) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + shared_memory->mapped_size() / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    // If |span| is in the free list, remove it.
    if (IsInFreeList(span))
      RemoveFromFreeList(span);
  }
}

}  // namespace content