// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include "base/memory/discardable_shared_memory.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}

bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

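// A Span represents a contiguous range of blocks within a single
// discardable shared memory segment.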
DiscardableSharedMemoryHeap::Span::Span(
    linked_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  while (!free_spans_.empty())
    RemoveFromFreeList(free_spans_.tail()->value());
}

scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> memory,
    size_t size) {
  linked_ptr<base::DiscardableSharedMemory> shared_memory(memory.release());

  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory,
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  RegisterSpan(span.get());
  return span.Pass();
}

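// Returns |span| to the free list, first merging it with any neighboring
// spans that are already free.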
void DiscardableSharedMemoryHeap::MergeIntoFreeList(scoped_ptr<Span> span) {
  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  free_spans_.Append(span.release());
}

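// Shrinks |span| to |blocks| blocks and returns the remaining blocks as a
// new span backed by the same shared memory segment.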
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}

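// Finds the best-fitting free span with at least |blocks| blocks, preferring
// lower addresses on ties, and carves the requested number of blocks out of
// it. Returns nullptr if no free span is large enough.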
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeList(size_t blocks) {
  DCHECK(blocks);

  // Search through list to find best span.
  Span* best = nullptr;

  // This implements address-ordered best-fit.
  for (Span* span = free_spans_.head()->value(); span != free_spans_.end();
       span = span->next()->value()) {
    // Skip span if it's not large enough.
    if (span->length_ < blocks)
      continue;

    if (best) {
      // Skip span if |best| is a better fit.
      if (span->length_ > best->length_)
        continue;

      // Skip span if |best| starts at a lower address.
      if ((span->length_ == best->length_) && (span->start_ > best->start_))
        continue;
    }

    best = span;
  }

  return best ? Carve(best, blocks) : nullptr;
}

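// Unregisters |span| and deletes it. The span must not be in the free list.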
void DiscardableSharedMemoryHeap::DeleteSpan(scoped_ptr<Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  spans_.erase(span->start_);
  if (span->length_ > 1)
    spans_.erase(span->start_ + span->length_ - 1);
}

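// Unlinks |span| from the free list and passes ownership back to the caller.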
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  span->RemoveFromList();
  return make_scoped_ptr(span);
}

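// Removes |span| from the free list, trims it to |blocks| blocks and returns
// it. Any excess blocks are returned to the free list as a separate span.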
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    free_spans_.Append(leftover.release());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  return serving.Pass();
}

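// Records |span| in the span map under its first and last block so that
// neighboring spans can be found when coalescing.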
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

}  // namespace content