// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include "base/memory/discardable_shared_memory.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}
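
// A span is in the free list iff it is linked into |free_spans_|, i.e. at
// least one of its list pointers is set.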
bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace
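
// A Span covers |length_| contiguous blocks of |shared_memory_|; |start_| is
// the position of its first block, expressed as the memory address divided by
// the block size (see Grow()).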
DiscardableSharedMemoryHeap::Span::Span(
    linked_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}
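
// Unlink and destroy any spans that are still in the free list.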
DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  while (!free_spans_.empty())
    RemoveFromFreeList(free_spans_.tail()->value());
}
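
// Takes ownership of |memory| and registers it as a single span covering
// |size| / |block_size_| blocks. Both the memory address and |size| must be
// aligned to the block size. The returned span is not added to the free list.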
scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> memory,
    size_t size) {
  linked_ptr<base::DiscardableSharedMemory> shared_memory(memory.release());

  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory,
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());
  return span.Pass();
}
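
// Returns |span| to the free list, first coalescing it with any free neighbor
// found through the |spans_| map on either side.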
void DiscardableSharedMemoryHeap::MergeIntoFreeList(scoped_ptr<Span> span) {
  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    DeleteSpan(prev.Pass());
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    DeleteSpan(next.Pass());
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  free_spans_.Append(span.release());
}
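
// Shrinks |span| to its first |blocks| blocks and returns the remaining
// blocks as a newly registered leftover span.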
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}
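
// Finds the best free span for an allocation of |blocks| blocks and carves
// the requested amount out of it. Returns nullptr if no free span is large
// enough.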
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeList(size_t blocks) {
  DCHECK(blocks);

  // Search through list to find best span.
  Span* best = nullptr;

  // This implements address-ordered best-fit.
  for (Span* span = free_spans_.head()->value(); span != free_spans_.end();
       span = span->next()->value()) {
    // Skip span if it's not large enough.
    if (span->length_ < blocks)
      continue;

    if (best) {
      // Skip span if |best| is a better fit.
      if (span->length_ > best->length_)
        continue;

      // Skip span if |best| starts at a lower address.
      if ((span->length_ == best->length_) && (span->start_ > best->start_))
        continue;
    }

    best = span;
  }

  return best ? Carve(best, blocks) : nullptr;
}
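
// Unregisters |span| from |spans_| and destroys it. The span must not be in
// the free list.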
void DiscardableSharedMemoryHeap::DeleteSpan(scoped_ptr<Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  spans_.erase(span->start_);
  if (span->length_ > 1)
    spans_.erase(span->start_ + span->length_ - 1);
}
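
// Unlinks |span| from |free_spans_| and passes ownership back to the caller.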
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  span->RemoveFromList();
  return make_scoped_ptr(span);
}
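
// Removes |span| from the free list and trims it to exactly |blocks| blocks,
// returning any excess blocks to the free list as a separate span.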
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    free_spans_.Append(leftover.release());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  return serving.Pass();
}
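
// Maps the first and last block of |span| to it in |spans_| so that adjacent
// spans can be found when coalescing.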
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

}  // namespace content