// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include <algorithm>

#include "base/format_macros.h"
#include "base/memory/discardable_shared_memory.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/memory_dump_manager.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}
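
// Worked example of the bit trick above: for x = 8 (0b1000), x - 1 = 0b0111
// and x & (x - 1) == 0; for x = 12 (0b1100), x - 1 = 0b1011 and
// x & (x - 1) == 0b1000, which is non-zero. Note the expression is also 0 for
// x == 0; the heap rules that case out separately with
// DCHECK_NE(block_size_, 0u) in its constructor.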

bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}
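
// A Span that is not linked into any free list is expected to have null
// previous() and next() pointers; base::LinkNode clears both when a node is
// removed from a list, so checking either pointer is sufficient here.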

}  // namespace

DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(shared_memory.Pass()),
      size_(size),
      id_(id),
      deleted_callback_(deleted_callback) {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}

void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) const {
  heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd);
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0);
}

scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking if segment is resident by adding it to |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, shared_memory.Pass(), size, id, deleted_callback));

  return span.Pass();
}
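
// A minimal usage sketch for Grow() (hypothetical caller code, not part of
// this file; |heap|, |size|, |next_id| and |deleted_callback| are assumed to
// exist at the call site):
//
//   scoped_ptr<base::DiscardableSharedMemory> memory(
//       new base::DiscardableSharedMemory);
//   CHECK(memory->CreateAndMap(size));  // |size|: multiple of block size.
//   scoped_ptr<DiscardableSharedMemoryHeap::Span> span =
//       heap->Grow(memory.Pass(), size, next_id++, deleted_callback);
//
// The returned span covers the whole segment; the caller can Split() it and
// later hand pieces back via MergeIntoFreeLists().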

void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(span.Pass());
}
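
// Worked example of the coalescing above (illustrative block numbers):
// suppose blocks [8,10) are already free and the client returns a span
// covering [10,14). spans_.find(9) locates the free neighbor, so the incoming
// span grows to [8,14) and the old [8,10) span is destroyed. If [14,16) is
// also free, the second branch extends the span to [8,16) before it is
// inserted into the free list as a single 8-block span.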

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}
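
// Example: splitting a span covering [4,10) with blocks = 2 shrinks it to
// [4,6) and returns a leftover span covering [6,10). |spans_| is updated so
// that key 5 maps to the shrunk span while keys 6 and 9 map to the leftover.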

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  DCHECK(blocks);

  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search array of free lists for a suitable span.
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span, located at the tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search the overflow free list for a suitable span, starting with the most
  // recently used span at the tail and moving towards the head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}
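
// Search policy example (illustrative numbers): with blocks = 3 and
// slack = 1, the loop first checks the exact-fit bucket free_spans_[2]
// (spans of length 3), then free_spans_[3]; lengths above blocks + slack are
// never accepted, which bounds the wasted blocks per allocation to |slack|.
// Requests whose range reaches the last bucket fall through to the overflow
// list, which mixes spans of many lengths and therefore filters by length
// explicitly.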

void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  // Erase all free segments after rearranging the segments in such a way
  // that used segments precede all free segments.
  memory_segments_.erase(
      std::partition(
          memory_segments_.begin(), memory_segments_.end(),
          [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
      memory_segments_.end());
}
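
// The partition-then-erase idiom used above and in ReleasePurgedMemory():
// std::partition moves all segments satisfying the predicate to the front and
// returns an iterator to the first segment that fails it; erasing from that
// iterator to end() destroys exactly the failing segments. Since
// |memory_segments_| owns its elements, each destroyed ScopedMemorySegment
// runs its destructor, which returns the segment's blocks via ReleaseMemory()
// and fires |deleted_callback_|.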

void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
  // Erase all purged segments after rearranging the segments in such a way
  // that resident segments precede all purged segments.
  memory_segments_.erase(
      std::partition(memory_segments_.begin(), memory_segments_.end(),
                     [](const ScopedMemorySegment* segment) {
                       return segment->IsResident();
                     }),
      memory_segments_.end());
}

size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}

size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}

bool DiscardableSharedMemoryHeap::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) {
  std::for_each(
      memory_segments_.begin(), memory_segments_.end(),
      [pmd](const ScopedMemorySegment* segment) {
        segment->OnMemoryDump(pmd);
      });
  return true;
}

void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}
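
// Bucket selection example: a span of length 1 goes into free_spans_[0], a
// span of length 3 into free_spans_[2], and any span whose length is at least
// arraysize(free_spans_) shares the last bucket, which acts as the overflow
// list searched linearly by SearchFreeLists().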

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return make_scoped_ptr(span);
}

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(leftover.Pass());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving.Pass();
}
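
// Example: carving 2 blocks from a free span covering [16,21) removes the
// span from its free list, trims it to [16,18), and puts a new free span
// covering [18,21) back into the appropriate free list. Only the 2 carved
// blocks are subtracted from |num_free_blocks_|, because the serving span's
// length has already been reduced by that point.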

void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}
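
// |spans_| indexes only the first and last block of each span; interior
// blocks are not mapped. Registering a span covering blocks [7,10), for
// example, adds keys 7 and 9. This is sufficient for the neighbor probes in
// MergeIntoFreeLists(), which look up spans_.find(start - 1) and
// spans_.find(start + length): both are boundary blocks of any adjacent span.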

void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}

bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if the first span is not in a free list or is shorter than
  // the segment.
  return !IsInFreeList(span) || span->length_ != length;
}
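
// This test relies on the coalescing invariant maintained by
// MergeIntoFreeLists(): once every span of a segment has been returned, the
// segment is covered by a single free span of exactly |length| blocks. A
// first span that is free but shorter than the segment therefore implies
// that other spans of the segment are still handed out, so the segment
// counts as used.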

bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}

void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    // Discarding the scoped_ptr returned by RemoveFromFreeList() deletes
    // |span|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}

void DiscardableSharedMemoryHeap::OnMemoryDump(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size,
    int32_t segment_id,
    base::trace_event::ProcessMemoryDump* pmd) {
  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_bytes = 0;
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    Span* span = spans_[offset];
    if (!IsInFreeList(span)) {
      allocated_objects_count++;
      allocated_objects_size_in_bytes += span->length_ * block_size_;
    }
    offset += span->length_;
  }

  std::string segment_dump_name =
      base::StringPrintf("discardable/segment_%d", segment_id);
  base::trace_event::MemoryAllocatorDump* segment_dump =
      pmd->CreateAllocatorDump(segment_dump_name);
  segment_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          static_cast<uint64_t>(size));

  base::trace_event::MemoryAllocatorDump* obj_dump =
      pmd->CreateAllocatorDump(segment_dump_name + "/allocated_objects");
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectsCount,
                      base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                      static_cast<uint64_t>(allocated_objects_count));
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      static_cast<uint64_t>(allocated_objects_size_in_bytes));

  // Emit an ownership edge towards a global allocator dump node. This allows
  // us to avoid double-counting segments when both the browser and child
  // process emit them. In the special case of single-process mode, this will
  // be the only dumper active and the single ownership edge will become a
  // no-op in the UI.
  const uint64 tracing_process_id =
      base::trace_event::MemoryDumpManager::GetInstance()
          ->GetTracingProcessId();
  base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
      GetSegmentGUIDForTracing(tracing_process_id, segment_id);
  pmd->CreateSharedGlobalAllocatorDump(shared_segment_guid);

  // By creating an edge with a higher |importance| (w.r.t. browser-side
  // dumps), the tracing UI will attribute the effective size of the segment
  // to the child.
  const int kImportance = 2;
  pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid, kImportance);
}
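
// Resulting dump layout, for a hypothetical segment with id 7: an allocator
// dump "discardable/segment_7" carrying the segment's full size, a child
// dump "discardable/segment_7/allocated_objects" carrying the count and
// total size of the non-free spans, plus an ownership edge to the shared
// cross-process GUID so the browser's and the child's dumps of the same
// segment can be deduplicated by the tracing UI.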

// static
base::trace_event::MemoryAllocatorDumpGuid
DiscardableSharedMemoryHeap::GetSegmentGUIDForTracing(uint64 tracing_process_id,
                                                      int32 segment_id) {
  return base::trace_event::MemoryAllocatorDumpGuid(base::StringPrintf(
      "discardable-x-process/%" PRIx64 "/%d", tracing_process_id, segment_id));
}

}  // namespace content