// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include <algorithm>

#include "base/format_macros.h"
#include "base/memory/discardable_shared_memory.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/memory_dump_manager.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}
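
// A span is linked into a free list through its base::LinkNode members, so
// a non-null previous() or next() link means it is on some free list.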
bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(shared_memory.Pass()),
      size_(size),
      id_(id),
      deleted_callback_(deleted_callback) {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::ContainsSpan(
    Span* span) const {
  return shared_memory_ == span->shared_memory();
}

base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::ScopedMemorySegment::CreateMemoryAllocatorDump(
    Span* span,
    size_t block_size,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  DCHECK_EQ(shared_memory_, span->shared_memory());
  base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(name);
  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  static_cast<uint64_t>(span->length() * block_size));

  pmd->AddSuballocation(
      dump->guid(),
      base::StringPrintf("discardable/segment_%d/allocated_objects", id_));
  return dump;
}

void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) const {
  heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd);
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0);
}
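
// Grow() takes ownership of a new shared memory segment and returns a
// single span covering all of its blocks. The segment itself is tracked in
// |memory_segments_| so its usage and residency can be queried until it is
// eventually released.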
scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking if the segment is resident by adding it to
  // |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, shared_memory.Pass(), size, id, deleted_callback));

  return span.Pass();
}
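
// Merging looks up the spans adjacent to |span| in |spans_| (the entries at
// start - 1 and start + length) and, when a neighbor is free, coalesces it
// into |span| before |span| is inserted into a free list. This keeps free
// spans maximally sized; e.g. freeing blocks [4,5] next to a free span
// [2,3] produces a single free span [2,5].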
void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(span.Pass());
}
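
// Split() cuts |blocks| blocks off the front of |span| in place and returns
// the remainder as a new span. Both halves stay registered in |spans_|; the
// caller decides whether the leftover is returned to a free list.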
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}
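
// The search is best-fit with bounded slack: the free list holding spans of
// exactly |blocks| blocks is tried first, then progressively longer lists
// up to |blocks| + |slack|. Only requests too long for a dedicated bucket
// fall through to the overflow list. With blocks = 2 and slack = 1, free
// spans of length 2 or 3 qualify; a 4-block span is left for larger
// requests.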
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  DCHECK(blocks);

  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search the array of free lists for a suitable span.
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span, located at the tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search the overflow free list for a suitable span, starting with the
  // most recently used span at the tail and moving towards the head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}

void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  // Erase all free segments after rearranging the segments in such a way
  // that used segments precede all free segments.
  memory_segments_.erase(
      std::partition(
          memory_segments_.begin(), memory_segments_.end(),
          [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
      memory_segments_.end());
}

void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
  // Erase all purged segments after rearranging the segments in such a way
  // that resident segments precede all purged segments.
  memory_segments_.erase(
      std::partition(memory_segments_.begin(), memory_segments_.end(),
                     [](const ScopedMemorySegment* segment) {
                       return segment->IsResident();
                     }),
      memory_segments_.end());
}

size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}

size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}

bool DiscardableSharedMemoryHeap::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) {
  std::for_each(memory_segments_.begin(), memory_segments_.end(),
                [pmd](const ScopedMemorySegment* segment) {
                  segment->OnMemoryDump(pmd);
                });
  return true;
}
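
// Free spans are segregated by length: free_spans_[n] holds spans of
// exactly n + 1 blocks, except for the last list, which acts as an overflow
// bucket for anything longer. Appending at the tail keeps each list in MRU
// order, which SearchFreeLists() exploits by scanning from the tail.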
void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return make_scoped_ptr(span);
}
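
// Carve() trims a free span down to exactly |blocks| blocks: the span is
// pulled off its free list, any excess blocks are split off into a new
// leftover span that goes back to a free list, and |spans_| plus
// |num_free_blocks_| are updated to match.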
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(leftover.Pass());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, so remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving.Pass();
}
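
// |spans_| maps both the first and the last block of a span to the span
// itself, so the neighbors of any span can be found with a single map
// lookup when coalescing free spans.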
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}
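
// The inverse of RegisterSpan(): removes both |spans_| entries (first and
// last block) for |span|.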
void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}
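
// A segment is unused only when a single free span covers all of its
// blocks; a first span that is allocated, or shorter than the segment,
// means part of the segment is still in use.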
bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if the first span is not in a free list or is shorter
  // than the segment.
  return !IsInFreeList(span) || span->length_ != length;
}

bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}
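
// ReleaseMemory() detaches every span belonging to |shared_memory| from the
// heap: each span's memory pointer is cleared, the span is unregistered,
// and free spans are taken off their lists so the block counters stay
// consistent. Spans still owned by clients are not deleted here; they live
// on without backing memory.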
void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}
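
// Walks the spans of one segment to count allocated objects and bytes, then
// emits a dump for the segment plus a child dump for its allocated objects,
// linked to the cross-process segment GUID.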
void DiscardableSharedMemoryHeap::OnMemoryDump(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size,
    int32_t segment_id,
    base::trace_event::ProcessMemoryDump* pmd) {
  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_bytes = 0;
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    Span* span = spans_[offset];
    if (!IsInFreeList(span)) {
      allocated_objects_count++;
      allocated_objects_size_in_bytes += span->length_ * block_size_;
    }
    offset += span->length_;
  }

  std::string segment_dump_name =
      base::StringPrintf("discardable/segment_%d", segment_id);
  base::trace_event::MemoryAllocatorDump* segment_dump =
      pmd->CreateAllocatorDump(segment_dump_name);
  segment_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          static_cast<uint64_t>(size));

  base::trace_event::MemoryAllocatorDump* obj_dump =
      pmd->CreateAllocatorDump(segment_dump_name + "/allocated_objects");
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectsCount,
                      base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                      static_cast<uint64_t>(allocated_objects_count));
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      static_cast<uint64_t>(allocated_objects_size_in_bytes));

  // Emit an ownership edge towards a global allocator dump node. This avoids
  // double-counting segments when both the browser and child process emit
  // them. In the special case of single-process mode, this will be the only
  // active dumper and the single ownership edge will become a no-op in the UI.
  const uint64 tracing_process_id =
      base::trace_event::MemoryDumpManager::GetInstance()
          ->GetTracingProcessId();
  base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
      GetSegmentGUIDForTracing(tracing_process_id, segment_id);
  pmd->CreateSharedGlobalAllocatorDump(shared_segment_guid);

  // By creating an edge with a higher |importance| (w.r.t. browser-side
  // dumps), the tracing UI will attribute the effective size of the segment
  // to the child.
  const int kImportance = 2;
  pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid, kImportance);
}
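
// The GUID ties together the browser-side and child-side dumps of the same
// segment: both processes derive it from the child's tracing process id and
// the segment id, so their dumps resolve to one shared global node.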
// static
base::trace_event::MemoryAllocatorDumpGuid
DiscardableSharedMemoryHeap::GetSegmentGUIDForTracing(uint64 tracing_process_id,
                                                      int32 segment_id) {
  return base::trace_event::MemoryAllocatorDumpGuid(base::StringPrintf(
      "discardable-x-process/%" PRIx64 "/%d", tracing_process_id, segment_id));
}
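
// A span that has lost its backing memory (see ReleaseMemory()) is reported
// as a zero-sized dump; otherwise the dump is delegated to the segment that
// contains the span.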
base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::CreateMemoryAllocatorDump(
    Span* span,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  if (!span->shared_memory()) {
    base::trace_event::MemoryAllocatorDump* dump =
        pmd->CreateAllocatorDump(name);
    dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                    base::trace_event::MemoryAllocatorDump::kUnitsBytes, 0u);
    return dump;
  }

  ScopedVector<ScopedMemorySegment>::const_iterator it =
      std::find_if(memory_segments_.begin(), memory_segments_.end(),
                   [span](const ScopedMemorySegment* segment) {
                     return segment->ContainsSpan(span);
                   });
  DCHECK(it != memory_segments_.end());
  return (*it)->CreateMemoryAllocatorDump(span, block_size_, name, pmd);
}

}  // namespace content