// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include <algorithm>

#include "base/memory/discardable_shared_memory.h"
#include "base/strings/stringprintf.h"

namespace content {
namespace {

const char kMemoryAllocatorHeapNamePrefix[] = "segment";
const char kMemoryAllocatorName[] = "discardable";

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}

bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

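// IsInFreeList() depends on base::LinkNode clearing its neighbor pointers
// in RemoveFromList(): a span that is not linked into any free list has
// null previous() and next(), so list membership needs no extra flag.
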
DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(shared_memory.Pass()),
      size_(size),
      id_(id),
      deleted_callback_(deleted_callback) {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}

void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) const {
  heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd);
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

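// A power-of-two block size lets the alignment and size checks in Grow()
// use a simple mask. Illustrative numbers (not from this file): with
// block_size_ = 4096, block_size_ - 1 is 0xFFF, and
// "addr & (block_size_ - 1)" is zero exactly for 4096-byte aligned
// addresses.
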
DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0);
}

scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking if segment is resident by adding it to |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, shared_memory.Pass(), size, id, deleted_callback));

  return span.Pass();
}

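// Illustrative arithmetic (example values, not from this file): with
// block_size_ = 4096, a 1 MiB segment mapped at 0x2A000000 yields a span
// with start_ = 0x2A000000 / 4096 = 0x2A000 and length_ = 1048576 / 4096 =
// 256 blocks. Segments occupy disjoint address ranges, so their block
// indices never collide in |spans_|.
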
void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(span.Pass());
}

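// Coalescing example (illustrative block indices): if blocks [10, 12) are
// already free and a span covering [12, 15) is merged in, spans_.find(11)
// finds the left neighbor through its last-block key and the result is one
// free span covering [10, 15). Probing start_ - 1 and start_ + length_ is
// what keeps adjacent free spans from accumulating as fragments.
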
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}

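// Example (illustrative values): calling Split() on a span covering blocks
// [100, 108) with blocks = 3 shrinks it to [100, 103), re-keys its new
// last block as spans_[102], and returns a leftover span for [103, 108).
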
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  DCHECK(blocks);

  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search array of free lists for a suitable span.
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span located in tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search the overflow free list for a suitable span, starting with the
  // most recently used span at the tail and moving towards the head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}

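// Search example (illustrative values): SearchFreeLists(4, 2) probes the
// exact-size lists for lengths 4, 5 and 6, taking the most recently used
// span from the tail of the first non-empty list, and returns nullptr once
// length would exceed blocks + slack. The same bound applies in the
// overflow list, so a long span is never carved up for a small request.
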
void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  // Erase all free segments after rearranging the segments in such a way
  // that used segments precede all free segments.
  memory_segments_.erase(
      std::partition(
          memory_segments_.begin(), memory_segments_.end(),
          [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
      memory_segments_.end());
}

void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
  // Erase all purged segments after rearranging the segments in such a way
  // that resident segments precede all purged segments.
  memory_segments_.erase(
      std::partition(memory_segments_.begin(), memory_segments_.end(),
                     [](const ScopedMemorySegment* segment) {
                       return segment->IsResident();
                     }),
      memory_segments_.end());
}

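// Both Release*Memory() methods use the partition-then-erase idiom:
// assuming |memory_segments_| owns its elements (they are created with
// "new ScopedMemorySegment" in Grow()), erasing the trailing segments runs
// ~ScopedMemorySegment(), which is what actually releases the spans via
// ReleaseMemory() and fires |deleted_callback_|.
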
size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}

size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}

bool DiscardableSharedMemoryHeap::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) {
  std::for_each(
      memory_segments_.begin(), memory_segments_.end(),
      [pmd](const ScopedMemorySegment* segment) {
        segment->OnMemoryDump(pmd);
      });
  return true;
}

void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}

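// Spans of length 1 through arraysize(free_spans_) - 1 each get an
// exact-size list; longer spans are clamped into the last, "overflow",
// list. Appending at the tail, combined with the tail-first search in
// SearchFreeLists(), gives each list most-recently-used ordering.
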
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return make_scoped_ptr(span);
}

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(leftover.Pass());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving.Pass();
}

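// Example (illustrative values): Carve() on a free span covering blocks
// [200, 206) with blocks = 2 hands back a span for [200, 202) and
// re-inserts [202, 206) as a new free span; |num_free_blocks_| drops by
// the two carved-out blocks.
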
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

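// |spans_| keys each span by its first and last block only, a boundary-tag
// scheme: neighbor lookups in MergeIntoFreeLists() probe just the edge
// blocks, so interior blocks never need map entries regardless of span
// length.
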
void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}

bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if the first span is not in a free list or is shorter
  // than the segment.
  return !IsInFreeList(span) || span->length_ != length;
}

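// This check works because a fully unused segment has been coalesced back
// into a single free span of exactly |length| blocks at |offset|; any
// allocation splits that span, making the first span either non-free or
// shorter than the segment.
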
bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}

void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}

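// Advancing |offset| by span->length_ visits each span in the segment
// exactly once. Spans still held by clients are unregistered but not
// destroyed; their shared_memory_ is nulled so a caller can tell the
// backing segment is gone before handing the span back (note that
// MergeIntoFreeLists() DCHECKs shared_memory_).
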
void DiscardableSharedMemoryHeap::OnMemoryDump(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size,
    int32_t id,
    base::trace_event::ProcessMemoryDump* pmd) {
  std::string heap_name = base::StringPrintf(
      "%s/%s_%d", kMemoryAllocatorName, kMemoryAllocatorHeapNamePrefix, id);
  base::trace_event::MemoryAllocatorDump* dump =
      pmd->CreateAllocatorDump(heap_name);

  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_bytes = 0;
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    Span* span = spans_[offset];
    if (!IsInFreeList(span)) {
      allocated_objects_count++;
      allocated_objects_size_in_bytes += span->length_ * block_size_;
    }
    offset += span->length_;
  }

  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameOuterSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  static_cast<uint64_t>(size));
  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectsCount,
                  base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                  static_cast<uint64_t>(allocated_objects_count));
  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameInnerSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  static_cast<uint64_t>(allocated_objects_size_in_bytes));
}

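// kNameOuterSize is the segment's full byte footprint; kNameInnerSize
// counts only the blocks backing live (non-free) spans. The difference
// between the two is the memory held in this segment's free spans.
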
}  // namespace content