// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include <algorithm>

#include "base/memory/discardable_shared_memory.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}

bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

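// A Span tracks a contiguous run of blocks inside a single shared memory
// segment: |start_| is the index of the first block (the segment's base
// address divided by the block size) and |length_| is the number of blocks.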
DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory), start_(start), length_(length) {
}

DiscardableSharedMemoryHeap::Span::~Span() {
}

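// A ScopedMemorySegment keeps a shared memory segment alive while the heap
// tracks it. Its destructor releases the segment's memory back to the heap
// and runs the deletion callback.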
DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(shared_memory.Pass()),
      size_(size),
      deleted_callback_(deleted_callback) {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}

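// The heap requires a non-zero, power-of-two block size so that the
// alignment checks below can be done by masking with |block_size_ - 1|.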
DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0u);
}

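// Grow() takes ownership of a shared memory segment and returns a single
// span covering all of its blocks. The segment is also added to
// |memory_segments_| so its residency and usage can be queried later.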
scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
    scoped_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  scoped_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking if segment is resident by adding it to |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, shared_memory.Pass(), size, deleted_callback));

  return span.Pass();
}

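// MergeIntoFreeLists() returns |span| to the free lists, first coalescing
// it with a free previous span and/or a free next span found through
// |spans_|.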
void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    scoped_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(span.Pass());
}

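// Split() truncates |span| to |blocks| blocks and returns the remaining
// tail as a newly registered span.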
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK_LT(blocks, span->length_);

  scoped_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK_IMPLIES(leftover->length_ > 1,
                 spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover.Pass();
}

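// SearchFreeLists() looks for a free span of at least |blocks| and at most
// |blocks| + |slack| blocks, trying exact-size free lists first and then
// the overflow list; a match is carved down to exactly |blocks| blocks.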
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search array of free lists for a suitable span.
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span located in tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search overflow free list for a suitable span, starting with the most
  // recently used span at the tail and moving towards the head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}

void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  // Erase all free segments after rearranging the segments in such a way
  // that used segments precede all free segments.
  memory_segments_.erase(
      std::partition(
          memory_segments_.begin(), memory_segments_.end(),
          [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
      memory_segments_.end());
}

void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
  // Erase all purged segments after rearranging the segments in such a way
  // that resident segments precede all purged segments.
  memory_segments_.erase(
      std::partition(memory_segments_.begin(), memory_segments_.end(),
                     [](const ScopedMemorySegment* segment) {
                       return segment->IsResident();
                     }),
      memory_segments_.end());
}

size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}

size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}

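// Free spans are bucketed by length: a span of N blocks goes into
// |free_spans_[N - 1]|, and spans too long for an exact-size list share the
// last (overflow) list. Appending keeps the most recently used span at the
// tail.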
void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}

scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return make_scoped_ptr(span);
}

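// Carve() removes |span| from its free list and returns its first |blocks|
// blocks; any leftover blocks are split off into a new span that is put
// back on a free list.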
scoped_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  scoped_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    scoped_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(leftover.Pass());

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving.Pass();
}

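// Each span is registered in |spans_| under both its first and its last
// block index so that neighboring spans can be found from either side when
// coalescing.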
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}

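// A segment counts as used unless a single free span covers every block of
// the segment.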
bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if first span is not in free list or shorter than segment.
  return !IsInFreeList(span) || span->length_ != length;
}

bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}

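// ReleaseMemory() unregisters every span belonging to |shared_memory| and
// removes any of them that sit in a free list, updating the block counters
// accordingly.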
void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}

}  // namespace content