Add a UMA stat to be able to see if the user pods are shown on the start screen,
[chromium-blink-merge.git] / content / child / child_discardable_shared_memory_manager.cc
blob73b4cb167a80ec5613c247c71d88ffd564ad51e3
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/child/child_discardable_shared_memory_manager.h"
7 #include "base/atomic_sequence_num.h"
8 #include "base/bind.h"
9 #include "base/debug/crash_logging.h"
10 #include "base/memory/discardable_memory.h"
11 #include "base/memory/discardable_shared_memory.h"
12 #include "base/metrics/histogram.h"
13 #include "base/process/memory.h"
14 #include "base/process/process_metrics.h"
15 #include "base/strings/string_number_conversions.h"
16 #include "base/trace_event/trace_event.h"
17 #include "content/common/child_process_messages.h"
19 namespace content {
20 namespace {
22 // Default allocation size.
23 #if defined(OS_ANDROID)
24 // Larger allocation size on Android to avoid reaching the FD-limit.
25 const size_t kAllocationSize = 32 * 1024 * 1024;
26 #else
27 const size_t kAllocationSize = 4 * 1024 * 1024;
28 #endif
30 // Global atomic to generate unique discardable shared memory IDs.
31 base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id;
33 class DiscardableMemoryImpl : public base::DiscardableMemory {
34 public:
35 DiscardableMemoryImpl(ChildDiscardableSharedMemoryManager* manager,
36 scoped_ptr<DiscardableSharedMemoryHeap::Span> span)
37 : manager_(manager), span_(span.Pass()), is_locked_(true) {}
39 ~DiscardableMemoryImpl() override {
40 if (is_locked_)
41 manager_->UnlockSpan(span_.get());
43 manager_->ReleaseSpan(span_.Pass());
46 // Overridden from base::DiscardableMemory:
47 bool Lock() override {
48 DCHECK(!is_locked_);
50 if (!manager_->LockSpan(span_.get()))
51 return false;
53 is_locked_ = true;
54 return true;
56 void Unlock() override {
57 DCHECK(is_locked_);
59 manager_->UnlockSpan(span_.get());
60 is_locked_ = false;
62 void* Memory() const override {
63 DCHECK(is_locked_);
64 return reinterpret_cast<void*>(span_->start() * base::GetPageSize());
67 private:
68 ChildDiscardableSharedMemoryManager* const manager_;
69 scoped_ptr<DiscardableSharedMemoryHeap::Span> span_;
70 bool is_locked_;
72 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl);
75 void SendDeletedDiscardableSharedMemoryMessage(
76 scoped_refptr<ThreadSafeSender> sender,
77 DiscardableSharedMemoryId id) {
78 sender->Send(new ChildProcessHostMsg_DeletedDiscardableSharedMemory(id));
81 } // namespace
83 ChildDiscardableSharedMemoryManager::ChildDiscardableSharedMemoryManager(
84 ThreadSafeSender* sender)
85 : heap_(base::GetPageSize()), sender_(sender) {
88 ChildDiscardableSharedMemoryManager::~ChildDiscardableSharedMemoryManager() {
89 // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533
90 // DCHECK_EQ(heap_.GetSize(), heap_.GetSizeOfFreeLists());
91 if (heap_.GetSize())
92 MemoryUsageChanged(0, 0);
95 scoped_ptr<base::DiscardableMemory>
96 ChildDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory(
97 size_t size) {
98 base::AutoLock lock(lock_);
100 DCHECK_NE(size, 0u);
102 UMA_HISTOGRAM_CUSTOM_COUNTS("Memory.DiscardableAllocationSize",
103 size / 1024, // In KB
105 4 * 1024 * 1024, // 4 GB
106 50);
108 // Round up to multiple of page size.
109 size_t pages = (size + base::GetPageSize() - 1) / base::GetPageSize();
111 // Default allocation size in pages.
112 size_t allocation_pages = kAllocationSize / base::GetPageSize();
114 size_t slack = 0;
115 // When searching the free lists, allow a slack between required size and
116 // free span size that is less or equal to kAllocationSize. This is to
117 // avoid segments larger then kAllocationSize unless they are a perfect
118 // fit. The result is that large allocations can be reused without reducing
119 // the ability to discard memory.
120 if (pages < allocation_pages)
121 slack = allocation_pages - pages;
123 size_t heap_size_prior_to_releasing_purged_memory = heap_.GetSize();
124 for (;;) {
125 // Search free lists for suitable span.
126 scoped_ptr<DiscardableSharedMemoryHeap::Span> free_span =
127 heap_.SearchFreeLists(pages, slack);
128 if (!free_span.get())
129 break;
131 // Attempt to lock |free_span|. Delete span and search free lists again
132 // if locking failed.
133 if (free_span->shared_memory()->Lock(
134 free_span->start() * base::GetPageSize() -
135 reinterpret_cast<size_t>(free_span->shared_memory()->memory()),
136 free_span->length() * base::GetPageSize()) ==
137 base::DiscardableSharedMemory::FAILED) {
138 DCHECK(!free_span->shared_memory()->IsMemoryResident());
139 // We have to release purged memory before |free_span| can be destroyed.
140 heap_.ReleasePurgedMemory();
141 DCHECK(!free_span->shared_memory());
142 continue;
145 // Memory usage is guaranteed to have changed after having removed
146 // at least one span from the free lists.
147 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());
149 return make_scoped_ptr(new DiscardableMemoryImpl(this, free_span.Pass()));
152 // Release purged memory to free up the address space before we attempt to
153 // allocate more memory.
154 heap_.ReleasePurgedMemory();
156 // Make sure crash keys are up to date in case allocation fails.
157 if (heap_.GetSize() != heap_size_prior_to_releasing_purged_memory)
158 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());
160 size_t pages_to_allocate =
161 std::max(kAllocationSize / base::GetPageSize(), pages);
162 size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize();
164 DiscardableSharedMemoryId new_id =
165 g_next_discardable_shared_memory_id.GetNext();
167 // Ask parent process to allocate a new discardable shared memory segment.
168 scoped_ptr<base::DiscardableSharedMemory> shared_memory(
169 AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id));
171 // Create span for allocated memory.
172 scoped_ptr<DiscardableSharedMemoryHeap::Span> new_span(heap_.Grow(
173 shared_memory.Pass(), allocation_size_in_bytes,
174 base::Bind(&SendDeletedDiscardableSharedMemoryMessage, sender_, new_id)));
176 // Unlock and insert any left over memory into free lists.
177 if (pages < pages_to_allocate) {
178 scoped_ptr<DiscardableSharedMemoryHeap::Span> leftover =
179 heap_.Split(new_span.get(), pages);
180 leftover->shared_memory()->Unlock(
181 leftover->start() * base::GetPageSize() -
182 reinterpret_cast<size_t>(leftover->shared_memory()->memory()),
183 leftover->length() * base::GetPageSize());
184 heap_.MergeIntoFreeLists(leftover.Pass());
187 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());
189 return make_scoped_ptr(new DiscardableMemoryImpl(this, new_span.Pass()));
192 void ChildDiscardableSharedMemoryManager::ReleaseFreeMemory() {
193 base::AutoLock lock(lock_);
195 size_t heap_size_prior_to_releasing_memory = heap_.GetSize();
197 // Release both purged and free memory.
198 heap_.ReleasePurgedMemory();
199 heap_.ReleaseFreeMemory();
201 if (heap_.GetSize() != heap_size_prior_to_releasing_memory)
202 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());
205 bool ChildDiscardableSharedMemoryManager::LockSpan(
206 DiscardableSharedMemoryHeap::Span* span) {
207 base::AutoLock lock(lock_);
209 if (!span->shared_memory())
210 return false;
212 size_t offset = span->start() * base::GetPageSize() -
213 reinterpret_cast<size_t>(span->shared_memory()->memory());
214 size_t length = span->length() * base::GetPageSize();
216 switch (span->shared_memory()->Lock(offset, length)) {
217 case base::DiscardableSharedMemory::SUCCESS:
218 return true;
219 case base::DiscardableSharedMemory::PURGED:
220 span->shared_memory()->Unlock(offset, length);
221 return false;
222 case base::DiscardableSharedMemory::FAILED:
223 return false;
226 NOTREACHED();
227 return false;
230 void ChildDiscardableSharedMemoryManager::UnlockSpan(
231 DiscardableSharedMemoryHeap::Span* span) {
232 base::AutoLock lock(lock_);
234 DCHECK(span->shared_memory());
235 size_t offset = span->start() * base::GetPageSize() -
236 reinterpret_cast<size_t>(span->shared_memory()->memory());
237 size_t length = span->length() * base::GetPageSize();
239 return span->shared_memory()->Unlock(offset, length);
242 void ChildDiscardableSharedMemoryManager::ReleaseSpan(
243 scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
244 base::AutoLock lock(lock_);
246 // Delete span instead of merging it into free lists if memory is gone.
247 if (!span->shared_memory())
248 return;
250 heap_.MergeIntoFreeLists(span.Pass());
252 // Bytes of free memory changed.
253 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());
256 scoped_ptr<base::DiscardableSharedMemory>
257 ChildDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory(
258 size_t size,
259 DiscardableSharedMemoryId id) {
260 TRACE_EVENT2("renderer",
261 "ChildDiscardableSharedMemoryManager::"
262 "AllocateLockedDiscardableSharedMemory",
263 "size", size, "id", id);
265 base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle();
266 sender_->Send(
267 new ChildProcessHostMsg_SyncAllocateLockedDiscardableSharedMemory(
268 size, id, &handle));
269 scoped_ptr<base::DiscardableSharedMemory> memory(
270 new base::DiscardableSharedMemory(handle));
271 if (!memory->Map(size))
272 base::TerminateBecauseOutOfMemory(size);
273 return memory.Pass();
276 void ChildDiscardableSharedMemoryManager::MemoryUsageChanged(
277 size_t new_bytes_total,
278 size_t new_bytes_free) const {
279 TRACE_COUNTER2("renderer", "DiscardableMemoryUsage", "allocated",
280 new_bytes_total - new_bytes_free, "free", new_bytes_free);
282 static const char kDiscardableMemoryAllocatedKey[] =
283 "discardable-memory-allocated";
284 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey,
285 base::Uint64ToString(new_bytes_total));
287 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free";
288 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey,
289 base::Uint64ToString(new_bytes_free));
292 } // namespace content