[chromium-blink-merge.git] base/trace_event/trace_event_memory_overhead.cc
blob 0cc3d59890c57884e8fe035d1afba7f6600528b1

// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/trace_event_memory_overhead.h"

#include <algorithm>

#include "base/logging.h"
#include "base/memory/ref_counted_memory.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/memory_allocator_dump.h"
#include "base/trace_event/process_memory_dump.h"
#include "base/values.h"

namespace {
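// Rounds |size| up to the next multiple of |alignment|. The bitmask trick
// below assumes |alignment| is a power of two.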
size_t RoundUp(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}
}  // namespace

namespace base {
namespace trace_event {

TraceEventMemoryOverhead::TraceEventMemoryOverhead() {
}

TraceEventMemoryOverhead::~TraceEventMemoryOverhead() {
}
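
// Adds |count| objects and their allocated/resident byte sizes to the bucket
// keyed by |object_type|, creating the bucket on first use.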
void TraceEventMemoryOverhead::AddOrCreateInternal(
    const char* object_type,
    size_t count,
    size_t allocated_size_in_bytes,
    size_t resident_size_in_bytes) {
  auto it = allocated_objects_.find(object_type);
  if (it == allocated_objects_.end()) {
    allocated_objects_.insert(std::make_pair(
        object_type,
        ObjectCountAndSize(
            {count, allocated_size_in_bytes, resident_size_in_bytes})));
    return;
  }
  it->second.count += count;
  it->second.allocated_size_in_bytes += allocated_size_in_bytes;
  it->second.resident_size_in_bytes += resident_size_in_bytes;
}
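
// Convenience overload for when only the allocated size is known; the
// resident size is assumed to equal the allocated size.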
void TraceEventMemoryOverhead::Add(const char* object_type,
                                   size_t allocated_size_in_bytes) {
  Add(object_type, allocated_size_in_bytes, allocated_size_in_bytes);
}

void TraceEventMemoryOverhead::Add(const char* object_type,
                                   size_t allocated_size_in_bytes,
                                   size_t resident_size_in_bytes) {
  AddOrCreateInternal(object_type, 1, allocated_size_in_bytes,
                      resident_size_in_bytes);
}
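
// Estimates the cost of a std::string: the object itself plus its
// heap-allocated character buffer.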
void TraceEventMemoryOverhead::AddString(const std::string& str) {
  // The numbers below are empirical and mainly based on profiling of
  // real-world std::string implementations:
  // - even short strings end up malloc()-ing at least 32 bytes.
  // - longer strings seem to malloc() multiples of 16 bytes.
  Add("std::string",
      sizeof(std::string) + std::max<size_t>(RoundUp(str.capacity(), 16), 32u));
}
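
// Accounts for the RefCountedString object plus the std::string it wraps.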
void TraceEventMemoryOverhead::AddRefCountedString(
    const RefCountedString& str) {
  Add("RefCountedString", sizeof(RefCountedString));
  AddString(str.data());
}
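
// Walks a base::Value tree recursively, accounting for each node and any
// string or binary payload it owns.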
void TraceEventMemoryOverhead::AddValue(const Value& value) {
  switch (value.GetType()) {
    case Value::TYPE_NULL:
    case Value::TYPE_BOOLEAN:
    case Value::TYPE_INTEGER:
    case Value::TYPE_DOUBLE:
      Add("FundamentalValue", sizeof(Value));
      break;

    case Value::TYPE_STRING: {
      const StringValue* string_value = nullptr;
      value.GetAsString(&string_value);
      Add("StringValue", sizeof(StringValue));
      AddString(string_value->GetString());
    } break;

    case Value::TYPE_BINARY: {
      const BinaryValue* binary_value = nullptr;
      value.GetAsBinary(&binary_value);
      Add("BinaryValue", sizeof(BinaryValue) + binary_value->GetSize());
    } break;

    case Value::TYPE_DICTIONARY: {
      const DictionaryValue* dictionary_value = nullptr;
      value.GetAsDictionary(&dictionary_value);
      Add("DictionaryValue", sizeof(DictionaryValue));
      for (DictionaryValue::Iterator it(*dictionary_value); !it.IsAtEnd();
           it.Advance()) {
        AddString(it.key());
        AddValue(it.value());
      }
    } break;

    case Value::TYPE_LIST: {
      const ListValue* list_value = nullptr;
      value.GetAsList(&list_value);
      Add("ListValue", sizeof(ListValue));
      for (const Value* v : *list_value)
        AddValue(*v);
    } break;

    default:
      NOTREACHED();
  }
}
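
// Accounts for the footprint of this TraceEventMemoryOverhead instance
// itself, including any heap storage used by the underlying map.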
void TraceEventMemoryOverhead::AddSelf() {
  size_t estimated_size = sizeof(*this);
  // If the SmallMap did overflow its static capacity, its elements will be
  // allocated on the heap and have to be accounted separately.
  if (allocated_objects_.UsingFullMap())
    estimated_size += sizeof(map_type::value_type) * allocated_objects_.size();
  Add("TraceEventMemoryOverhead", estimated_size);
}
void TraceEventMemoryOverhead::Update(const TraceEventMemoryOverhead& other) {
  for (const auto& it : other.allocated_objects_) {
    AddOrCreateInternal(it.first, it.second.count,
                        it.second.allocated_size_in_bytes,
                        it.second.resident_size_in_bytes);
  }
}
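
// Emits one MemoryAllocatorDump per tracked object type under |base_name|,
// reporting allocated size, resident size and object count.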
void TraceEventMemoryOverhead::DumpInto(const char* base_name,
                                        ProcessMemoryDump* pmd) const {
  for (const auto& it : allocated_objects_) {
    std::string dump_name = StringPrintf("%s/%s", base_name, it.first);
    MemoryAllocatorDump* mad = pmd->CreateAllocatorDump(dump_name);
    mad->AddScalar(MemoryAllocatorDump::kNameSize,
                   MemoryAllocatorDump::kUnitsBytes,
                   it.second.allocated_size_in_bytes);
    mad->AddScalar("resident_size", MemoryAllocatorDump::kUnitsBytes,
                   it.second.resident_size_in_bytes);
    mad->AddScalar(MemoryAllocatorDump::kNameObjectsCount,
                   MemoryAllocatorDump::kUnitsObjects, it.second.count);
  }
}

}  // namespace trace_event
}  // namespace base