# [MemSheriff] More sendto parameter issues.
# [chromium-blink-merge.git] / tools / deep_memory_profiler / lib / bucket.py
# blob 310d2e03c75bd2506b18e2d5a8533358655a572f
1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 import logging
6 import os
8 from lib.symbol import FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS, TYPEINFO_SYMBOLS
11 LOGGER = logging.getLogger('dmprof')
class Bucket(object):
  """Represents a bucket, which is a unit of memory block classification."""

  def __init__(self, stacktrace, allocator_type, typeinfo, typeinfo_name):
    self._stacktrace = stacktrace
    self._allocator_type = allocator_type
    self._typeinfo = typeinfo
    self._typeinfo_name = typeinfo_name

    # Until symbolize() runs, the "symbolized" fields fall back to the raw
    # inputs so the bucket is printable either way.
    self._symbolized_stackfunction = stacktrace
    self._symbolized_joined_stackfunction = ''
    self._symbolized_stacksourcefile = stacktrace
    self._symbolized_joined_stacksourcefile = ''
    self._symbolized_typeinfo = typeinfo_name

    self.component_cache = ''

  def __str__(self):
    # Format: "<allocator> t<typeinfo> [n<name>] <frame>(@<file>) ...".
    if self._symbolized_typeinfo == 'no typeinfo':
      type_parts = ['tno_typeinfo']
    else:
      type_parts = ['t' + self._symbolized_typeinfo,
                    'n' + self._typeinfo_name]
    frames = ['%s(@%s)' % frame_and_file
              for frame_and_file
              in zip(self._symbolized_stackfunction,
                     self._symbolized_stacksourcefile)]
    return ' '.join([self._allocator_type] + type_parts + frames)

  def symbolize(self, symbol_mapping_cache):
    """Makes a symbolized stacktrace and typeinfo with |symbol_mapping_cache|.

    Args:
        symbol_mapping_cache: A SymbolMappingCache object.
    """
    # TODO(dmikurube): Fill explicitly with numbers if symbol not found.
    lookup = symbol_mapping_cache.lookup
    self._symbolized_stackfunction = [
        lookup(FUNCTION_SYMBOLS, address) for address in self._stacktrace]
    self._symbolized_joined_stackfunction = ' '.join(
        self._symbolized_stackfunction)
    self._symbolized_stacksourcefile = [
        lookup(SOURCEFILE_SYMBOLS, address) for address in self._stacktrace]
    self._symbolized_joined_stacksourcefile = ' '.join(
        self._symbolized_stacksourcefile)
    if self._typeinfo:
      # An empty lookup result also degrades to 'no typeinfo'.
      self._symbolized_typeinfo = (
          lookup(TYPEINFO_SYMBOLS, self._typeinfo) or 'no typeinfo')
    else:
      self._symbolized_typeinfo = 'no typeinfo'

  def clear_component_cache(self):
    self.component_cache = ''

  @property
  def stacktrace(self):
    return self._stacktrace

  @property
  def allocator_type(self):
    return self._allocator_type

  @property
  def typeinfo(self):
    return self._typeinfo

  @property
  def typeinfo_name(self):
    return self._typeinfo_name

  @property
  def symbolized_stackfunction(self):
    return self._symbolized_stackfunction

  @property
  def symbolized_joined_stackfunction(self):
    return self._symbolized_joined_stackfunction

  @property
  def symbolized_stacksourcefile(self):
    return self._symbolized_stacksourcefile

  @property
  def symbolized_joined_stacksourcefile(self):
    return self._symbolized_joined_stacksourcefile

  @property
  def symbolized_typeinfo(self):
    return self._symbolized_typeinfo
class BucketSet(object):
  """Represents a set of buckets, indexed by integer bucket id."""

  def __init__(self):
    self._buckets = {}  # Maps bucket id (int) -> Bucket.
    self._code_addresses = set()  # Every stack frame address seen.
    self._typeinfo_addresses = set()  # Every typeinfo address seen.

  def load(self, prefix):
    """Loads all related bucket files.

    Args:
        prefix: A prefix string for bucket file names.
    """
    LOGGER.info('Loading bucket files.')

    n = 0
    skipped = 0
    while True:
      path = '%s.%04d.buckets' % (prefix, n)
      # Sequence numbers may have gaps; tolerate up to 10 consecutive
      # missing or empty files before concluding there are no more.
      if not os.path.exists(path) or not os.stat(path).st_size:
        if skipped > 10:
          break
        n += 1
        skipped += 1
        continue
      LOGGER.info(' %s' % path)
      with open(path, 'r') as f:
        self._load_file(f)
      n += 1
      skipped = 0

  def _load_file(self, bucket_f):
    """Parses one opened bucket file into self._buckets.

    Each line is:
      <bucket_id> <allocator_type> [t<typeinfo_hex>] [n<typeinfo_name>]
      <frame_hex>...
    """
    for line in bucket_f:
      words = line.split()
      typeinfo = None
      typeinfo_name = ''
      stacktrace_begin = 2
      for index, word in enumerate(words):
        if index < 2:
          # words[0] is the bucket id and words[1] the allocator type.
          continue
        if word[0] == 't':
          typeinfo = int(word[1:], 16)
          self._typeinfo_addresses.add(typeinfo)
        elif word[0] == 'n':
          typeinfo_name = word[1:]
        else:
          # First word that is neither 't...' nor 'n...' starts the stack.
          stacktrace_begin = index
          break
      stacktrace = [int(address, 16) for address in words[stacktrace_begin:]]
      self._code_addresses.update(stacktrace)
      self._buckets[int(words[0])] = Bucket(
          stacktrace, words[1], typeinfo, typeinfo_name)

  def __iter__(self):
    # dict.items() iterates on both Python 2 and 3 (iteritems is 2-only).
    for bucket_id, bucket_content in self._buckets.items():
      yield bucket_id, bucket_content

  def __getitem__(self, bucket_id):
    return self._buckets[bucket_id]

  def get(self, bucket_id):
    """Returns the bucket for |bucket_id|, or None if not loaded."""
    return self._buckets.get(bucket_id)

  def symbolize(self, symbol_mapping_cache):
    """Symbolizes every loaded bucket with |symbol_mapping_cache|."""
    for bucket_content in self._buckets.values():
      bucket_content.symbolize(symbol_mapping_cache)

  def clear_component_cache(self):
    for bucket_content in self._buckets.values():
      bucket_content.clear_component_cache()

  def iter_addresses(self, symbol_type):
    """Yields all addresses to symbolize for the given |symbol_type|."""
    if symbol_type in [FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS]:
      for function in self._code_addresses:
        yield function
    else:
      for function in self._typeinfo_addresses:
        yield function