from __future__ import print_function

import yaml
# Try to use the C parser.
try:
    from yaml import CLoader as Loader
except ImportError:
    print("For faster parsing, you may want to install libYAML for PyYAML")
    from yaml import Loader

import cgi
import fnmatch
import functools
import os.path
import re
import subprocess
from collections import defaultdict
from multiprocessing import Lock

try:
    # The previously builtin function `intern()` was moved
    # to the `sys` module in Python 3.
    from sys import intern
except ImportError:
    pass

import optpmap

try:
    dict.iteritems
except AttributeError:
    # Python 3: dict has no iteritems()/itervalues(); wrap items()/values().
    def itervalues(d):
        return iter(d.values())

    def iteritems(d):
        return iter(d.items())


def html_file_name(filename):
    return filename.replace('/', '_').replace('#', '_') + ".html"


def make_link(File, Line):
    return "\"{}#L{}\"".format(html_file_name(File), Line)
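
# Illustrative example (made-up inputs): html_file_name('lib/foo.c') yields
# 'lib_foo.c.html', and make_link('lib/foo.c', 42) yields the pre-quoted href
# target '"lib_foo.c.html#L42"' used in the generated HTML.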


class Remark(yaml.YAMLObject):
    # Work-around for http://pyyaml.org/ticket/154.
    yaml_loader = Loader

    default_demangler = 'c++filt -n'
    demangler_proc = None

    @classmethod
    def set_demangler(cls, demangler):
        cls.demangler_proc = subprocess.Popen(demangler.split(),
                                              stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE)
        cls.demangler_lock = Lock()

    @classmethod
    def demangle(cls, name):
        with cls.demangler_lock:
            cls.demangler_proc.stdin.write((name + '\n').encode('utf-8'))
            cls.demangler_proc.stdin.flush()
            return cls.demangler_proc.stdout.readline().rstrip().decode('utf-8')
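
    # Usage sketch (assumes a working `c++filt` on PATH; the mangled name is
    # just an example):
    #   Remark.set_demangler(Remark.default_demangler)
    #   Remark.demangle('_Z3foov')  # -> 'foo()'
    # The lock serializes writes to the single demangler subprocess so
    # concurrent callers do not interleave their requests.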

    # Intern all strings since we have a lot of duplication across filenames,
    # remark text.
    #
    # Change Args from a list of dicts to a tuple of tuples.  This saves
    # memory in two ways.  One, a small tuple is significantly smaller than a
    # small dict.  Two, using tuple instead of list allows Args to be directly
    # used as part of the key (in Python only immutable types are hashable).
    def _reduce_memory(self):
        self.Pass = intern(self.Pass)
        self.Name = intern(self.Name)
        try:
            # Can't intern unicode strings.
            self.Function = intern(self.Function)
        except TypeError:
            pass

        def _reduce_memory_dict(old_dict):
            new_dict = dict()
            for (k, v) in iteritems(old_dict):
                if type(k) is str:
                    k = intern(k)

                if type(v) is str:
                    v = intern(v)
                elif type(v) is dict:
                    # This handles [{'Caller': ..., 'DebugLoc': { 'File': ... }}]
                    v = _reduce_memory_dict(v)
                new_dict[k] = v
            return tuple(new_dict.items())

        self.Args = tuple([_reduce_memory_dict(arg_dict) for arg_dict in self.Args])
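
    # For illustration (made-up values), _reduce_memory() turns
    #   self.Args == [{'Callee': '_Z3barv'}]
    # into the hashable, interned form
    #   self.Args == ((('Callee', '_Z3barv'),),)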

    # The inverse operation of the dictionary-related memory optimization in
    # _reduce_memory_dict.  E.g.
    #     (('DebugLoc', (('File', ...) ... ))) -> [{'DebugLoc': {'File': ...} ....}]
    def recover_yaml_structure(self):
        def tuple_to_dict(t):
            d = dict()
            for (k, v) in t:
                if type(v) is tuple:
                    # Nested tuples correspond to nested dicts (e.g. DebugLoc).
                    v = tuple_to_dict(v)
                d[k] = v
            return d

        self.Args = [tuple_to_dict(arg_tuple) for arg_tuple in self.Args]

    def canonicalize(self):
        if not hasattr(self, 'Hotness'):
            self.Hotness = 0
        if not hasattr(self, 'Args'):
            self.Args = []
        self._reduce_memory()

    @property
    def File(self):
        return self.DebugLoc['File']

    @property
    def Line(self):
        return int(self.DebugLoc['Line'])

    @property
    def Column(self):
        return self.DebugLoc['Column']

    @property
    def DebugLocString(self):
        return "{}:{}:{}".format(self.File, self.Line, self.Column)

    @property
    def DemangledFunctionName(self):
        return self.demangle(self.Function)

    @property
    def Link(self):
        return make_link(self.File, self.Line)

    def getArgString(self, mapping):
        mapping = dict(list(mapping))
        dl = mapping.get('DebugLoc')
        if dl:
            del mapping['DebugLoc']

        assert(len(mapping) == 1)
        (key, value) = list(mapping.items())[0]

        if key == 'Caller' or key == 'Callee' or key == 'DirectCallee':
            value = cgi.escape(self.demangle(value))

        if dl and key != 'Caller':
            dl_dict = dict(list(dl))
            return u"<a href={}>{}</a>".format(
                make_link(dl_dict['File'], dl_dict['Line']), value)
        else:
            return value
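
    # Rough example (hypothetical argument, demangler already running): for a
    # mapping like
    #   (('Callee', 'bar'), ('DebugLoc', (('File', 'a.c'), ('Line', 3), ('Column', 1))))
    # this returns something like '<a href="a_c.html#L3">bar</a>'.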

    # Return a cached dictionary for the arguments.  The key for each entry is
    # the argument key (e.g. 'Callee' for inlining remarks).  The value is a
    # tuple containing the value (e.g. for 'Callee' the function) and
    # optionally a DebugLoc.
    def getArgDict(self):
        if hasattr(self, 'ArgDict'):
            return self.ArgDict
        self.ArgDict = {}
        for arg in self.Args:
            if len(arg) == 2:
                if arg[0][0] == 'DebugLoc':
                    dbgidx = 0
                else:
                    assert(arg[1][0] == 'DebugLoc')
                    dbgidx = 1

                key = arg[1 - dbgidx][0]
                entry = (arg[1 - dbgidx][1], arg[dbgidx][1])
            else:
                arg = arg[0]
                key = arg[0]
                entry = (arg[1], )

            self.ArgDict[key] = entry
        return self.ArgDict
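
    # For the example mapping shown above getArgString, the cached result
    # would look roughly like
    #   {'Callee': ('bar', (('File', 'a.c'), ('Line', 3), ('Column', 1)))}
    # with a one-element tuple instead when no DebugLoc accompanies the value.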

    def getDiffPrefix(self):
        if hasattr(self, 'Added'):
            if self.Added:
                return '+'
            else:
                return '-'
        return ''

    @property
    def PassWithDiffPrefix(self):
        return self.getDiffPrefix() + self.Pass

    @property
    def message(self):
        # Args is a list of mappings (dictionaries)
        values = [self.getArgString(mapping) for mapping in self.Args]
        return "".join(values)

    @property
    def RelativeHotness(self):
        if self.max_hotness:
            return "{0:.2f}%".format(self.Hotness * 100. / self.max_hotness)
        else:
            return ''
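
    # E.g. (made-up counts) Hotness == 300 with max_hotness == 1200 renders
    # as '25.00%'.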

    @property
    def key(self):
        return (self.__class__, self.PassWithDiffPrefix, self.Name, self.File,
                self.Line, self.Column, self.Function, self.Args)

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        return self.key == other.key


class Analysis(Remark):
    yaml_tag = '!Analysis'


class AnalysisFPCommute(Analysis):
    yaml_tag = '!AnalysisFPCommute'


class AnalysisAliasing(Analysis):
    yaml_tag = '!AnalysisAliasing'


class Passed(Remark):
    yaml_tag = '!Passed'


class Missed(Remark):
    yaml_tag = '!Missed'


def get_remarks(input_file, filter_):
    max_hotness = 0
    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))

    with open(input_file) as f:
        docs = yaml.load_all(f, Loader=Loader)

        filter_e = None
        if filter_:
            filter_e = re.compile(filter_)
        for remark in docs:
            remark.canonicalize()
            # Avoid remarks without debug location or if they are duplicated
            if not hasattr(remark, 'DebugLoc') or remark.key in all_remarks:
                continue

            if filter_ and not filter_e.search(remark.Pass):
                continue

            all_remarks[remark.key] = remark

            file_remarks[remark.File][remark.Line].append(remark)

            # If we're reading back a diff yaml file, max_hotness is already
            # captured which may actually be less than the max hotness found
            # in the file.
            if hasattr(remark, 'max_hotness'):
                max_hotness = remark.max_hotness
            max_hotness = max(max_hotness, remark.Hotness)

    return max_hotness, all_remarks, file_remarks
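
# Standalone usage sketch (the file name is hypothetical):
#   max_hotness, all_remarks, file_remarks = get_remarks('foo.opt.yaml', '')
# all_remarks is keyed by Remark.key, while file_remarks is nested as
# file_remarks[File][Line] -> [Remark, ...].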


def gather_results(filenames, num_jobs, should_print_progress, filter_):
    if should_print_progress:
        print('Reading YAML files...')
    if not Remark.demangler_proc:
        Remark.set_demangler(Remark.default_demangler)
    remarks = optpmap.pmap(
        get_remarks, filenames, num_jobs, should_print_progress, filter_)
    max_hotness = max(entry[0] for entry in remarks)

    def merge_file_remarks(file_remarks_job, all_remarks, merged):
        for filename, d in iteritems(file_remarks_job):
            for line, remarks in iteritems(d):
                for remark in remarks:
                    # Bring max_hotness into the remarks so that
                    # RelativeHotness does not depend on an external global.
                    remark.max_hotness = max_hotness
                    if remark.key not in all_remarks:
                        merged[filename][line].append(remark)

    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))
    for _, all_remarks_job, file_remarks_job in remarks:
        merge_file_remarks(file_remarks_job, all_remarks, file_remarks)
        all_remarks.update(all_remarks_job)

    return all_remarks, file_remarks, max_hotness != 0


def find_opt_files(*dirs_or_files):
    all = []
    for dir_or_file in dirs_or_files:
        if os.path.isfile(dir_or_file):
            all.append(dir_or_file)
        else:
            for dir, subdirs, files in os.walk(dir_or_file):
                # Exclude mounted directories and symlinks (os.walk default).
                subdirs[:] = [d for d in subdirs
                              if not os.path.ismount(os.path.join(dir, d))]
                for file in files:
                    if fnmatch.fnmatch(file, "*.opt.yaml*"):
                        all.append(os.path.join(dir, file))
    return all
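
# Typical driver usage (paths and argument values are examples only):
#   files = find_opt_files('./build')
#   all_remarks, file_remarks, has_hotness = gather_results(
#       files, num_jobs=1, should_print_progress=False, filter_='')
# has_hotness reflects whether any remark carried profile hotness data.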