#!/usr/bin/env @PYTHON_BASENAME@
from __future__ import print_function

import yaml
# Try to use the C parser.
try:
    from yaml import CLoader as Loader
except ImportError:
    print("For faster parsing, you may want to install libYAML for PyYAML")
    from yaml import Loader

import cgi
import fnmatch
import functools
from collections import defaultdict
from multiprocessing import Lock
import os
import re
import subprocess

import optpmap

try:
    # The previously builtin function `intern()` was moved
    # to the `sys` module in Python 3.
    from sys import intern
except ImportError:
    pass
try:
    dict.iteritems
except AttributeError:
    # Python 3: provide dict iteration helpers under the Python 2 names.
    def itervalues(d):
        return iter(d.values())

    def iteritems(d):
        return iter(d.items())
def html_file_name(filename):
    return filename.replace('/', '_').replace('#', '_') + ".html"
def make_link(File, Line):
    return "\"{}#L{}\"".format(html_file_name(File), Line)
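
# For illustration (hypothetical input): html_file_name('lib/Foo.cpp') yields
# 'lib_Foo.cpp.html', so make_link('lib/Foo.cpp', 42) yields
# '"lib_Foo.cpp.html#L42"', already quoted for use as an href attribute value.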
class Remark(yaml.YAMLObject):
    # Work-around for http://pyyaml.org/ticket/154.
    yaml_loader = Loader

    default_demangler = 'c++filt -n'
    demangler_proc = None
    @classmethod
    def set_demangler(cls, demangler):
        cls.demangler_proc = subprocess.Popen(demangler.split(),
                                              stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE)
        cls.demangler_lock = Lock()
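
    # demangle() below writes one mangled name per line to the demangler child
    # process (c++filt by default) and reads back a single demangled line;
    # demangler_lock keeps concurrent callers from interleaving their writes
    # and reads.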
    @classmethod
    def demangle(cls, name):
        with cls.demangler_lock:
            cls.demangler_proc.stdin.write((name + '\n').encode('utf-8'))
            cls.demangler_proc.stdin.flush()
            return cls.demangler_proc.stdout.readline().rstrip().decode('utf-8')
    # Intern all strings since there is a lot of duplication across filenames
    # and remark text.
    #
    # Change Args from a list of dicts to a tuple of tuples.  This saves
    # memory in two ways.  One, a small tuple is significantly smaller than a
    # small dict.  Two, using tuple instead of list allows Args to be directly
    # used as part of the key (in Python only immutable types are hashable).
    def _reduce_memory(self):
        self.Pass = intern(self.Pass)
        self.Name = intern(self.Name)
        try:
            # Can't intern unicode strings.
            self.Function = intern(self.Function)
        except:
            pass
        def _reduce_memory_dict(old_dict):
            new_dict = dict()
            for (k, v) in iteritems(old_dict):
                if type(k) is str:
                    k = intern(k)
                if type(v) is str:
                    v = intern(v)
                elif type(v) is dict:
                    # This handles [{'Caller': ..., 'DebugLoc': { 'File': ... }}]
                    v = _reduce_memory_dict(v)
                new_dict[k] = v
            return tuple(new_dict.items())
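
        # For example (hypothetical remark), an Args entry of
        # {'Callee': '_Z3foov', 'DebugLoc': {'File': 'a.cpp', 'Line': 3, 'Column': 0}}
        # becomes (('Callee', '_Z3foov'), ('DebugLoc', (('File', 'a.cpp'),
        # ('Line', 3), ('Column', 0)))).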
        self.Args = tuple([_reduce_memory_dict(arg_dict) for arg_dict in self.Args])
    # The inverse operation of the dictionary-related memory optimization in
    # _reduce_memory_dict.  E.g.
    #     (('DebugLoc', (('File', ...) ... ))) -> [{'DebugLoc': {'File': ...} ....}]
    def recover_yaml_structure(self):
        def tuple_to_dict(t):
            d = dict()
            for (k, v) in t:
                if type(v) is tuple:
                    v = tuple_to_dict(v)
                d[k] = v
            return d

        self.Args = [tuple_to_dict(arg_tuple) for arg_tuple in self.Args]
    def canonicalize(self):
        if not hasattr(self, 'Hotness'):
            self.Hotness = 0
        if not hasattr(self, 'Args'):
            self.Args = []
        self._reduce_memory()
    @property
    def File(self):
        return self.DebugLoc['File']

    @property
    def Line(self):
        return int(self.DebugLoc['Line'])

    @property
    def Column(self):
        return self.DebugLoc['Column']
    @property
    def DebugLocString(self):
        return "{}:{}:{}".format(self.File, self.Line, self.Column)
    @property
    def DemangledFunctionName(self):
        return self.demangle(self.Function)
    @property
    def Link(self):
        return make_link(self.File, self.Line)
    def getArgString(self, mapping):
        mapping = dict(list(mapping))
        dl = mapping.get('DebugLoc')
        if dl:
            del mapping['DebugLoc']

        assert(len(mapping) == 1)
        (key, value) = list(mapping.items())[0]

        if key == 'Caller' or key == 'Callee' or key == 'DirectCallee':
            value = cgi.escape(self.demangle(value))

        if dl and key != 'Caller':
            dl_dict = dict(list(dl))
            return u"<a href={}>{}</a>".format(
                make_link(dl_dict['File'], dl_dict['Line']), value)
        else:
            return value
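
    # For illustration (hypothetical values): a mapping of
    # (('Callee', '_Z3foov'), ('DebugLoc', (('File', 'a.cpp'), ('Line', 7), ('Column', 0))))
    # renders as '<a href="a.cpp.html#L7">foo()</a>' after demangling and escaping.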
    # Return a cached dictionary for the arguments.  The key for each entry is
    # the argument key (e.g. 'Callee' for inlining remarks).  The value is a
    # list containing the value (e.g. for 'Callee' the function) and
    # optionally a DebugLoc.
    def getArgDict(self):
        if hasattr(self, 'ArgDict'):
            return self.ArgDict
        self.ArgDict = {}
        for arg in self.Args:
            if len(arg) == 2:
                if arg[0][0] == 'DebugLoc':
                    dbgidx = 0
                else:
                    assert(arg[1][0] == 'DebugLoc')
                    dbgidx = 1

                key = arg[1 - dbgidx][0]
                entry = (arg[1 - dbgidx][1], arg[dbgidx][1])
            else:
                arg = arg[0]
                key = arg[0]
                entry = (arg[1], )

            self.ArgDict[key] = entry
        return self.ArgDict
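
    # For example (hypothetical remark): Args of
    # ((('Callee', 'foo'), ('DebugLoc', loc)), (('Cost', '12'),))
    # yields {'Callee': ('foo', loc), 'Cost': ('12',)}.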
    def getDiffPrefix(self):
        if hasattr(self, 'Added'):
            if self.Added:
                return '+'
            else:
                return '-'
        return ''
    @property
    def PassWithDiffPrefix(self):
        return self.getDiffPrefix() + self.Pass
    @property
    def message(self):
        # Args is a list of mappings (dictionaries)
        values = [self.getArgString(mapping) for mapping in self.Args]
        return "".join(values)
    @property
    def RelativeHotness(self):
        if self.max_hotness:
            return "{0:.2f}%".format(self.Hotness * 100. / self.max_hotness)
        else:
            return ''
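    # e.g. Hotness == 3000 with max_hotness == 12000 renders as '25.00%'.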
    @property
    def key(self):
        return (self.__class__, self.PassWithDiffPrefix, self.Name, self.File,
                self.Line, self.Column, self.Function, self.Args)
    def __hash__(self):
        return hash(self.key)
    def __eq__(self, other):
        return self.key == other.key
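
    # A remark is identified by `key`; get_remarks() below uses it to skip
    # duplicate remarks across input files.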
class Analysis(Remark):
    yaml_tag = '!Analysis'
class AnalysisFPCommute(Analysis):
    yaml_tag = '!AnalysisFPCommute'
class AnalysisAliasing(Analysis):
    yaml_tag = '!AnalysisAliasing'
class Passed(Remark):
    yaml_tag = '!Passed'
class Missed(Remark):
    yaml_tag = '!Missed'
class Failure(Missed):
    yaml_tag = '!Failure'
def get_remarks(input_file, filter_=None):
    max_hotness = 0
    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))

    with open(input_file) as f:
        docs = yaml.load_all(f, Loader=Loader)
        filter_e = None
        if filter_:
            filter_e = re.compile(filter_)

        for remark in docs:
            remark.canonicalize()
            # Skip remarks without a debug location or that are duplicates.
            if not hasattr(remark, 'DebugLoc') or remark.key in all_remarks:
                continue
            if filter_e and not filter_e.search(remark.Pass):
                continue
            all_remarks[remark.key] = remark
            file_remarks[remark.File][remark.Line].append(remark)
            # If we're reading back a diff YAML file, max_hotness is already
            # captured, which may actually be less than the max hotness found
            # in the file.
            if hasattr(remark, 'max_hotness'):
                max_hotness = remark.max_hotness
            max_hotness = max(max_hotness, remark.Hotness)
    return max_hotness, all_remarks, file_remarks
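
# Minimal usage sketch (assuming 'foo.opt.yaml' was produced, e.g. with clang's
# -fsave-optimization-record):
#   max_hotness, all_remarks, file_remarks = get_remarks('foo.opt.yaml',
#                                                        filter_='inline')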
def gather_results(filenames, num_jobs, should_print_progress, filter_=None):
    if should_print_progress:
        print('Reading YAML files...')
    if not Remark.demangler_proc:
        Remark.set_demangler(Remark.default_demangler)
    remarks = optpmap.pmap(
        get_remarks, filenames, num_jobs, should_print_progress, filter_)
    max_hotness = max(entry[0] for entry in remarks)
    def merge_file_remarks(file_remarks_job, all_remarks, merged):
        for filename, d in iteritems(file_remarks_job):
            for line, remarks in iteritems(d):
                for remark in remarks:
                    # Bring max_hotness into the remarks so that
                    # RelativeHotness does not depend on an external global.
                    remark.max_hotness = max_hotness
                    if remark.key not in all_remarks:
                        merged[filename][line].append(remark)
    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))
    for _, all_remarks_job, file_remarks_job in remarks:
        merge_file_remarks(file_remarks_job, all_remarks, file_remarks)
        all_remarks.update(all_remarks_job)
    return all_remarks, file_remarks, max_hotness != 0
def find_opt_files(*dirs_or_files):
    all = []
    for dir_or_file in dirs_or_files:
        if os.path.isfile(dir_or_file):
            all.append(dir_or_file)
        else:
            for dir, subdirs, files in os.walk(dir_or_file):
                # Exclude mounted directories and symlinks (os.walk default).
                subdirs[:] = [d for d in subdirs
                              if not os.path.ismount(os.path.join(dir, d))]
                for file in files:
                    if fnmatch.fnmatch(file, "*.opt.yaml*"):
                        all.append(os.path.join(dir, file))
    return all
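
# Putting it together, a minimal sketch (hypothetical build directory):
#   files = find_opt_files('./build')
#   all_remarks, file_remarks, has_hotness = gather_results(
#       files, num_jobs=4, should_print_progress=True)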