from __future__ import print_function

import io
import yaml

# Try to use the C parser.
try:
    from yaml import CLoader as Loader
except ImportError:
    print("For faster parsing, you may want to install libYAML for PyYAML")
    from yaml import Loader

import fnmatch
import functools
import html
import os
import re
import subprocess
from collections import defaultdict
from multiprocessing import Lock

import optpmap  # Local helper module providing the parallel map used below.

# The previously builtin function `intern()` was moved
# to the `sys` module in Python 3.
from sys import intern
try:
    dict.iteritems
except AttributeError:
    # Python 3: provide iteritems()/itervalues() shims over the dict views.
    def itervalues(d):
        return iter(d.values())

    def iteritems(d):
        return iter(d.items())
def html_file_name(filename):
    return filename.replace("/", "_").replace("#", "_") + ".html"


def make_link(File, Line):
    return '"{}#L{}"'.format(html_file_name(File), Line)
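# For example, make_link("lib/foo.cpp", 42) returns '"lib_foo.cpp.html#L42"',
# i.e. an already-quoted href pointing at a line anchor in the generated page.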
class Remark(yaml.YAMLObject):
    # Work-around for http://pyyaml.org/ticket/154.
    yaml_loader = Loader

    default_demangler = "c++filt -n"
    demangler_proc = None

    @classmethod
    def set_demangler(cls, demangler):
        cls.demangler_proc = subprocess.Popen(
            demangler.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE
        )
        cls.demangler_lock = Lock()
    @classmethod
    def demangle(cls, name):
        with cls.demangler_lock:
            cls.demangler_proc.stdin.write((name + "\n").encode("utf-8"))
            cls.demangler_proc.stdin.flush()
            return cls.demangler_proc.stdout.readline().rstrip().decode("utf-8")
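    # Illustrative use of the demangler hook above (assumes a working
    # `c++filt` on PATH): after Remark.set_demangler(Remark.default_demangler),
    # Remark.demangle("_Z3foov") returns "foo()".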
    # Intern all strings since we have a lot of duplication across filenames
    # and remark text.
    #
    # Change Args from a list of dicts to a tuple of tuples. This saves
    # memory in two ways. One, a small tuple is significantly smaller than a
    # small dict. Two, using tuple instead of list allows Args to be directly
    # used as part of the key (in Python only immutable types are hashable).
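    # For example (illustrative values), Args of the form
    #     [{'Callee': 'foo'}, {'String': ' inlined into '}, {'Caller': 'bar'}]
    # becomes
    #     ((('Callee', 'foo'),), (('String', ' inlined into '),), (('Caller', 'bar'),))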
    def _reduce_memory(self):
        self.Pass = intern(self.Pass)
        self.Name = intern(self.Name)
        try:
            # Can't intern unicode strings.
            self.Function = intern(self.Function)
        except Exception:
            pass
        def _reduce_memory_dict(old_dict):
            new_dict = dict()
            for (k, v) in iteritems(old_dict):
                if type(k) is str:
                    k = intern(k)

                if type(v) is str:
                    v = intern(v)
                elif type(v) is dict:
                    # This handles [{'Caller': ..., 'DebugLoc': { 'File': ... }}]
                    v = _reduce_memory_dict(v)
                new_dict[k] = v
            return tuple(new_dict.items())

        self.Args = tuple([_reduce_memory_dict(arg_dict) for arg_dict in self.Args])
    # The inverse operation of the dictionary-related memory optimization in
    # _reduce_memory_dict. E.g.
    #     (('DebugLoc', (('File', ...) ... ))) -> [{'DebugLoc': {'File': ...} ....}]
    def recover_yaml_structure(self):
        def tuple_to_dict(t):
            d = dict()
            for (k, v) in t:
                if type(v) is tuple:
                    v = tuple_to_dict(v)
                d[k] = v
            return d

        self.Args = [tuple_to_dict(arg_tuple) for arg_tuple in self.Args]
    def canonicalize(self):
        if not hasattr(self, "Hotness"):
            self.Hotness = 0
        if not hasattr(self, "Args"):
            self.Args = []
        self._reduce_memory()

    @property
    def File(self):
        return self.DebugLoc["File"]

    @property
    def Line(self):
        return int(self.DebugLoc["Line"])

    @property
    def Column(self):
        return self.DebugLoc["Column"]
    @property
    def DebugLocString(self):
        return "{}:{}:{}".format(self.File, self.Line, self.Column)

    @property
    def DemangledFunctionName(self):
        return self.demangle(self.Function)

    @property
    def Link(self):
        return make_link(self.File, self.Line)
    def getArgString(self, mapping):
        mapping = dict(list(mapping))
        dl = mapping.get("DebugLoc")
        if dl:
            del mapping["DebugLoc"]

        assert len(mapping) == 1
        (key, value) = list(mapping.items())[0]

        if key == "Caller" or key == "Callee" or key == "DirectCallee":
            value = html.escape(self.demangle(value))

        if dl and key != "Caller":
            dl_dict = dict(list(dl))
            return "<a href={}>{}</a>".format(
                make_link(dl_dict["File"], dl_dict["Line"]), value
            )

        return value
    # Return a cached dictionary for the arguments. The key for each entry is
    # the argument key (e.g. 'Callee' for inlining remarks). The value is a
    # tuple containing the value (e.g. for 'Callee' the function) and
    # optionally a DebugLoc.
    def getArgDict(self):
        if hasattr(self, "ArgDict"):
            return self.ArgDict
        self.ArgDict = {}
        for arg in self.Args:
            if len(arg) == 2:
                # The argument carries a DebugLoc; it may appear before or
                # after the value itself.
                if arg[0][0] == "DebugLoc":
                    dbgidx = 0
                else:
                    assert arg[1][0] == "DebugLoc"
                    dbgidx = 1

                key = arg[1 - dbgidx][0]
                entry = (arg[1 - dbgidx][1], arg[dbgidx][1])
            else:
                # Plain argument without a DebugLoc.
                key = arg[0][0]
                entry = (arg[0][1],)

            self.ArgDict[key] = entry
        return self.ArgDict
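    # Illustrative shape of the cached dictionary built above for an inlining
    # remark (values are made up):
    #   {'Callee': ('foo', (('File', 'a.c'), ('Line', 2), ('Column', 0))),
    #    'Caller': ('bar', (('File', 'a.c'), ('Line', 7), ('Column', 0)))}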
    def getDiffPrefix(self):
        if hasattr(self, "Added"):
            if self.Added:
                return "+"
            else:
                return "-"
        return ""

    @property
    def PassWithDiffPrefix(self):
        return self.getDiffPrefix() + self.Pass
    @property
    def message(self):
        # Args is a list of mappings (dictionaries).
        values = [self.getArgString(mapping) for mapping in self.Args]
        return "".join(values)
    @property
    def RelativeHotness(self):
        if self.max_hotness:
            return "{0:.2f}%".format(self.Hotness * 100.0 / self.max_hotness)
        return ""
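    # Worked example: with Hotness == 50 and max_hotness == 200, the property
    # above yields "25.00%".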
    @property
    def key(self):
        return (
            self.__class__,
            self.PassWithDiffPrefix,
            self.Name,
            self.File,
            self.Line,
            self.Column,
            self.Function,
            self.Args,
        )

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        return self.key == other.key
class Analysis(Remark):
    yaml_tag = "!Analysis"


class AnalysisFPCommute(Analysis):
    yaml_tag = "!AnalysisFPCommute"


class AnalysisAliasing(Analysis):
    yaml_tag = "!AnalysisAliasing"


class Passed(Remark):
    yaml_tag = "!Passed"


class Missed(Remark):
    yaml_tag = "!Missed"


class Failure(Missed):
    yaml_tag = "!Failure"
def get_remarks(input_file, filter_=None):
    max_hotness = 0
    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))

    with io.open(input_file, encoding="utf-8") as f:
        docs = yaml.load_all(f, Loader=Loader)

        filter_e = None
        if filter_:
            filter_e = re.compile(filter_)
        for remark in docs:
            remark.canonicalize()
            # Skip remarks without a debug location and duplicated remarks.
            if not hasattr(remark, "DebugLoc") or remark.key in all_remarks:
                continue

            if filter_e and not filter_e.search(remark.Pass):
                continue

            all_remarks[remark.key] = remark

            file_remarks[remark.File][remark.Line].append(remark)

            # If we're reading back a diff YAML file, max_hotness is already
            # captured, which may actually be less than the max hotness found
            # in the file.
            if hasattr(remark, "max_hotness"):
                max_hotness = remark.max_hotness
            max_hotness = max(max_hotness, remark.Hotness)

    return max_hotness, all_remarks, file_remarks
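# Illustrative return value of get_remarks for a single input file:
#   (300, {<remark.key>: <Remark>, ...}, {'a.c': {3: [<Missed>, ...]}, ...})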
def gather_results(filenames, num_jobs, should_print_progress, filter_=None):
    if should_print_progress:
        print("Reading YAML files...")
    if not Remark.demangler_proc:
        Remark.set_demangler(Remark.default_demangler)
    remarks = optpmap.pmap(
        get_remarks, filenames, num_jobs, should_print_progress, filter_
    )
    max_hotness = max(entry[0] for entry in remarks)

    def merge_file_remarks(file_remarks_job, all_remarks, merged):
        for filename, d in iteritems(file_remarks_job):
            for line, remarks in iteritems(d):
                for remark in remarks:
                    # Bring max_hotness into the remarks so that
                    # RelativeHotness does not depend on an external global.
                    remark.max_hotness = max_hotness
                    if remark.key not in all_remarks:
                        merged[filename][line].append(remark)

    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))
    for _, all_remarks_job, file_remarks_job in remarks:
        merge_file_remarks(file_remarks_job, all_remarks, file_remarks)
        all_remarks.update(all_remarks_job)

    return all_remarks, file_remarks, max_hotness != 0
def find_opt_files(*dirs_or_files):
    all = []
    for dir_or_file in dirs_or_files:
        if os.path.isfile(dir_or_file):
            all.append(dir_or_file)
        else:
            for dir, subdirs, files in os.walk(dir_or_file):
                # Exclude mounted directories and symlinks (os.walk default).
                subdirs[:] = [
                    d for d in subdirs if not os.path.ismount(os.path.join(dir, d))
                ]
                for file in files:
                    if fnmatch.fnmatch(file, "*.opt.yaml*"):
                        all.append(os.path.join(dir, file))
    return all
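# Sketch of how the helpers above fit together (argument values are
# assumptions, not defaults):
#
#   files = find_opt_files("build/")
#   all_remarks, file_remarks, has_hotness = gather_results(
#       files, num_jobs=8, should_print_progress=True)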