tools/valgrind/tsan_analyze.py
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# tsan_analyze.py

''' Given a ThreadSanitizer output file, parses errors and uniques them.'''
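
# This script can be run by hand when debugging; the report path below is
# illustrative only:
#   tools/valgrind/tsan_analyze.py --source_dir=src tsan_report.txt
# Normally the TsanAnalyzer class is imported and driven programmatically
# (see main() below).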
import gdb_helper

from collections import defaultdict
import hashlib
import logging
import optparse
import os
import re
import subprocess
import sys
import time

import common

# Global symbol table (ugh)
TheAddressTable = None
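
# A single line of a report stack trace. When gdb-based symbolization is
# enabled, __str__ lazily replaces the binary name with "file:line" resolved
# through the global TheAddressTable; otherwise the raw ThreadSanitizer line
# is returned unchanged.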
class _StackTraceLine(object):
  def __init__(self, line, address, binary):
    self.raw_line_ = line
    self.address = address
    self.binary = binary
  def __str__(self):
    global TheAddressTable
    file, line = TheAddressTable.GetFileLine(self.binary, self.address)
    if (file is None) or (line is None):
      return self.raw_line_
    else:
      return self.raw_line_.replace(self.binary, '%s:%s' % (file, line))

class TsanAnalyzer(object):
  ''' Given a set of ThreadSanitizer output files, parse all the errors out of
  them, unique them and output the results.'''
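
  # Regular expressions and match strings used to pick report pieces out of
  # the ThreadSanitizer log: dynamic-library load lines, numbered stack-frame
  # lines, thread-creation banners, and race/warning/assertion headers.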
  LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)')
  TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*'
                            '([0-9A-Fa-fx]+):'
                            '(?:[^ ]* )*'
                            '([^ :\n]+)'
                            '')
  THREAD_CREATION_STR = ("INFO: T.* "
      "(has been created by T.* at this point|is program's main thread)")

  SANITY_TEST_SUPPRESSION = ("ThreadSanitizer sanity test "
      "(ToolsSanityTest.DataRace)")
  TSAN_RACE_DESCRIPTION = "Possible data race"
  TSAN_WARNING_DESCRIPTION = ("Unlocking a non-locked lock"
      "|accessing an invalid lock"
      "|which did not acquire this lock")
  RACE_VERIFIER_LINE = "Confirmed a race|unexpected race"
  TSAN_ASSERTION = "Assertion failed: "

  def __init__(self, source_dir, use_gdb=False):
    '''Reads in a set of files.

    Args:
      source_dir: Path to top of source tree for this build
    '''

    self._use_gdb = use_gdb
    self._cur_testcase = None
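
  # ReadLine() pulls the next line of the current report file into self.line_.
  # With gdb symbolization enabled it also registers binary load addresses and
  # stack-frame addresses in TheAddressTable for later resolution, stashing
  # matched frames as _StackTraceLine objects in self.stack_trace_line_.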
  def ReadLine(self):
    self.line_ = self.cur_fd_.readline()
    self.stack_trace_line_ = None
    if not self._use_gdb:
      return
    global TheAddressTable
    match = TsanAnalyzer.LOAD_LIB_RE.match(self.line_)
    if match:
      binary, ip = match.groups()
      TheAddressTable.AddBinaryAt(binary, ip)
      return
    match = TsanAnalyzer.TSAN_LINE_RE.match(self.line_)
    if match:
      address, binary_name = match.groups()
      stack_trace_line = _StackTraceLine(self.line_, address, binary_name)
      TheAddressTable.Add(stack_trace_line.binary, stack_trace_line.address)
      self.stack_trace_line_ = stack_trace_line

  def ReadSection(self):
    """ Example of a section:
    ==4528== WARNING: Possible data race: {{{
    ==4528==    T20 (L{}):
    ==4528==     #0  MyTest::Foo1
    ==4528==     #1  MyThread::ThreadBody
    ==4528==   Concurrent write happened at this point:
    ==4528==    T19 (L{}):
    ==4528==     #0  MyTest::Foo2
    ==4528==     #1  MyThread::ThreadBody
    ==4528== }}}
    ------- suppression -------
    {
      <Put your suppression name here>
      ThreadSanitizer:Race
      fun:MyTest::Foo1
      fun:MyThread::ThreadBody
    }
    ------- end suppression -------
    """
    result = [self.line_]
    if re.search("{{{", self.line_):
      while not re.search('}}}', self.line_):
        self.ReadLine()
        if self.stack_trace_line_ is None:
          result.append(self.line_)
        else:
          result.append(self.stack_trace_line_)
      self.ReadLine()
      if re.match('-+ suppression -+', self.line_):
        # We need to calculate the suppression hash and prepend a line like
        # "Suppression (error hash=#0123456789ABCDEF#):" so the buildbot can
        # extract the suppression snippet.
        supp = ""
        while not re.match('-+ end suppression -+', self.line_):
          self.ReadLine()
          supp += self.line_
        self.ReadLine()
        if self._cur_testcase:
          result.append("The report came from the `%s` test.\n" % \
                        self._cur_testcase)
        result.append("Suppression (error hash=#%016X#):\n" % \
                      (int(hashlib.md5(supp).hexdigest()[:16], 16)))
        result.append(" For more info on using suppressions see "
            "http://dev.chromium.org/developers/how-tos/using-valgrind/threadsanitizer#TOC-Suppressing-data-races\n")
        result.append(supp)
    else:
      self.ReadLine()

    return result
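
  # Reads the rest of the current file and returns all remaining lines; used
  # for assertion failures, where the remainder of the log is the report.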
  def ReadTillTheEnd(self):
    result = [self.line_]
    while self.line_:
      self.ReadLine()
      result.append(self.line_)
    return result

  def ParseReportFile(self, filename):
    '''Parses a report file and returns a list of ThreadSanitizer reports.

    Args:
      filename: report filename.
    Returns:
      A list of reports. Each report is a list of lines: plain str, or
      _StackTraceLine objects when self._use_gdb is set.
    '''
    ret = []
    self.cur_fd_ = open(filename, 'r')

    while True:
      # Read ThreadSanitizer reports.
      self.ReadLine()
      if not self.line_:
        break

      while True:
        tmp = []
        while re.search(TsanAnalyzer.RACE_VERIFIER_LINE, self.line_):
          tmp.append(self.line_)
          self.ReadLine()
        while re.search(TsanAnalyzer.THREAD_CREATION_STR, self.line_):
          tmp.extend(self.ReadSection())
        if re.search(TsanAnalyzer.TSAN_RACE_DESCRIPTION, self.line_):
          tmp.extend(self.ReadSection())
          ret.append(tmp)  # includes RaceVerifier and thread creation stacks
        elif (re.search(TsanAnalyzer.TSAN_WARNING_DESCRIPTION, self.line_) and
              not common.IsWindows()):  # workaround for http://crbug.com/53198
          tmp.extend(self.ReadSection())
          ret.append(tmp)
        else:
          break

      tmp = []
      if re.search(TsanAnalyzer.TSAN_ASSERTION, self.line_):
        tmp.extend(self.ReadTillTheEnd())
        ret.append(tmp)
        break
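
      # Not a report: look for the suppression-usage summary lines
      # ("used_suppression: <count> <name>") and tally them into
      # self.used_suppressions.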
      match = re.search("used_suppression:\s+([0-9]+)\s(.*)", self.line_)
      if match:
        count, supp_name = match.groups()
        count = int(count)
        self.used_suppressions[supp_name] += count
    self.cur_fd_.close()
    return ret

  def GetReports(self, files):
    '''Extracts reports from a set of files.

    Reads a set of files and returns a list of all discovered
    ThreadSanitizer race reports. As a side effect, populates
    self.used_suppressions with appropriate info.
    '''

    global TheAddressTable
    if self._use_gdb:
      TheAddressTable = gdb_helper.AddressTable()
    else:
      TheAddressTable = None
    reports = []
    self.used_suppressions = defaultdict(int)
    for file in files:
      reports.extend(self.ParseReportFile(file))
    if self._use_gdb:
      TheAddressTable.ResolveAll()
      # Make each line of each report a string.
      reports = map(lambda x: map(str, x), reports)
    return [''.join(report_lines) for report_lines in reports]

  def Report(self, files, testcase, check_sanity=False):
    '''Reads in a set of files and prints a ThreadSanitizer report.

    Args:
      files: A list of filenames.
      check_sanity: if true, search for SANITY_TEST_SUPPRESSION.
    '''

    # We store the current test case in self._cur_testcase to avoid passing it
    # through about 5 functions.
    self._cur_testcase = testcase
    reports = self.GetReports(files)
    self._cur_testcase = None  # just in case, shouldn't be used anymore

    common.PrintUsedSuppressionsList(self.used_suppressions)
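
    # Return-code convention: 0 means no reports, -1 means ThreadSanitizer
    # reports were found, and -3 means the sanity-check suppression was never
    # used even though check_sanity was requested.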
    retcode = 0
    if reports:
      sys.stdout.flush()
      sys.stderr.flush()
      logging.info("FAIL! Found %i report(s)" % len(reports))
      for report in reports:
        logging.info('\n' + report)
      sys.stdout.flush()
      retcode = -1

    # Report tool's insanity even if there were errors.
    if (check_sanity and
        TsanAnalyzer.SANITY_TEST_SUPPRESSION not in self.used_suppressions):
      logging.error("FAIL! Sanity check failed!")
      retcode = -3

    if retcode != 0:
      return retcode

    logging.info("PASS: No reports found")
    return 0

def main():
  '''For testing only. The TsanAnalyzer class should be imported instead.'''
  parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
  parser.add_option("", "--source_dir",
                    help="path to top of source tree for this build "
                         "(used to normalize source paths in baseline)")

  (options, args) = parser.parse_args()
  if not args:
    parser.error("no filename specified")
  filenames = args

  logging.getLogger().setLevel(logging.INFO)
  analyzer = TsanAnalyzer(options.source_dir, use_gdb=True)
  return analyzer.Report(filenames, None)

if __name__ == '__main__':
  sys.exit(main())