Disable ContentSettingBubbleModelTest.RPHAllow which is flaky.
[chromium-blink-merge.git] / tools / valgrind / test_suppressions.py
blob75bc6b9227448fc633a8b3d0776fcba075715541
1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 from collections import defaultdict
7 import os
8 import re
9 import sys
11 import suppressions
def ReadReportsFromFile(filename):
  """Parses one saved waterfall log into error reports.

  Args:
    filename: path to a log file. The file may contain HTML span markup and
        entity-escaped angle brackets (it is scraped waterfall output); both
        are stripped before parsing.

  Returns:
    A pair (reports, url) where:
      reports: list of [report_hash, report] pairs; each report is the text
          of one "{ ... }" suppression block, re-indented with 3 spaces.
      url: the last line of the file, which is assumed to store the URL of
          the report on the waterfall ("" if the file is empty).
  """
  # reports is a list of (error hash, report) pairs.
  reports = []
  in_suppression = False
  cur_supp = []
  # This stores the last error hash found while reading the file.
  last_hash = ""
  # Initialize so an empty input file returns "" instead of raising
  # NameError when we read the loop variable after the loop.
  line = ""
  # 'with' guarantees the file is closed; the py2-only file() builtin also
  # leaked the handle.
  with open(filename, 'r') as input_file:
    for line in input_file:
      line = line.strip()
      line = line.replace("</span><span class=\"stdout\">", "")
      line = line.replace("</span><span class=\"stderr\">", "")
      line = line.replace("&lt;", "<")
      line = line.replace("&gt;", ">")
      if in_suppression:
        if line == "}":
          cur_supp += ["}"]
          reports += [[last_hash, "\n".join(cur_supp)]]
          in_suppression = False
          cur_supp = []
          last_hash = ""
        else:
          cur_supp += [" "*3 + line]
      elif line == "{":
        in_suppression = True
        cur_supp = ["{"]
      elif line.startswith("Suppression (error hash=#"):
        # The 16-char hash sits immediately after the 25-char prefix.
        last_hash = line[25:41]
  # The line at the end of the file is assumed to store the URL of the report.
  return reports, line
def main(argv):
  """Checks reports from saved waterfall logs against known suppressions.

  Args:
    argv: list of log file names; each yields a set of reports plus the URL
        of the waterfall page it came from (see ReadReportsFromFile).

  Prints every unique report that is not matched by any suppression,
  followed by a summary line.
  """
  supp = suppressions.GetSuppressions()

  # all_reports is a map {report: list of urls containing this report}
  all_reports = defaultdict(list)
  report_hashes = {}

  for f in argv:
    f_reports, url = ReadReportsFromFile(f)
    # 'report_hash' rather than 'hash' to avoid shadowing the builtin.
    for (report_hash, report) in f_reports:
      all_reports[report] += [url]
      report_hashes[report] = report_hash

  reports_count = 0
  for r in all_reports:
    # Copy the list: the '+=' calls below would otherwise mutate the shared
    # supp['common_suppressions'] list in place, accumulating every
    # platform's suppressions into it across loop iterations.
    cur_supp = list(supp['common_suppressions'])
    if all(re.search("%20Mac%20|mac_valgrind", url)
           for url in all_reports[r]):
      # Include mac suppressions if the report is only present on Mac
      cur_supp += supp['mac_suppressions']
    elif all(re.search("Windows%20", url) for url in all_reports[r]):
      # Include win32 suppressions if the report is only present on Windows
      cur_supp += supp['win_suppressions']
    elif all(re.search("Linux%20", url) for url in all_reports[r]):
      cur_supp += supp['linux_suppressions']
    elif all(re.search("%20Heapcheck", url)
             for url in all_reports[r]):
      cur_supp += supp['heapcheck_suppressions']
    if all("DrMemory" in url for url in all_reports[r]):
      cur_supp += supp['drmem_suppressions']
    if all("DrMemory%20full" in url for url in all_reports[r]):
      cur_supp += supp['drmem_full_suppressions']

    match = False
    for s in cur_supp:
      if s.Match(r.split("\n")):
        match = True
        break
    if not match:
      reports_count += 1
      # Single-argument print(...) calls behave identically under Python 2
      # print statements and are also valid Python 3.
      print("===================================")
      print("This report observed at")
      for url in all_reports[r]:
        print(" %s" % url)
      print("didn't match any suppressions:")
      print("Suppression (error hash=#%s#):" % (report_hashes[r]))
      print(r)
      print("===================================")

  if reports_count > 0:
    print("%d unique reports don't match any of the suppressions" %
          reports_count)
  else:
    print("Congratulations! All reports are suppressed!")
    # TODO(timurrrr): also make sure none of the old suppressions
    # were narrowed too much.
if __name__ == "__main__":
  # Skip argv[0] (the script name); each remaining argument is a log file.
  args = sys.argv[1:]
  main(args)