clang/tools/scan-build-py/tests/unit/test_report.py
# -*- coding: utf-8 -*-
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

import json
import libear
import libscanbuild.report as sut
import unittest
import os
import os.path


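# Helper: write `content` into a temporary HTML file and return the first bug
# record that sut.parse_bug_html() yields for it.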
def run_bug_parse(content):
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, "test.html")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        for bug in sut.parse_bug_html(file_name):
            return bug


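# Helper: write `content` into "<preproc>.info.txt" in a temporary directory
# and return the crash record parsed from it.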
def run_crash_parse(content, preproc):
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, preproc + ".info.txt")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        return sut.parse_crash(file_name)


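# Tests for the bug (HTML) and crash (info.txt) report parsers.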
class ParseFileTest(unittest.TestCase):
    def test_parse_bug(self):
        content = [
            "some header\n",
            "<!-- BUGDESC Division by zero -->\n",
            "<!-- BUGTYPE Division by zero -->\n",
            "<!-- BUGCATEGORY Logic error -->\n",
            "<!-- BUGFILE xx -->\n",
            "<!-- BUGLINE 5 -->\n",
            "<!-- BUGCOLUMN 22 -->\n",
            "<!-- BUGPATHLENGTH 4 -->\n",
            "<!-- BUGMETAEND -->\n",
            "<!-- REPORTHEADER -->\n",
            "some tails\n",
        ]
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Logic error")
        self.assertEqual(result["bug_path_length"], 4)
        self.assertEqual(result["bug_line"], 5)
        self.assertEqual(result["bug_description"], "Division by zero")
        self.assertEqual(result["bug_type"], "Division by zero")
        self.assertEqual(result["bug_file"], "xx")

    def test_parse_bug_empty(self):
        content = []
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Other")
        self.assertEqual(result["bug_path_length"], 1)
        self.assertEqual(result["bug_line"], 0)

    def test_parse_crash(self):
        content = [
            "/some/path/file.c\n",
            "Some very serious Error\n",
            "bla\n",
            "bla-bla\n",
        ]
        result = run_crash_parse(content, "file.i")
        self.assertEqual(result["source"], content[0].rstrip())
        self.assertEqual(result["problem"], content[1].rstrip())
        self.assertEqual(os.path.basename(result["file"]), "file.i")
        self.assertEqual(os.path.basename(result["info"]), "file.i.info.txt")
        self.assertEqual(os.path.basename(result["stderr"]), "file.i.stderr.txt")

    def test_parse_real_crash(self):
        import libscanbuild.analyze as sut2
        import re

        with libear.TemporaryDirectory() as tmpdir:
            filename = os.path.join(tmpdir, "test.c")
            with open(filename, "w") as handle:
                handle.write("int main() { return 0")
            # produce failure report
            opts = {
                "clang": "clang",
                "directory": os.getcwd(),
                "flags": [],
                "file": filename,
                "output_dir": tmpdir,
                "language": "c",
                "error_type": "other_error",
                "error_output": "some output",
                "exit_code": 13,
            }
            sut2.report_failure(opts)
            # find the info file
            pp_file = None
            for root, _, files in os.walk(tmpdir):
                keys = [os.path.join(root, name) for name in files]
                for key in keys:
                    if re.match(r"^(.*/)+clang(.*)\.i$", key):
                        pp_file = key
            self.assertIsNot(pp_file, None)
            # read the failure report back
            result = sut.parse_crash(pp_file + ".info.txt")
            self.assertEqual(result["source"], filename)
            self.assertEqual(result["problem"], "Other Error")
            self.assertEqual(result["file"], pp_file)
            self.assertEqual(result["info"], pp_file + ".info.txt")
            self.assertEqual(result["stderr"], pp_file + ".stderr.txt")


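# Tests for chop(), which rewrites an absolute path relative to the given
# prefix directory (and returns it unchanged when the prefix is empty).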
class ReportMethodTest(unittest.TestCase):
    def test_chop(self):
        self.assertEqual("file", sut.chop("/prefix", "/prefix/file"))
        self.assertEqual("file", sut.chop("/prefix/", "/prefix/file"))
        self.assertEqual("lib/file", sut.chop("/prefix/", "/prefix/lib/file"))
        self.assertEqual("/prefix/file", sut.chop("", "/prefix/file"))

    def test_chop_when_cwd(self):
        self.assertEqual("../src/file", sut.chop("/cwd", "/src/file"))
        self.assertEqual("../src/file", sut.chop("/prefix/cwd", "/prefix/src/file"))


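# Tests for commonprefix(), which returns the common directory prefix of the
# given file paths (and the empty string for an empty list).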
class GetPrefixFromCompilationDatabaseTest(unittest.TestCase):
    def test_with_different_filenames(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c", "/tmp/b.c"]), "/tmp")

    def test_with_different_dirnames(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/tmp/ack/b.c"]), "/tmp")

    def test_no_common_prefix(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/usr/ack/b.c"]), "/")

    def test_with_single_file(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c"]), "/tmp")

    def test_empty(self):
        self.assertEqual(sut.commonprefix([]), "")


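# Tests for merge_sarif_files(), which combines the individual SARIF files
# written into a directory into a single results-merged.sarif.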
class MergeSarifTest(unittest.TestCase):
    def test_merging_sarif(self):
        sarif1 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "runs": [
                {
                    "artifacts": [
                        {
                            "length": 100,
                            "location": {
                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
                            },
                            "mimeType": "text/plain",
                            "roles": ["resultFile"],
                        }
                    ],
                    "columnKind": "unicodeCodePoints",
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 1"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 5,
                                                                "startColumn": 1,
                                                                "startLine": 2,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 2"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 23,
                                                                "startColumn": 9,
                                                                "startLine": 10,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                    ],
                    "tool": {
                        "driver": {
                            "fullName": "clang static analyzer",
                            "language": "en-US",
                            "name": "clang",
                            "rules": [
                                {
                                    "fullDescription": {
                                        "text": "test rule for merge sarif test"
                                    },
                                    "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                    "id": "testId",
                                    "name": "testName",
                                }
                            ],
                            "version": "test clang",
                        }
                    },
                }
            ],
            "version": "2.1.0",
        }
        sarif2 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "runs": [
                {
                    "artifacts": [
                        {
                            "length": 1523,
                            "location": {
                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
                            },
                            "mimeType": "text/plain",
                            "roles": ["resultFile"],
                        }
                    ],
                    "columnKind": "unicodeCodePoints",
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 3"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 99,
                                                                "startColumn": 99,
                                                                "startLine": 17,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 4"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 305,
                                                                "startColumn": 304,
                                                                "startLine": 1,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                    ],
                    "tool": {
                        "driver": {
                            "fullName": "clang static analyzer",
                            "language": "en-US",
                            "name": "clang",
                            "rules": [
                                {
                                    "fullDescription": {
                                        "text": "test rule for merge sarif test"
                                    },
                                    "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                    "id": "testId",
                                    "name": "testName",
                                }
                            ],
                            "version": "test clang",
                        }
                    },
                }
            ],
            "version": "2.1.0",
        }
        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 2)
                self.assertEqual(len(merged["runs"][0]["results"]), 2)
                self.assertEqual(len(merged["runs"][1]["results"]), 2)

                expected = sarif1
                for run in sarif2["runs"]:
                    expected["runs"].append(run)

                self.assertEqual(merged, expected)

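    # Embedded links of the form sarif:/runs/<idx>/results/<n> have to be
    # re-indexed when the runs of several files are appended into one list.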
    def test_merge_updates_embedded_link(self):
        sarif1 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 1-2 [link](sarif:/runs/1/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 2-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 2-2 [link](sarif:/runs/0/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }
        sarif2 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 3-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 3-2 [link](sarif:/runs/1/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 4-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 4-2 [link](sarif:/runs/0/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }
        sarif3 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 5-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 5-2 [link](sarif:/runs/1/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 6-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {"message": {"text": "test message 6-2 [link](sarif:/runs/0/results/0)"}}
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }
        contents = [sarif1, sarif2, sarif3]

        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 6)

                code_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["message"]["text"]
                    for x in range(6)
                ]
                thread_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["threadFlows"][0][
                        "message"
                    ]["text"]
                    for x in range(6)
                ]

                # The run index should be updated for the second and third sets of runs.
                self.assertEqual(
                    code_flows,
                    [
                        "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)",
                        "test message 2-1 [link](sarif:/runs/0/results/0)",
                        "test message 3-1 [link](sarif:/runs/3/results/0) [link2](sarif:/runs/3/results/0)",
                        "test message 4-1 [link](sarif:/runs/2/results/0)",
                        "test message 5-1 [link](sarif:/runs/5/results/0) [link2](sarif:/runs/5/results/0)",
                        "test message 6-1 [link](sarif:/runs/4/results/0)",
                    ],
                )
                self.assertEqual(
                    thread_flows,
                    [
                        "test message 1-2 [link](sarif:/runs/1/results/0)",
                        "test message 2-2 [link](sarif:/runs/0/results/0)",
                        "test message 3-2 [link](sarif:/runs/3/results/0)",
                        "test message 4-2 [link](sarif:/runs/2/results/0)",
                        "test message 5-2 [link](sarif:/runs/5/results/0)",
                        "test message 6-2 [link](sarif:/runs/4/results/0)",
                    ],
                )

    def test_overflow_run_count(self):
        sarif1 = {
            "runs": [
                {
                    "results": [
                        {"message": {"text": "run 1-0 [link](sarif:/runs/1/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-1 [link](sarif:/runs/2/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-2 [link](sarif:/runs/3/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-3 [link](sarif:/runs/4/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-4 [link](sarif:/runs/5/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-5 [link](sarif:/runs/6/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-6 [link](sarif:/runs/7/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-7 [link](sarif:/runs/8/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-8 [link](sarif:/runs/9/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-9 [link](sarif:/runs/0/results/0)"}}
                    ]
                },
            ]
        }
        sarif2 = {
            "runs": [
                {
                    "results": [
                        {
                            "message": {
                                "text": "run 2-0 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/2/results/0)"
                            }
                        }
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-1 [link](sarif:/runs/2/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-2 [link](sarif:/runs/3/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-3 [link](sarif:/runs/4/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-4 [link](sarif:/runs/5/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-5 [link](sarif:/runs/6/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-6 [link](sarif:/runs/7/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-7 [link](sarif:/runs/8/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-8 [link](sarif:/runs/9/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-9 [link](sarif:/runs/0/results/0)"}}
                    ]
                },
            ]
        }

        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 20)

                messages = [
                    merged["runs"][x]["results"][0]["message"]["text"]
                    for x in range(20)
                ]
                self.assertEqual(
                    messages,
                    [
                        "run 1-0 [link](sarif:/runs/1/results/0)",
                        "run 1-1 [link](sarif:/runs/2/results/0)",
                        "run 1-2 [link](sarif:/runs/3/results/0)",
                        "run 1-3 [link](sarif:/runs/4/results/0)",
                        "run 1-4 [link](sarif:/runs/5/results/0)",
                        "run 1-5 [link](sarif:/runs/6/results/0)",
                        "run 1-6 [link](sarif:/runs/7/results/0)",
                        "run 1-7 [link](sarif:/runs/8/results/0)",
                        "run 1-8 [link](sarif:/runs/9/results/0)",
                        "run 1-9 [link](sarif:/runs/0/results/0)",
                        "run 2-0 [link](sarif:/runs/11/results/0) [link2](sarif:/runs/12/results/0)",
                        "run 2-1 [link](sarif:/runs/12/results/0)",
                        "run 2-2 [link](sarif:/runs/13/results/0)",
                        "run 2-3 [link](sarif:/runs/14/results/0)",
                        "run 2-4 [link](sarif:/runs/15/results/0)",
                        "run 2-5 [link](sarif:/runs/16/results/0)",
                        "run 2-6 [link](sarif:/runs/17/results/0)",
                        "run 2-7 [link](sarif:/runs/18/results/0)",
                        "run 2-8 [link](sarif:/runs/19/results/0)",
                        "run 2-9 [link](sarif:/runs/10/results/0)",
                    ],
                )