# -*- coding: utf-8 -*-
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
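
# Unit tests for the scan-build-py report module: parsing of per-bug HTML
# reports and crash info files, path helpers, and SARIF merging.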

import libear
import libscanbuild.report as sut
import unittest
import os
import os.path
import json
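

# Write the given HTML report lines into a temporary file and return the first
# bug dictionary that sut.parse_bug_html() yields for it.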
def run_bug_parse(content):
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, "test.html")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        for bug in sut.parse_bug_html(file_name):
            return bug
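

# Write the given lines into a temporary "<preprocessed file>.info.txt" file
# and return the crash record that sut.parse_crash() builds from it.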
def run_crash_parse(content, preproc):
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, preproc + ".info.txt")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        return sut.parse_crash(file_name)
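

# Tests for the report-parsing entry points of libscanbuild.report.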
class ParseFileTest(unittest.TestCase):
    def test_parse_bug(self):
        content = [
            "<!-- BUGDESC Division by zero -->\n",
            "<!-- BUGTYPE Division by zero -->\n",
            "<!-- BUGCATEGORY Logic error -->\n",
            "<!-- BUGFILE xx -->\n",
            "<!-- BUGLINE 5 -->\n",
            "<!-- BUGCOLUMN 22 -->\n",
            "<!-- BUGPATHLENGTH 4 -->\n",
            "<!-- BUGMETAEND -->\n",
            "<!-- REPORTHEADER -->\n",
        ]
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Logic error")
        self.assertEqual(result["bug_path_length"], 4)
        self.assertEqual(result["bug_line"], 5)
        self.assertEqual(result["bug_description"], "Division by zero")
        self.assertEqual(result["bug_type"], "Division by zero")
        self.assertEqual(result["bug_file"], "xx")
    def test_parse_bug_empty(self):
        content = []
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Other")
        self.assertEqual(result["bug_path_length"], 1)
        self.assertEqual(result["bug_line"], 0)
    def test_parse_crash(self):
        content = [
            "/some/path/file.c\n",
            "Some very serious Error\n",
        ]
        result = run_crash_parse(content, "file.i")
        self.assertEqual(result["source"], content[0].rstrip())
        self.assertEqual(result["problem"], content[1].rstrip())
        self.assertEqual(os.path.basename(result["file"]), "file.i")
        self.assertEqual(os.path.basename(result["info"]), "file.i.info.txt")
        self.assertEqual(os.path.basename(result["stderr"]), "file.i.stderr.txt")
    def test_parse_real_crash(self):
        import libscanbuild.analyze as sut2
        import re

        with libear.TemporaryDirectory() as tmpdir:
            filename = os.path.join(tmpdir, "test.c")
            with open(filename, "w") as handle:
                handle.write("int main() { return 0")
            # produce failure report
            opts = {
                "clang": "clang",
                "directory": os.getcwd(),
                "flags": [],
                "file": filename,
                "output_dir": tmpdir,
                "language": "c",
                "error_type": "other_error",
                "error_output": "some output",
                "exit_code": 13,
            }
            sut2.report_failure(opts)
            # find the preprocessed file the report refers to
            pp_file = None
            for root, _, files in os.walk(tmpdir):
                keys = [os.path.join(root, name) for name in files]
                for key in keys:
                    if re.match(r"^(.*/)+clang(.*)\.i$", key):
                        pp_file = key
            self.assertIsNot(pp_file, None)
            # read the failure report back
            result = sut.parse_crash(pp_file + ".info.txt")
            self.assertEqual(result["source"], filename)
            self.assertEqual(result["problem"], "Other Error")
            self.assertEqual(result["file"], pp_file)
            self.assertEqual(result["info"], pp_file + ".info.txt")
            self.assertEqual(result["stderr"], pp_file + ".stderr.txt")
class ReportMethodTest(unittest.TestCase):
    def test_chop(self):
        self.assertEqual("file", sut.chop("/prefix", "/prefix/file"))
        self.assertEqual("file", sut.chop("/prefix/", "/prefix/file"))
        self.assertEqual("lib/file", sut.chop("/prefix/", "/prefix/lib/file"))
        self.assertEqual("/prefix/file", sut.chop("", "/prefix/file"))

    def test_chop_when_cwd(self):
        self.assertEqual("../src/file", sut.chop("/cwd", "/src/file"))
        self.assertEqual("../src/file", sut.chop("/prefix/cwd", "/prefix/src/file"))
class GetPrefixFromCompilationDatabaseTest(unittest.TestCase):
    def test_with_different_filenames(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c", "/tmp/b.c"]), "/tmp")

    def test_with_different_dirnames(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/tmp/ack/b.c"]), "/tmp")

    def test_no_common_prefix(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/usr/ack/b.c"]), "/")

    def test_with_single_file(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c"]), "/tmp")

    def test_empty(self):
        self.assertEqual(sut.commonprefix([]), "")
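

# sut.merge_sarif_files() combines every results-*.sarif file in a directory
# into a single results-merged.sarif, concatenating their "runs" and, as the
# later tests check, re-indexing embedded "sarif:/runs/<N>/..." links so they
# still point at the right run after merging.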
class MergeSarifTest(unittest.TestCase):
    def test_merging_sarif(self):
        sarif1 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
            "mimeType": "text/plain",
            "roles": ["resultFile"],
            "columnKind": "unicodeCodePoints",
            "importance": "important",
            "text": "test message 1"
            "physicalLocation": {
                "artifactLocation": {
                    "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "importance": "important",
            "text": "test message 2"
            "physicalLocation": {
                "artifactLocation": {
                    "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "fullName": "clang static analyzer",
            "text": "test rule for merge sarif test"
            "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "version": "test clang",
        }
        sarif2 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
            "mimeType": "text/plain",
            "roles": ["resultFile"],
            "columnKind": "unicodeCodePoints",
            "importance": "important",
            "text": "test message 3"
            "physicalLocation": {
                "artifactLocation": {
                    "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "importance": "important",
            "text": "test message 4"
            "physicalLocation": {
                "artifactLocation": {
                    "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "fullName": "clang static analyzer",
            "text": "test rule for merge sarif test"
            "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
            "version": "test clang",
        }

        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 2)
                self.assertEqual(len(merged["runs"][0]["results"]), 2)
                self.assertEqual(len(merged["runs"][1]["results"]), 2)

                expected = sarif1
                for run in sarif2["runs"]:
                    expected["runs"].append(run)

                self.assertEqual(merged, expected)
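
    # Embedded "sarif:/runs/<N>/results/<M>" links in result messages have to
    # be re-indexed when several files are merged; runs coming from the second
    # and third input file get their link targets shifted by 2 and 4 here.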
    def test_merge_updates_embedded_link(self):
        sarif1 = {
            "runs": [
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 1-2 [link](sarif:/runs/1/results/0)"}}],
                }]}]},
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 2-1 [link](sarif:/runs/0/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 2-2 [link](sarif:/runs/0/results/0)"}}],
                }]}]},
            ]
        }
        sarif2 = {
            "runs": [
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 3-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 3-2 [link](sarif:/runs/1/results/0)"}}],
                }]}]},
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 4-1 [link](sarif:/runs/0/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 4-2 [link](sarif:/runs/0/results/0)"}}],
                }]}]},
            ]
        }
        sarif3 = {
            "runs": [
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 5-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 5-2 [link](sarif:/runs/1/results/0)"}}],
                }]}]},
                {"results": [{"codeFlows": [{
                    "message": {"text": "test message 6-1 [link](sarif:/runs/0/results/0)"},
                    "threadFlows": [{"message": {"text": "test message 6-2 [link](sarif:/runs/0/results/0)"}}],
                }]}]},
            ]
        }

        contents = [sarif1, sarif2, sarif3]

        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 6)

                code_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["message"]["text"]
                    for x in range(len(merged["runs"]))
                ]
                thread_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["threadFlows"][0][
                        "message"
                    ]["text"]
                    for x in range(len(merged["runs"]))
                ]

                # The run index should be updated for the second and third sets of runs
                self.assertEqual(
                    code_flows,
                    [
                        "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)",
                        "test message 2-1 [link](sarif:/runs/0/results/0)",
                        "test message 3-1 [link](sarif:/runs/3/results/0) [link2](sarif:/runs/3/results/0)",
                        "test message 4-1 [link](sarif:/runs/2/results/0)",
                        "test message 5-1 [link](sarif:/runs/5/results/0) [link2](sarif:/runs/5/results/0)",
                        "test message 6-1 [link](sarif:/runs/4/results/0)",
                    ],
                )
                self.assertEqual(
                    thread_flows,
                    [
                        "test message 1-2 [link](sarif:/runs/1/results/0)",
                        "test message 2-2 [link](sarif:/runs/0/results/0)",
                        "test message 3-2 [link](sarif:/runs/3/results/0)",
                        "test message 4-2 [link](sarif:/runs/2/results/0)",
                        "test message 5-2 [link](sarif:/runs/5/results/0)",
                        "test message 6-2 [link](sarif:/runs/4/results/0)",
                    ],
                )
    def test_overflow_run_count(self):
        sarif1 = {
            "runs": [
                {"results": [{"message": {"text": "run 1-0 [link](sarif:/runs/1/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-1 [link](sarif:/runs/2/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-2 [link](sarif:/runs/3/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-3 [link](sarif:/runs/4/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-4 [link](sarif:/runs/5/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-5 [link](sarif:/runs/6/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-6 [link](sarif:/runs/7/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-7 [link](sarif:/runs/8/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-8 [link](sarif:/runs/9/results/0)"}}]},
                {"results": [{"message": {"text": "run 1-9 [link](sarif:/runs/0/results/0)"}}]},
            ]
        }
        sarif2 = {
            "runs": [
                {"results": [{"message": {"text": "run 2-0 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/2/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-1 [link](sarif:/runs/2/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-2 [link](sarif:/runs/3/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-3 [link](sarif:/runs/4/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-4 [link](sarif:/runs/5/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-5 [link](sarif:/runs/6/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-6 [link](sarif:/runs/7/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-7 [link](sarif:/runs/8/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-8 [link](sarif:/runs/9/results/0)"}}]},
                {"results": [{"message": {"text": "run 2-9 [link](sarif:/runs/0/results/0)"}}]},
            ]
        }

        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            for idx, content in enumerate(contents):
                file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
                with open(file_name, "w") as handle:
                    json.dump(content, handle)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 20)

                messages = [
                    merged["runs"][x]["results"][0]["message"]["text"]
                    for x in range(len(merged["runs"]))
                ]
                self.assertEqual(
                    messages,
                    [
                        "run 1-0 [link](sarif:/runs/1/results/0)",
                        "run 1-1 [link](sarif:/runs/2/results/0)",
                        "run 1-2 [link](sarif:/runs/3/results/0)",
                        "run 1-3 [link](sarif:/runs/4/results/0)",
                        "run 1-4 [link](sarif:/runs/5/results/0)",
                        "run 1-5 [link](sarif:/runs/6/results/0)",
                        "run 1-6 [link](sarif:/runs/7/results/0)",
                        "run 1-7 [link](sarif:/runs/8/results/0)",
                        "run 1-8 [link](sarif:/runs/9/results/0)",
                        "run 1-9 [link](sarif:/runs/0/results/0)",
                        "run 2-0 [link](sarif:/runs/11/results/0) [link2](sarif:/runs/12/results/0)",
                        "run 2-1 [link](sarif:/runs/12/results/0)",
                        "run 2-2 [link](sarif:/runs/13/results/0)",
                        "run 2-3 [link](sarif:/runs/14/results/0)",
                        "run 2-4 [link](sarif:/runs/15/results/0)",
                        "run 2-5 [link](sarif:/runs/16/results/0)",
                        "run 2-6 [link](sarif:/runs/17/results/0)",
                        "run 2-7 [link](sarif:/runs/18/results/0)",
                        "run 2-8 [link](sarif:/runs/19/results/0)",
                        "run 2-9 [link](sarif:/runs/10/results/0)",
                    ],
                )