#!/usr/bin/env python3
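"""Generate FileCheck CHECK lines for clang's JSON AST dump tests.

The script runs `<clang> -cc1 -ast-dump=json <opts> <source>` on each given
source file, normalizes machine-specific values (pointers, file paths,
line:column locations) into FileCheck patterns, and appends the resulting
CHECK lines to the test. With --update the test file is rewritten in place;
otherwise a "<name>-json.<ext>" copy is written next to it.

Example invocation (paths are illustrative):
    gen_ast_dump_json_test.py --clang path/to/clang --source foo.cpp --filters=TypedefDecl,BuiltinType
"""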
from __future__ import print_function
from collections import OrderedDict
from shutil import copyfile
import argparse
import json
import os
import re
import subprocess
import sys
import tempfile
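
# Rewrite values that vary between runs or machines into FileCheck patterns:
# non-null pointer strings become 0x{{.*}}, paths to existing files become
# {{.*}}, and <file>:<line>:<col> locations keep only the line and column.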
def normalize(dict_var):
    for k, v in dict_var.items():
        if isinstance(v, OrderedDict):
            normalize(v)
        elif isinstance(v, list):
            for e in v:
                if isinstance(e, OrderedDict):
                    normalize(e)
        elif type(v) is str:
            if v != "0x0" and re.match(r"0x[0-9A-Fa-f]+", v):
                dict_var[k] = '0x{{.*}}'
            elif os.path.isfile(v):
                dict_var[k] = '{{.*}}'
            else:
                splits = v.split(' ')
                out_splits = []
                for split in splits:
                    inner_splits = split.rsplit(':', 2)
                    if os.path.isfile(inner_splits[0]):
                        out_splits.append(
                            '{{.*}}:%s:%s'
                            % (inner_splits[1], inner_splits[2]))
                        continue
                    out_splits.append(split)

                dict_var[k] = ' '.join(out_splits)
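
# Recursively collect into `out` every node that has a string value matching
# one of the requested filters (e.g. a node kind such as TypedefDecl).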
def filter_json(dict_var, filters, out):
    for k, v in dict_var.items():
        if type(v) is str:
            if v in filters:
                out.append(dict_var)
                break
        elif isinstance(v, OrderedDict):
            filter_json(v, filters, out)
        elif isinstance(v, list):
            for e in v:
                if isinstance(e, OrderedDict):
                    filter_json(e, filters, out)
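
# Guess a clang binary located next to this script, if one exists; used as
# the default for --clang.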
def default_clang_path():
    guessed_clang = os.path.join(os.path.dirname(__file__), "clang")
    if os.path.isfile(guessed_clang):
        return guessed_clang
    return None


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--clang", help="The clang binary (could be a relative or absolute path)",
                        action="store", default=default_clang_path())
    parser.add_argument("--source", help="the source file(s). Without --update, the command used to generate the JSON "
                        "will be of the format <clang> -cc1 -ast-dump=json <opts> <source>",
                        action="store", nargs=argparse.ONE_OR_MORE, required=True)
    parser.add_argument("--filters", help="comma separated list of AST filters. Ex: --filters=TypedefDecl,BuiltinType",
                        action="store", default='')
    update_or_generate_group = parser.add_mutually_exclusive_group()
    update_or_generate_group.add_argument("--update", help="Update the file in-place", action="store_true")
    update_or_generate_group.add_argument("--opts", help="other options",
                                          action="store", default='', type=str)
    parser.add_argument("--update-manual", help="When using --update, also update files that do not have the "
                        "autogenerated disclaimer", action="store_true")
    args = parser.parse_args()

    if not args.source:
        sys.exit("Specify the source file to give to clang.")

    if not args.clang:
        sys.exit("Specify the clang binary to use, either with --clang or by "
                 "placing a clang binary next to this script.")
    clang_binary = os.path.abspath(args.clang)
    if not os.path.isfile(clang_binary):
        sys.exit("clang binary specified not present.")

    for src in args.source:
        process_file(src, clang_binary, cmdline_filters=args.filters,
                     cmdline_opts=args.opts, do_update=args.update,
                     force_update=args.update_manual)
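
# Run clang on a single test file, normalize the JSON AST it produces, and
# append the generated CHECK lines, either updating the test in place
# (--update) or writing a "<name>-json.<ext>" sibling file.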
def process_file(source_file, clang_binary, cmdline_filters, cmdline_opts,
                 do_update, force_update):
    note_firstline = "// NOTE: CHECK lines have been autogenerated by " \
                     "gen_ast_dump_json_test.py"
    filters_line_prefix = "// using --filters="
    note = note_firstline

    cmd = [clang_binary, "-cc1"]
    if do_update:
        # When updating, the first line of the test must be a RUN: line.
        with open(source_file, "r") as srcf:
            first_line = srcf.readline()
            found_autogenerated_line = False
            filters_line = None
            for i, line in enumerate(srcf.readlines()):
                if found_autogenerated_line:
                    # print("Filters line: '", line.rstrip(), "'", sep="")
                    if line.startswith(filters_line_prefix):
                        filters_line = line[len(filters_line_prefix):].rstrip()
                    break
                if line.startswith(note_firstline):
                    found_autogenerated_line = True
                    # print("Found autogenerated disclaimer at line", i + 1)
        if not found_autogenerated_line and not force_update:
            print("Not updating", source_file, "since it is not autogenerated.",
                  file=sys.stderr)
            return
        if not cmdline_filters and filters_line:
            cmdline_filters = filters_line
            print("Inferred filters as '" + cmdline_filters + "'")

        if "RUN: %clang_cc1 " not in first_line:
            sys.exit("When using --update the first line of the input file "
                     "must contain RUN: %clang_cc1")
        clang_start = first_line.find("%clang_cc1") + len("%clang_cc1")
        file_check_idx = first_line.rfind("| FileCheck")
        if file_check_idx != -1:  # rfind() returns -1 when the RUN line has no FileCheck invocation
            dump_cmd = first_line[clang_start:file_check_idx]
        else:
            dump_cmd = first_line[clang_start:]
        print("Inferred run arguments as '", dump_cmd, "'", sep="")
        options = dump_cmd.split()
        if "-ast-dump=json" not in options:
            sys.exit("ERROR: RUN: line does not contain -ast-dump=json")
        if "%s" not in options:
            sys.exit("ERROR: RUN: line does not contain %s")
        options.remove("%s")
    else:
        options = cmdline_opts.split()
        options.append("-ast-dump=json")
    cmd.extend(options)
    using_ast_dump_filter = any('ast-dump-filter' in arg for arg in cmd)
    cmd.append(source_file)
    print("Will run", cmd)
    filters = set()
    if cmdline_filters:
        note += "\n" + filters_line_prefix + cmdline_filters
        filters = set(cmdline_filters.split(','))
    print("Will use the following filters:", filters)

    try:
        json_str = subprocess.check_output(cmd).decode()
    except Exception as ex:
        print("The clang command failed with %s" % ex)
        return -1

    out_asts = []
    if using_ast_dump_filter:
        # If we're using a filter, then we might have multiple JSON objects
        # in the output. To parse each out, we use a manual JSONDecoder in
        # "raw" mode and update our location in the string based on where the
        # last document ended.
        decoder = json.JSONDecoder(object_hook=OrderedDict)
        doc_start = 0
        prev_end = 0
        while True:
            try:
                prev_end = doc_start
                (j, doc_start) = decoder.raw_decode(json_str[doc_start:])
                doc_start += prev_end + 1
                normalize(j)
                out_asts.append(j)
            except ValueError:
                # raw_decode() raises ValueError (JSONDecodeError) once the
                # remaining output contains no further JSON document.
                break
    else:
        j = json.loads(json_str, object_pairs_hook=OrderedDict)
        normalize(j)

        if len(filters) == 0:
            out_asts.append(j)
        else:
            filter_json(j, filters, out_asts)
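
    # Copy the original test up to the autogenerated note, then append the
    # note and the freshly generated CHECK/CHECK-NEXT lines.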
    with tempfile.NamedTemporaryFile("w", delete=False) as f:
        with open(source_file, "r") as srcf:
            for line in srcf.readlines():
                # copy up to the note:
                if line.rstrip() == note_firstline:
                    break
                f.write(line)
        f.write(note + "\n")
        for out_ast in out_asts:
            append_str = json.dumps(out_ast, indent=1, ensure_ascii=False)
            out_str = '\n\n'
            out_str += "// CHECK-NOT: {{^}}Dumping\n"
            index = 0
            for append_line in append_str.splitlines()[2:]:
                if index == 0:
                    out_str += '// CHECK: %s\n' % (append_line.rstrip())
                    index += 1
                else:
                    out_str += '// CHECK-NEXT: %s\n' % (append_line.rstrip())

            f.write(out_str)
        f.flush()
        f.close()
        if do_update:
            print("Updating json appended source file to %s." % source_file)
            copyfile(f.name, source_file)
        else:
            partition = source_file.rpartition('.')
            dest_path = '%s-json%s%s' % (partition[0], partition[1], partition[2])
            print("Writing json appended source file to %s." % dest_path)
            copyfile(f.name, dest_path)
        os.remove(f.name)
    return 0


if __name__ == '__main__':
    main()