#!/usr/bin/python
#
# This file is part of the GROMACS molecular simulation package.
#
# Copyright (c) 2014,2015, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
#
# GROMACS is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# GROMACS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with GROMACS; if not, see
# http://www.gnu.org/licenses, or write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# If you want to redistribute modifications to GROMACS, please
# consider that scientific software is very special. Version
# control is crucial - bugs must be traceable. We will be happy to
# consider code for inclusion in the official distribution, but
# derived work must not be called official GROMACS. Details are found
# in the README & COPYING files - if they are missing, get the
# official version at http://www.gromacs.org.
#
# To help us fund GROMACS development, we humbly ask that you cite
# the research papers on the package. Check out http://www.gromacs.org.

"""Check source code and Doxygen documentation for issues

This script checks for some issues in the Doxygen documentation, as well as
for general issues in the source code, mainly using Doxygen XML output and
#include dependencies parsed from the source files. Some of the checks are
generic, like checking that all documented entities have brief descriptions.
Others are specific to GROMACS, like checking that only installed headers
contribute to the public API documentation.

The checks should be self-evident from the source code of the script
(they are also described in docs/dev-manual/gmxtree.rst).
The logic for parsing the Doxygen XML output and for creating a
GROMACS-specific representation of the source tree lives in separate Python
modules (doxygenxml.py and gmxtree.py, respectively). Similarly, the logic
for handling the output messages is in reporter.py. This leaves only the
actual checks and the script's command-line interface in this file.

The script can be run using the 'check-source' target generated by CMake.
This target takes care of generating all the necessary input files and
passing them to the script.
"""
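
# For reference, a direct invocation (outside the CMake target) could look
# roughly like the sketch below. The paths are hypothetical placeholders;
# only the options themselves are defined in main() further down.
#
#   check-source.py -S <source-root> -B <build-root> \
#       --ignore <ignore-file> --ignore-cycles <cycle-suppression-file> \
#       --exitcode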

import sys
from optparse import OptionParser

import gmxtree
from gmxtree import GromacsTree, DocType
from includesorter import IncludeSorter
from reporter import Reporter

def check_file(fileobj, tree, reporter):
    """Check file-level issues."""
    if not fileobj.is_external() and fileobj.get_relpath().startswith('src/'):
        includes = fileobj.get_includes()
        if fileobj.is_source_file():
            if includes:
                firstinclude = includes[0].get_file()
                if not firstinclude or firstinclude.get_name() != "gmxpre.h":
                    reporter.code_issue(includes[0],
                            "does not include \"gmxpre.h\" first")
            else:
                reporter.code_issue(fileobj, "does not include \"gmxpre.h\"")
        used_define_files = fileobj.get_used_define_files()
        for define_file in tree.get_checked_define_files():
            includes_file = False
            for include in includes:
                if include.get_file() == define_file:
                    includes_file = True
                    break
            if includes_file:
                if not define_file in used_define_files:
                    reporter.code_issue(fileobj,
                            "includes \"{0}\" unnecessarily".format(define_file.get_name()))
            else:
                if define_file in used_define_files:
                    used_defines = list(fileobj.get_used_defines(define_file))
                    if len(used_defines) > 3:
                        used_defines = used_defines[:3] + ['...']
                    used_defines = ', '.join(used_defines)
                    reporter.code_issue(fileobj,
                            "should include \"{0}\"".format(define_file.get_name()),
                            details=["uses " + used_defines])

    if not fileobj.is_documented():
        # TODO: Add rules for required documentation
        return

    if fileobj.is_source_file():
        # TODO: Add rule to exclude examples from this check
        if fileobj.is_installed():
            reporter.file_error(fileobj, "source file is installed")
        if fileobj.get_doc_type() != DocType.internal:
            reporter.file_error(fileobj,
                    "source file documentation appears outside full documentation")
        elif fileobj.get_api_type() != DocType.internal:
            reporter.file_error(fileobj, "source file marked as non-internal")
    elif fileobj.is_test_file() and fileobj.is_installed():
        reporter.file_error(fileobj, "test file is installed")
    elif fileobj.is_installed():
        if fileobj.get_doc_type() != DocType.public:
            reporter.file_error(fileobj,
                    "public header has non-public documentation")
    elif fileobj.get_doc_type() == DocType.public:
        reporter.file_error(fileobj,
                "non-installed header has public documentation")
    elif fileobj.get_api_type() == DocType.public:
        reporter.file_error(fileobj,
                "non-installed header specified as part of public API")
    elif fileobj.get_doc_type() < fileobj.get_api_type():
        reporter.file_error(fileobj,
                "API type ({0}) conflicts with documentation visibility ({1})"
                .format(fileobj.get_api_type(), fileobj.get_doc_type()))

    if not fileobj.has_brief_description():
        reporter.file_error(fileobj,
                "is documented, but does not have brief description")

    expectedmod = fileobj.get_expected_module()
    if expectedmod:
        docmodules = fileobj.get_doc_modules()
        if docmodules:
            for module in docmodules:
                if module != expectedmod:
                    reporter.file_error(fileobj,
                            "is documented in incorrect module: {0}"
                            .format(module.get_name()))
        elif expectedmod.is_documented():
            reporter.file_error(fileobj,
                    "is not documented in any module, but {0} exists"
                    .format(expectedmod.get_name()))

def check_include(fileobj, includedfile, reporter):
    """Check an #include directive."""
    otherfile = includedfile.get_file()
    if includedfile.is_system():
        if not otherfile:
            return
        reporter.code_issue(includedfile,
                "includes local file as {0}".format(includedfile))
    if not otherfile:
        reporter.code_issue(includedfile,
                "includes non-local file as {0}".format(includedfile))
    if not otherfile:
        return
    if fileobj.is_installed() and not otherfile.is_installed():
        reporter.code_issue(includedfile,
                "installed header includes non-installed {0}"
                .format(includedfile))
    filemodule = fileobj.get_module()
    othermodule = otherfile.get_module()
    if fileobj.is_documented() and otherfile.is_documented():
        filetype = fileobj.get_doc_type()
        othertype = otherfile.get_doc_type()
        if filetype > othertype:
            reporter.code_issue(includedfile,
                    "{0} file includes {1} file {2}"
                    .format(filetype, othertype, includedfile))
    check_api = (otherfile.api_type_is_reliable() and filemodule != othermodule)
    if check_api and otherfile.get_api_type() < DocType.library:
        reporter.code_issue(includedfile,
                "included file {0} is not documented as exposed outside its module"
                .format(includedfile))

def check_entity(entity, reporter):
    """Check documentation for a code construct."""
    if entity.is_documented():
        if not entity.has_brief_description():
            reporter.doc_error(entity,
                    "is documented, but does not have brief description")

def check_class(classobj, reporter):
    """Check documentation for a class/struct/union."""
    check_entity(classobj, reporter)
    if classobj.is_documented():
        classtype = classobj.get_doc_type()
        filetype = classobj.get_file_doc_type()
        if classtype == DocType.public and not classobj.is_in_installed_file():
            reporter.doc_error(classobj,
                    "has public documentation, but is not in installed header")
        elif filetype is not DocType.none and classtype > filetype:
            reporter.doc_error(classobj,
                    "is in {0} file(s), but appears in {1} documentation"
                    .format(filetype, classtype))

def check_member(member, reporter, check_ignored):
    """Check documentation for a generic member."""
    check_entity(member, reporter)
    if member.is_documented():
        if check_ignored and not member.is_visible():
            reporter.doc_note(member,
                    "is documented, but is ignored by Doxygen, because its scope is not documented")
        if member.has_inbody_description():
            reporter.doc_note(member, "has in-body comments, which are ignored")
210 def check_cycles(graph, reporter):
211 """Check cyclic dependencies in a dependency graph.
213 The graph parameter provides the graph to check. It should be an object
214 that has three methods:
215 iternodes():
216 Return the list of nodes in the graph.
217 iteredges(node):
218 Return the list of edges from a given node.
219 The list should contain (node, edge) pairs, where node is an object
220 returned by iternodes() and edge is any object.
221 report_cycle(cycle, reporter):
222 Process a found cycle. cycle contains a list of (node, edge) pairs
223 that describe the cycle. edge is the edge object that leads _to_
224 the node in the cycle.
226 This is implemented using an extended DFS-based strongly connected
227 component (SCC) search, written using a stack instead of recursion.
228 The base algorithm is Tarjan's SCC search:
229 http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm
231 Each back edge that is encountered during the search is reported as a
232 cycle. Additionally, if a cross edge is encountered that is within the
233 current SCC, the target node and all its children in the current SCC will
234 be visited again to find all cycles. All steps except cycle detection are
235 omitted for such re-traversal.
237 To avoid duplicates from cycles that do not include all nodes in an SCC,
238 a cycle is only reported if the target of the back edge is still active
239 in the search, i.e., all edges from it have not yet been traversed.
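    # For illustration only, a minimal (hypothetical) graph satisfying the
    # interface described above; the real graph used by this script is
    # ModuleDependencyGraph further below.
    #
    #   class ToyGraph(object):
    #       def iternodes(self):
    #           return ['a', 'b']
    #       def iteredges(self, node):
    #           # One edge a -> b and one edge b -> a, so a -> b -> a is a cycle.
    #           return [('b', 'a->b')] if node == 'a' else [('a', 'b->a')]
    #       def report_cycle(self, cycle, reporter):
    #           reporter.cyclic_issue(' -> '.join(node for node, edge in cycle))
    #
    #   check_cycles(ToyGraph(), reporter)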
    # The DFS stack; next node is always popped from the end.
    # Stores (node, edge) pairs.
    # edge is None for start nodes and for post-order processing.
    dfsstack = []
    for node in graph.iternodes():
        dfsstack.append((node, None))
    # Stack of visited nodes that have not yet been assigned to a strongly
    # connected component.
    visitstack = []
    # List of nodes in the DFS recursion stack.
    currlist = []
    # Set of nodes in currlist for more efficient searching.
    currset = set()
    # Counter for initializing preorder.
    visit_count = 0
    # DFS pre-order for nodes: initialized when a node is first encountered
    # in the search.
    preorder = dict()
    # Lowest pre-order index reachable from this node.
    # Initialized to pre-order, and updated during post-order processing.
    linkorder = dict()
    # Set to True for a node when first encountered, and set to False when
    # a strongly connected component has been processed.
    in_progress = dict()
    # The DFS search
    while dfsstack:
        currnode, curredge = dfsstack.pop()
        # curredge is None if this is a start node or post-order traversal.
        # currlist is empty if this is a start node.
        if curredge is None and currlist:
            # All children visited: post-order processing.
            done = currlist.pop()[0]
            assert done == currnode
            currset.remove(currnode)
            # If this is the first time this node is encountered, fill
            # linkorder and check for strongly connected components.
            if linkorder[currnode] == preorder[currnode]:
                children = [x for x, dummy in graph.iteredges(currnode) if in_progress[x]]
                if children:
                    linkorder[currnode] = min([linkorder[x] for x in children])
                if preorder[currnode] <= linkorder[currnode]:
                    # This is a root of a strongly connected component.
                    while visitstack:
                        node = visitstack.pop()
                        in_progress[node] = False
                        if node == currnode:
                            break
                    else:
                        assert False
            continue
        if currnode not in preorder:
            # First encounter of this node: pre-order processing.
            preorder[currnode] = visit_count
            linkorder[currnode] = visit_count
            visitstack.append(currnode)
            visit_count += 1
            in_progress[currnode] = True
        elif not in_progress[currnode]:
            # Do not enter processed components again.
            continue
        currlist.append((currnode, curredge))
        currset.add(currnode)
        # add entry for post-order traversal
        dfsstack.append((currnode, None))
        for nextnode, edge in graph.iteredges(currnode):
            if nextnode not in preorder:
                # Not seen previously: push
                dfsstack.append((nextnode, edge))
            else:
                # If an already visited node is in the same component, it is
                # either part of a cycle, or we need to traverse it again to
                # find all cycles.
                if in_progress[nextnode]:
                    if nextnode not in currset:
                        dfsstack.append((nextnode, edge))
                    # Only report cycles to nodes that haven't been processed
                    # yet to avoid duplicates.
                    elif linkorder[nextnode] == preorder[nextnode]:
                        for index in xrange(len(currlist)):
                            if currlist[index][0] == nextnode:
                                cycle = [(nextnode, edge)]
                                cycle.extend(currlist[index+1:])
                                graph.report_cycle(cycle, reporter)
                                break
                        else:
                            assert False

class ModuleDependencyGraph(object):

    """Module dependency graph representation for check_cycles().

    In the reported graph, the nodes are gmxtree.Module objects and the edges
    are gmxtree.ModuleDependency objects.
    """

    def __init__(self, tree):
        self._tree = tree

    def iternodes(self):
        return self._tree.get_modules()

    def iteredges(self, module):
        for dependency in module.get_dependencies():
            if not dependency.is_test_only_dependency():
                yield (dependency.get_other_module(), dependency)

    def report_cycle(self, cycle, reporter):
        if any([x[1].is_cycle_suppressed() for x in cycle]):
            # TODO: Report unused suppressions.
            return
        modulelist = ' -> '.join([x[0].get_name()[7:] for x in cycle])
        summary = 'module-level cyclic dependency: ' + modulelist
        reporter.cyclic_issue(summary)

def check_all(tree, reporter, check_ignored):
    """Do all checks for the GROMACS tree."""
    includesorter = IncludeSorter()
    for fileobj in tree.get_files():
        if isinstance(fileobj, gmxtree.GeneratorSourceFile):
            continue
        check_file(fileobj, tree, reporter)
        for includedfile in fileobj.get_includes():
            check_include(fileobj, includedfile, reporter)
        if fileobj.should_includes_be_sorted():
            is_sorted, details = includesorter.check_sorted(fileobj)
            if not is_sorted:
                details.append("You can use includesorter.py to do the sorting automatically; see docs/dev-manual/gmxtree.rst")
                reporter.code_issue(fileobj,
                        "include style/order is not consistent; see docs/dev-manual/includestyle.rst", details)

    for classobj in tree.get_classes():
        check_class(classobj, reporter)

    for memberobj in tree.get_members():
        check_member(memberobj, reporter, check_ignored)

    check_cycles(ModuleDependencyGraph(tree), reporter)
    tree.report_unused_cycle_suppressions(reporter)

def main():
    """Run the checking script."""
    parser = OptionParser()
    parser.add_option('-S', '--source-root',
                      help='Source tree root directory')
    parser.add_option('-B', '--build-root',
                      help='Build tree root directory')
    parser.add_option('-l', '--log',
                      help='Write issues into a given log file in addition to stderr')
    parser.add_option('--ignore',
                      help='Set file with patterns for messages to ignore')
    parser.add_option('--ignore-cycles',
                      help='Set file with module dependencies to ignore in cycles')
    parser.add_option('--check-ignored', action='store_true',
                      help='Issue notes for comments ignored by Doxygen')
    parser.add_option('-q', '--quiet', action='store_true',
                      help='Do not write status messages')
    parser.add_option('--exitcode', action='store_true',
                      help='Return non-zero exit code if there are warnings')
    options, args = parser.parse_args()

    reporter = Reporter(options.log)
    if options.ignore:
        reporter.load_filters(options.ignore)

    if not options.quiet:
        sys.stderr.write('Scanning source tree...\n')
    tree = GromacsTree(options.source_root, options.build_root, reporter)
    tree.load_git_attributes()
    tree.load_installed_file_list()
    if not options.quiet:
        sys.stderr.write('Reading source files...\n')
    # TODO: The checking should be possible without storing everything in memory
    tree.scan_files(keep_contents=True)
    if not options.quiet:
        sys.stderr.write('Finding config.h and other preprocessor macro uses...\n')
    tree.find_define_file_uses()
    if options.ignore_cycles:
        tree.load_cycle_suppression_list(options.ignore_cycles)
    if not options.quiet:
        sys.stderr.write('Reading Doxygen XML files...\n')
    tree.load_xml()

    reporter.write_pending()

    if not options.quiet:
        sys.stderr.write('Checking...\n')

    check_all(tree, reporter, options.check_ignored)

    reporter.write_pending()
    reporter.report_unused_filters()
    reporter.close_log()

    if options.exitcode and reporter.had_warnings():
        sys.exit(1)

main()