# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for Chromium.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""

_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
)

# The NetscapePlugIn library is excluded from pan-project checks as it will
# soon be deleted together with the rest of NPAPI, and it's not worthwhile to
# update its coding style until then.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)

# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)

_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')

_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Send mail to\n'
    'marja@chromium.org if this is not the case.')

_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       ' prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)

_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
            "customization_document_browsertest\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
            r"simple_platform_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
        r"^ui[\\\/]ozone[\\\/]platform[\\\/]dri[\\\/]native_display_delegate_proxy\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
)

_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')

_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_NACL_NONSFI',
    'OS_NACL_SFI',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)

def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as declarations of for-testing
  # functions in header files are hard to distinguish from calls to such
  # functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  exclusion_pattern = input_api.re.compile(
    r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
      base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
      affected_file,
      white_list=(file_inclusion_pattern, ),
      black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
          '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []

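# Illustrative sketch of what the heuristic above flags (hypothetical names,
# not part of the check itself): a changed line such as
#   widget->SetLayoutForTesting(layout);
# in a .cc file matches |inclusion_pattern|, while a definition like
#   void Widget::SetLayoutForTesting(Layout* layout) {
# is skipped by |exclusion_pattern|, and commented-out calls are skipped by
# |comment_pattern|.
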
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if len(files):
    return [ output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        files) ]
  return []

def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST"""
  problems = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.mm'))):
      continue

    for line_num, line in f.ChangedContents():
      if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
        problems.append('    %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
      '\n'.join(problems))]

def _FindHistogramNameInLine(histogram_name, line):
  """Tries to find a histogram name or prefix in a line."""
  if not "affected-histogram" in line:
    return histogram_name in line
  # A histogram_suffixes tag type has an affected-histogram name as a prefix of
  # the histogram_name.
  if not '"' in line:
    return False
  histogram_prefix = line.split('\"')[1]
  return histogram_prefix in histogram_name

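# Worked example for the prefix handling above (illustrative only): for a
# histograms.xml line such as
#   <affected-histogram name="Prerender"/>
# _FindHistogramNameInLine('Prerender.FinalStatus', line) returns True,
# because the quoted prefix "Prerender" is contained in the histogram name.
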
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself."""
  touched_histograms = []
  histograms_xml_modifications = []
  pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
      continue
    if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      continue
    for line_num, line in f.ChangedContents():
      found = pattern.search(line)
      if found:
        touched_histograms.append([found.group(1), f, line_num])

  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, on the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
      if histogram_name_found:
        break
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)

  problems = []
  if unmatched_histograms:
    with open('tools/metrics/histograms/histograms.xml') as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
          if histogram_name_found:
            break
        if not histogram_name_found:
          problems.append(' [%s:%d] %s' %
                          (f.LocalPath(), line_num, histogram_name))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
      'been modified and the associated histogram name has no match in either '
      'metrics/histograms.xml or the modifications of it:', problems)]

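# Illustrative example (hypothetical histogram name): a changed line such as
#   UMA_HISTOGRAM_COUNTS("Foo.Bar", size);
# makes the check above look for "Foo.Bar" first in the patch's histograms.xml
# hunks and then in tools/metrics/histograms/histograms.xml, and warn if the
# name is found in neither.
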
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.h')) or
        f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
        '/win/' in f.LocalPath()):
      continue

    allowWString = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        allowWString = True
      elif not allowWString and 'wstring' in line:
        problems.append('    %s:%d' % (f.LocalPath(), line_num))
        allowWString = False
      else:
        allowWString = False

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      '  If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(problems))]

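# Example of the escape hatch above (illustrative): a change that must touch a
# wstring can annotate the changed line immediately before it,
#   // presubmit: allow wstring
#   std::wstring legacy_name;
# and the wstring line will not be reported.
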
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  if any(f.LocalPath().endswith('.DEPS.git') for f in
      input_api.AffectedFiles()):
    return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
  return []

def _CheckValidHostsInDEPS(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
    return []
  # Outsource work to gclient verify
  try:
    input_api.subprocess.check_output(['gclient', 'verify'])
    return []
  except input_api.subprocess.CalledProcessError, error:
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]

def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""
  warnings = []
  errors = []

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        def IsBlacklisted(affected_file, blacklist):
          local_path = affected_file.LocalPath()
          for item in blacklist:
            if input_api.re.match(item, local_path):
              return True
          return False
        if IsBlacklisted(f, excluded_paths):
          continue
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  result = []
  if (warnings):
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if (errors):
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result

def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []

def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        problems.append('    %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]

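# Note (illustrative): since the check uses match() rather than search(), it
# only fires on changed lines that begin with the ternary, e.g. a wrapped
# continuation line such as
#   ? true : false;
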
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  import sys
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results

def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  if input_api.platform == 'win32':
    return []
  args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
          '--root', input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    args += ['--file', f.LocalPath()]
  checkperms = input_api.subprocess.Popen(args,
                                          stdout=input_api.subprocess.PIPE)
  errors = checkperms.communicate()[0].strip()
  if errors:
    return [output_api.PresubmitError('checkperms.py failed.',
                                      errors.splitlines())]
  return []

def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append('    %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
      'Header files should not include ui/aura/window_property.h', errors))
  return results

def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files
  """

  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      problem_linenums.append(line_num)
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append('    %s:%d' % (file_path, line_num))
  return warnings

def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  #    specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
    r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
    r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
    r'\s*#include '
    '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
    '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
        '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings

def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results

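# A correctly ordered block, per the rules above (illustrative paths):
#   #include "chrome/browser/foo.h"   // 1. corresponding header (in foo.cc)
#   #include <stddef.h>               // 2. C system headers
#   #include <string>                 // 3. C++ system headers
#   #include "base/logging.h"         // 4. project headers
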
def _CheckForVersionControlConflictsInFile(input_api, f):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  errors = []
  for line_num, line in f.ChangedContents():
    if pattern.match(line):
      errors.append('    %s:%d %s' % (f.LocalPath(), line_num, line))
  return errors

def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', errors))
  return results

def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
      affected_file,
      white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
      black_list=(_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        ['  %s:%d:  %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []

def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  errors = []
  white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
  black_list = (r'^native_client_sdk[\\\/]',)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=white_list, black_list=black_list)
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    errors.append('    %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results

def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          results.add('%s/DEPS' % path)
  return results

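# Worked examples of the mapping above (illustrative DEPS lines):
#   '+content/public/browser/render_view_host.h' is looked up as
#       content/public/browser/render_view_host.h
#   '+content/public/browser' is looked up as
#       content/public/browser/DEPS
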
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n    %s' %
             '\n    '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n    %s' %
          '\n    '.join(suggested_owners or [])))
    return output_list

  return []

def _CheckSpamLogging(input_api, output_api):
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^ipc[\\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if input_api.re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
      'These files spam the console log with LOG(INFO):',
      items=log_info)]
  if printf:
    return [output_api.PresubmitError(
      'These files spam the console log with printf/fprintf:',
      items=printf)]
  return []

def _CheckForAnonymousVariables(input_api, output_api):
  """These types are all expected to hold locks while in scope and
  so should never be anonymous (which causes them to be immediately
  destroyed)."""
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
  errors = []

  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    for linenum, line in f.ChangedContents():
      if bad_pattern.search(line) and not good_pattern.search(line):
        errors.append('%s:%d' % (f.LocalPath(), linenum))

  if errors:
    return [output_api.PresubmitError(
      'These lines create anonymous variables that need to be named:',
      items=errors)]
  return []

def _CheckCygwinShell(input_api, output_api):
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(r'.+\.(gyp|gypi)$',))
  cygwin_shell = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    for linenum, line in f.ChangedContents():
      if 'msvs_cygwin_shell' in line:
        cygwin_shell.append(f.LocalPath())
        break

  if cygwin_shell:
    return [output_api.PresubmitError(
      'These files should not use msvs_cygwin_shell (the default is 0):',
      items=cygwin_shell)]
  return []

def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if match:
        # Loads contents in tools/metrics/actions/actions.xml to memory. It's
        # loaded only once.
        if not current_actions:
          with open('tools/metrics/actions/actions.xml') as actions_f:
            current_actions = actions_f.read()
        # Search for the matched user action name in |current_actions|.
        for action_name in match.groups():
          action = 'name="{0}"'.format(action_name)
          if action not in current_actions:
            return [output_api.PresubmitPromptWarning(
              'File %s line %d: %s is missing in '
              'tools/metrics/actions/actions.xml. Please run '
              'tools/metrics/actions/extract_actions.py to update.'
              % (f.LocalPath(), line_num, action_name))]
  return []

def _GetJSONParseError(input_api, filename, eat_comments=True):
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      json_comment_eater = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      process = input_api.subprocess.Popen(
          [input_api.python_executable, json_comment_eater],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      (contents, _) = process.communicate(input=contents)

    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None

def _GetIDLParseError(input_api, filename):
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e

def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results

def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)

def _CheckForCopyrightedCode(input_api, output_api):
  """Verifies that newly added code doesn't contain copyrighted material
  and is properly licensed under the standard Chromium license.

  As there can be false positives, we maintain a whitelist file. This check
  also verifies that the whitelist file is up to date.
  """
  import sys
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'android_webview', 'tools')]
    import copyright_scanner
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return copyright_scanner.ScanAtPresubmit(input_api, output_api)

def _CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    black_list = (_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r"^base[\\\/]memory[\\\/]singleton\.h$",))
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
  files = []
  for f in input_api.AffectedSourceFiles(FileFilter):
    if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
        f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
      contents = input_api.ReadFile(f)
      for line in contents.splitlines(False):
        if (not input_api.re.match(r'//', line) and  # Strip C++ comment.
            pattern.search(line)):
          files.append(f)
          break

  if files:
    return [ output_api.PresubmitError(
        'Found Singleton<T> in the following header files.\n' +
        'Please move them to an appropriate source file so that the ' +
        'template gets instantiated in a single compilation unit.',
        files) ]
  return []

_DEPRECATED_CSS = [
  # Values
  ( "-webkit-box", "flex" ),
  ( "-webkit-inline-box", "inline-flex" ),
  ( "-webkit-flex", "flex" ),
  ( "-webkit-inline-flex", "inline-flex" ),
  ( "-webkit-min-content", "min-content" ),
  ( "-webkit-max-content", "max-content" ),

  # Properties
  ( "-webkit-background-clip", "background-clip" ),
  ( "-webkit-background-origin", "background-origin" ),
  ( "-webkit-background-size", "background-size" ),
  ( "-webkit-box-shadow", "box-shadow" ),

  # Functions
  ( "-webkit-gradient", "gradient" ),
  ( "-webkit-repeating-gradient", "repeating-gradient" ),
  ( "-webkit-linear-gradient", "linear-gradient" ),
  ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
  ( "-webkit-radial-gradient", "radial-gradient" ),
  ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
]

def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation is ignored by the hooks as it
      needs to be consumed by WebKit. """
  results = []
  file_inclusion_pattern = (r".+\.css$",)
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if deprecated_value in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
  return results

_DEPRECATED_JS = [
  ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
  ( "__defineGetter__", "Object.defineProperty" ),
  ( "__defineSetter__", "Object.defineProperty" ),
]

def _CheckNoDeprecatedJS(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  results = []
  file_inclusion_pattern = (r".+\.js$",)  # TODO(dbeam): .html?
  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for lnum, line in fpath.ChangedContents():
      for (deprecated, replacement) in _DEPRECATED_JS:
        if deprecated in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (fpath.LocalPath(), lnum, deprecated, replacement)))
  return results

def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  # TODO(danakj): Remove this when base/move.h is removed.
  results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckNoDeprecatedJS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForCopyrightedCode(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))

  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results

def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-googler/chromites committers, verify the author's email address is
  in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  valid_authors = (
      input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
      for line in open(authors_path))
  valid_authors = [item.group(1).lower() for item in valid_authors if item]
  if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []

def _CheckPatchFiles(input_api, output_api):
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
      if f.LocalPath().endswith(('.orig', '.rej'))]
  if problems:
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
  else:
    return []

def _DidYouMeanOSMacro(bad_macro):
  try:
    return {'A': 'OS_ANDROID',
            'B': 'OS_BSD',
            'C': 'OS_CHROMEOS',
            'F': 'OS_FREEBSD',
            'L': 'OS_LINUX',
            'M': 'OS_MACOSX',
            'N': 'OS_NACL',
            'O': 'OS_OPENBSD',
            'P': 'OS_POSIX',
            'S': 'OS_SOLARIS',
            'W': 'OS_WIN'}[bad_macro[3].upper()]
  except KeyError:
    return ''

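# Illustrative: the lookup keys off the fourth character of the bad macro, so
# _DidYouMeanOSMacro('OS_MAC') returns 'OS_MACOSX', while an unrecognized
# macro such as _DidYouMeanOSMacro('OS_XYZ') returns ''.
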
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append('    %s:%d %s%s' % (f.LocalPath(),
                                             lnum,
                                             match.group(1),
                                             did_you_mean))
  return results

def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]

def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
  """Check all affected files for invalid "if defined" macros."""
  ALWAYS_DEFINED_MACROS = (
      "TARGET_CPU_PPC",
      "TARGET_CPU_PPC64",
      "TARGET_CPU_68K",
      "TARGET_CPU_X86",
      "TARGET_CPU_ARM",
      "TARGET_CPU_MIPS",
      "TARGET_CPU_SPARC",
      "TARGET_CPU_ALPHA",
      "TARGET_IPHONE_SIMULATOR",
      "TARGET_OS_EMBEDDED",
      "TARGET_OS_IPHONE",
      "TARGET_OS_MAC",
      "TARGET_OS_UNIX",
      "TARGET_OS_WIN32",
  )
  ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
  results = []
  for lnum, line in f.ChangedContents():
    for match in ifdef_macro.finditer(line):
      if match.group(1) in ALWAYS_DEFINED_MACROS:
        always_defined = ' %s is always defined. ' % match.group(1)
        did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
        results.append('    %s:%d %s\n\t%s' % (f.LocalPath(),
                                               lnum,
                                               always_defined,
                                               did_you_mean))
  return results

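# Example of what gets flagged above (illustrative): a changed line such as
#   #ifdef TARGET_OS_IPHONE
# is reported, because TARGET_OS_IPHONE is always defined (to 0 or 1), and the
# suggested spelling is '#if TARGET_OS_IPHONE'.
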
def _CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
      bad_macros)]

def _CheckForUsingSideEffectsOfPass(input_api, output_api):
  """Check all affected files for using side effects of Pass."""
  errors = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      for lnum, line in f.ChangedContents():
        # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
        if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
          errors.append(output_api.PresubmitError(
              ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
               'See crbug.com/418297.') % (f.LocalPath(), lnum)))
  return errors

def _CheckForIPCRules(input_api, output_api):
  """Check for same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  """
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

  problems = []
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    if not local_path.endswith('.h'):
      continue
    for line_number, line in f.ChangedContents():
      if inclusion_pattern.search(line) and not comment_pattern.search(line):
        problems.append(
          '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptWarning(
        _IPC_ENUM_TRAITS_DEPRECATED, problems)]
  else:
    return []

def _CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  endings.
  """
  known_text_files = r'.*\.(txt|html|htm|mhtml|py)$'

  file_inclusion_pattern = (
    known_text_files,
    r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  )

  filter = lambda f: input_api.FilterSourceFile(
    f, white_list=file_inclusion_pattern, black_list=None)
  files = [f.LocalPath() for f in
           input_api.AffectedSourceFiles(filter)]

  problems = []

  for file in files:
    fp = open(file, 'r')
    for line in fp:
      if line.endswith('\r\n'):
        problems.append(file)
        break
    fp.close()

  if problems:
    return [output_api.PresubmitPromptWarning('Are you sure that you want '
        'these files to contain Windows style line endings?\n' +
        '\n'.join(problems))]

  return []

def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(_CheckValidHostsInDEPS(input_api, output_api))
  results.extend(_CheckJavaStyle(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckGNFormatted(input_api, output_api))
  results.extend(_CheckUmaHistogramChanges(input_api, output_api))
  return results

def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = master_map.get(bot)
  if not master:
    if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
      master = 'tryserver.chromium.linux'
    elif 'win' in bot:
      master = 'tryserver.chromium.win'
    elif 'mac' in bot or 'ios' in bot:
      master = 'tryserver.chromium.mac'
  return master

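# Illustrative guesses from the fallback above (hypothetical bot names):
#   GetTryServerMasterForBot('linux_chromium_rel')   -> 'tryserver.chromium.linux'
#   GetTryServerMasterForBot('win_chromium_x64_rel') -> 'tryserver.chromium.win'
#   GetTryServerMasterForBot('unknown_bot')          -> None
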
def GetDefaultTryConfigs(bots):
  """Returns a map of try server master to {bot: set(['defaulttests'])} for
  the given bots.
  """
  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  for bot, tests in builders_and_tests.iteritems():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out

def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  return results

def GetPreferredTryMasters(project, change):
  import re
  files = change.LocalPaths()

  import os
  import json
  with open(os.path.join(
      change.RepositoryRoot(), 'testing', 'commit_queue', 'config.json')) as f:
    cq_config = json.load(f)
  cq_trybots = cq_config.get('trybots', {})
  builders = cq_trybots.get('launched', {})
  for master, master_config in cq_trybots.get('triggered', {}).iteritems():
    for triggered_bot in master_config:
      builders.get(master, {}).pop(triggered_bot, None)

  # Explicitly iterate over copies of dicts since we mutate them.
  for master in builders.keys():
    for builder in builders[master].keys():
      # Do not trigger presubmit builders, since they're likely to fail
      # (e.g. OWNERS checks before finished code review), and we're
      # running local presubmit anyway.
      if 'presubmit' in builder:
        builders[master].pop(builder)

  return builders