1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
9 """
12 _EXCLUDED_PATHS = (
13 r"^breakpad[\\\/].*",
14 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
15 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
16 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
17 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
18 r"^skia[\\\/].*",
19 r"^v8[\\\/].*",
20 r".*MakeFile$",
21 r".+_autogen\.h$",
22 r".+[\\\/]pnacl_shim\.c$",
23 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
24 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
27 # The NetscapePlugIn library is excluded from pan-project as it will soon
28 # be deleted together with the rest of the NPAPI and it's not worthwhile to
29 # update the coding style until then.
30 _TESTRUNNER_PATHS = (
31 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
34 # Fragment of a regular expression that matches C++ and Objective-C++
35 # implementation files.
36 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
38 # Regular expression that matches code only used for test binaries
39 # (best effort).
40 _TEST_CODE_EXCLUDED_PATHS = (
41 r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
42 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
43 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
44 _IMPLEMENTATION_EXTENSIONS,
45 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.*[\\\/](test|tool(s)?)[\\\/].*',
47 # content_shell is used for running layout tests.
48 r'content[\\\/]shell[\\\/].*',
49 # At request of folks maintaining this folder.
50 r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
51 # Non-production example code.
52 r'mojo[\\\/]examples[\\\/].*',
53 # Launcher for running iOS tests on the simulator.
54 r'testing[\\\/]iossim[\\\/]iossim\.mm$',
55 )
57 _TEST_ONLY_WARNING = (
58 'You might be calling functions intended only for testing from\n'
59 'production code. It is OK to ignore this warning if you know what\n'
60 'you are doing, as the heuristics used to detect the situation are\n'
61 'not perfect. The commit queue will not block on this warning.')
64 _INCLUDE_ORDER_WARNING = (
65 'Your #include order seems to be broken. Send mail to\n'
66 'marja@chromium.org if this is not the case.')
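# Note on the banned-function tables below: each entry gives the banned name
# (or, when it starts with '/', a regular expression), the message lines to
# print, and whether a match is treated as an error rather than a warning
# (see _CheckNoBannedFunctions).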
69 _BANNED_OBJC_FUNCTIONS = (
71 'addTrackingRect:',
73 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
74 'prohibited. Please use CrTrackingArea instead.',
75 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
77 False,
80 r'/NSTrackingArea\W',
82 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
83 'instead.',
84 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
86 False,
89 'convertPointFromBase:',
91 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
92 'Please use |convertPoint:(point) fromView:nil| instead.',
93 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
95 True,
98 'convertPointToBase:',
100 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
101 'Please use |convertPoint:(point) toView:nil| instead.',
102 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
104 True,
107 'convertRectFromBase:',
109 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
110 'Please use |convertRect:(point) fromView:nil| instead.',
111 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
113 True,
116 'convertRectToBase:',
118 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
119 'Please use |convertRect:(point) toView:nil| instead.',
120 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
122 True,
125 'convertSizeFromBase:',
127 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
128 'Please use |convertSize:(point) fromView:nil| instead.',
129 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
131 True,
134 'convertSizeToBase:',
136 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
137 'Please use |convertSize:(point) toView:nil| instead.',
138 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
140 True,
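# Entries in _BANNED_CPP_FUNCTIONS below carry one additional element: a tuple
# of path regexes whose files are exempt from the check (see the unpacking in
# _CheckNoBannedFunctions).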
145 _BANNED_CPP_FUNCTIONS = (
146 # Make sure that gtest's FRIEND_TEST() macro is not used; the
147 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
148 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
150 'FRIEND_TEST(',
152 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
153 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
155 False,
159 'ScopedAllowIO',
161 'New code should not use ScopedAllowIO. Post a task to the blocking',
162 'pool or the FILE thread instead.',
164 True,
166 r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
167 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
168 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
169 "customization_document_browsertest\.cc$",
170 r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
171 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
172 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
173 r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
174 r"simple_platform_shared_buffer_posix\.cc$",
175 r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
176 r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
177 r"^ui[\\\/]ozone[\\\/]platform[\\\/]dri[\\\/]native_display_delegate_proxy\.cc$",
181 'SkRefPtr',
183 'The use of SkRefPtr is prohibited. ',
184 'Please use skia::RefPtr instead.'
186 True,
190 'SkAutoRef',
192 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
193 'Please use skia::RefPtr instead.'
195 True,
199 'SkAutoTUnref',
201 'The use of SkAutoTUnref is dangerous because it implicitly ',
202 'converts to a raw pointer. Please use skia::RefPtr instead.'
204 True,
208 'SkAutoUnref',
210 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
211 'because it implicitly converts to a raw pointer. ',
212 'Please use skia::RefPtr instead.'
214 True,
218 r'/HANDLE_EINTR\(.*close',
220 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
221 'descriptor will be closed, and it is incorrect to retry the close.',
222 'Either call close directly and ignore its return value, or wrap close',
223 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
225 True,
229 r'/IGNORE_EINTR\((?!.*close)',
231 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
232 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
234 True,
236 # Files that #define IGNORE_EINTR.
237 r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
238 r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
242 r'/v8::Extension\(',
244 'Do not introduce new v8::Extensions into the code base, use',
245 'gin::Wrappable instead. See http://crbug.com/334679',
247 True,
249 r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
254 _IPC_ENUM_TRAITS_DEPRECATED = (
255 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
256 'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')
259 _VALID_OS_MACROS = (
260 # Please keep sorted.
261 'OS_ANDROID',
262 'OS_ANDROID_HOST',
263 'OS_BSD',
264 'OS_CAT', # For testing.
265 'OS_CHROMEOS',
266 'OS_FREEBSD',
267 'OS_IOS',
268 'OS_LINUX',
269 'OS_MACOSX',
270 'OS_NACL',
271 'OS_NACL_NONSFI',
272 'OS_NACL_SFI',
273 'OS_OPENBSD',
274 'OS_POSIX',
275 'OS_QNX',
276 'OS_SOLARIS',
277 'OS_WIN',
278 )
281 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
282 """Attempts to prevent use of functions intended only for testing in
283 non-testing code. For now this is just a best-effort implementation
284 that ignores header files and may have some false positives. A
285 better implementation would probably need a proper C++ parser.
286 """
287 # We only scan .cc files and the like, as the declaration of
288 # for-testing functions in header files are hard to distinguish from
289 # calls to such functions without a proper C++ parser.
290 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
292 base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
293 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
294 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
295 exclusion_pattern = input_api.re.compile(
296 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
297 base_function_pattern, base_function_pattern))
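# Illustrative (hypothetical) lines, given the patterns above:
#   flagged:     manager->GetCountForTesting();
#   not flagged: int FooClass::GetCountForTesting() {        (definition)
#   not flagged: // GetCountForTesting() is exercised below  (comment)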
299 def FilterFile(affected_file):
300 black_list = (_EXCLUDED_PATHS +
301 _TEST_CODE_EXCLUDED_PATHS +
302 input_api.DEFAULT_BLACK_LIST)
303 return input_api.FilterSourceFile(
304 affected_file,
305 white_list=(file_inclusion_pattern, ),
306 black_list=black_list)
308 problems = []
309 for f in input_api.AffectedSourceFiles(FilterFile):
310 local_path = f.LocalPath()
311 for line_number, line in f.ChangedContents():
312 if (inclusion_pattern.search(line) and
313 not comment_pattern.search(line) and
314 not exclusion_pattern.search(line)):
315 problems.append(
316 '%s:%d\n %s' % (local_path, line_number, line.strip()))
318 if problems:
319 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
320 else:
321 return []
324 def _CheckNoIOStreamInHeaders(input_api, output_api):
325 """Checks to make sure no .h files include <iostream>."""
326 files = []
327 pattern = input_api.re.compile(r'^#include\s*<iostream>',
328 input_api.re.MULTILINE)
329 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
330 if not f.LocalPath().endswith('.h'):
331 continue
332 contents = input_api.ReadFile(f)
333 if pattern.search(contents):
334 files.append(f)
336 if len(files):
337 return [ output_api.PresubmitError(
338 'Do not #include <iostream> in header files, since it inserts static '
339 'initialization into every file including the header. Instead, '
340 '#include <ostream>. See http://crbug.com/94794',
341 files) ]
342 return []
345 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
346 """Checks to make sure no source files use UNIT_TEST"""
347 problems = []
348 for f in input_api.AffectedFiles():
349 if (not f.LocalPath().endswith(('.cc', '.mm'))):
350 continue
352 for line_num, line in f.ChangedContents():
353 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
354 problems.append(' %s:%d' % (f.LocalPath(), line_num))
356 if not problems:
357 return []
358 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
359 '\n'.join(problems))]
362 def _FindHistogramNameInLine(histogram_name, line):
363 """Tries to find a histogram name or prefix in a line."""
364 if not "affected-histogram" in line:
365 return histogram_name in line
366 # A histogram_suffixes tag type has an affected-histogram name as a prefix of
367 # the histogram_name.
368 if not '"' in line:
369 return False
370 histogram_prefix = line.split('\"')[1]
371 return histogram_prefix in histogram_name
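# Example (hypothetical names): for a touched histogram "Foo.Bar.Baz", a
# histograms.xml line such as <affected-histogram name="Foo.Bar"> counts as a
# match, because the quoted name is contained in the touched histogram name.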
374 def _CheckUmaHistogramChanges(input_api, output_api):
375 """Check that UMA histogram names in touched lines can still be found in other
376 lines of the patch or in histograms.xml. Note that this check would not catch
377 the reverse: changes in histograms.xml not matched in the code itself."""
378 touched_histograms = []
379 histograms_xml_modifications = []
380 pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
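# e.g. a changed line 'UMA_HISTOGRAM_TIMES("Example.Metric", delta)' yields the
# histogram name "Example.Metric" in group(1) (hypothetical name).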
381 for f in input_api.AffectedFiles():
382 # If histograms.xml itself is modified, keep the modified lines for later.
383 if f.LocalPath().endswith(('histograms.xml')):
384 histograms_xml_modifications = f.ChangedContents()
385 continue
386 if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
387 continue
388 for line_num, line in f.ChangedContents():
389 found = pattern.search(line)
390 if found:
391 touched_histograms.append([found.group(1), f, line_num])
393 # Search for the touched histogram names in the local modifications to
394 # histograms.xml, and, if not found, on the base histograms.xml file.
395 unmatched_histograms = []
396 for histogram_info in touched_histograms:
397 histogram_name_found = False
398 for line_num, line in histograms_xml_modifications:
399 histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
400 if histogram_name_found:
401 break
402 if not histogram_name_found:
403 unmatched_histograms.append(histogram_info)
405 problems = []
406 if unmatched_histograms:
407 with open('tools/metrics/histograms/histograms.xml') as histograms_xml:
408 for histogram_name, f, line_num in unmatched_histograms:
409 histogram_name_found = False
410 for line in histograms_xml:
411 histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
412 if histogram_name_found:
413 break
414 if not histogram_name_found:
415 problems.append(' [%s:%d] %s' %
416 (f.LocalPath(), line_num, histogram_name))
418 if not problems:
419 return []
420 return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
421 'been modified and the associated histogram name has no match in either '
422 'metrics/histograms.xml or the modifications of it:', problems)]
425 def _CheckNoNewWStrings(input_api, output_api):
426 """Checks to make sure we don't introduce use of wstrings."""
427 problems = []
428 for f in input_api.AffectedFiles():
429 if (not f.LocalPath().endswith(('.cc', '.h')) or
430 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
431 '/win/' in f.LocalPath()):
432 continue
434 allowWString = False
435 for line_num, line in f.ChangedContents():
436 if 'presubmit: allow wstring' in line:
437 allowWString = True
438 elif not allowWString and 'wstring' in line:
439 problems.append(' %s:%d' % (f.LocalPath(), line_num))
440 allowWString = False
441 else:
442 allowWString = False
444 if not problems:
445 return []
446 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
447 ' If you are calling a cross-platform API that accepts a wstring, '
448 'fix the API.\n' +
449 '\n'.join(problems))]
452 def _CheckNoDEPSGIT(input_api, output_api):
453 """Make sure .DEPS.git is never modified manually."""
454 if any(f.LocalPath().endswith('.DEPS.git') for f in
455 input_api.AffectedFiles()):
456 return [output_api.PresubmitError(
457 'Never commit changes to .DEPS.git. This file is maintained by an\n'
458 'automated system based on what\'s in DEPS and your changes will be\n'
459 'overwritten.\n'
460 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
461 'for more information')]
462 return []
465 def _CheckValidHostsInDEPS(input_api, output_api):
466 """Checks that DEPS file deps are from allowed_hosts."""
467 # Run only if DEPS file has been modified to annoy fewer bystanders.
468 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
469 return []
470 # Outsource work to gclient verify
471 try:
472 input_api.subprocess.check_output(['gclient', 'verify'])
473 return []
474 except input_api.subprocess.CalledProcessError, error:
475 return [output_api.PresubmitError(
476 'DEPS file must have only git dependencies.',
477 long_text=error.output)]
480 def _CheckNoBannedFunctions(input_api, output_api):
481 """Make sure that banned functions are not used."""
482 warnings = []
483 errors = []
485 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
486 for f in input_api.AffectedFiles(file_filter=file_filter):
487 for line_num, line in f.ChangedContents():
488 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
489 matched = False
490 if func_name[0:1] == '/':
491 regex = func_name[1:]
492 if input_api.re.search(regex, line):
493 matched = True
494 elif func_name in line:
495 matched = True
496 if matched:
497 problems = warnings;
498 if error:
499 problems = errors;
500 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
501 for message_line in message:
502 problems.append(' %s' % message_line)
504 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
505 for f in input_api.AffectedFiles(file_filter=file_filter):
506 for line_num, line in f.ChangedContents():
507 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
508 def IsBlacklisted(affected_file, blacklist):
509 local_path = affected_file.LocalPath()
510 for item in blacklist:
511 if input_api.re.match(item, local_path):
512 return True
513 return False
514 if IsBlacklisted(f, excluded_paths):
515 continue
516 matched = False
517 if func_name[0:1] == '/':
518 regex = func_name[1:]
519 if input_api.re.search(regex, line):
520 matched = True
521 elif func_name in line:
522 matched = True
523 if matched:
524 problems = warnings;
525 if error:
526 problems = errors;
527 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
528 for message_line in message:
529 problems.append(' %s' % message_line)
531 result = []
532 if (warnings):
533 result.append(output_api.PresubmitPromptWarning(
534 'Banned functions were used.\n' + '\n'.join(warnings)))
535 if (errors):
536 result.append(output_api.PresubmitError(
537 'Banned functions were used.\n' + '\n'.join(errors)))
538 return result
541 def _CheckNoPragmaOnce(input_api, output_api):
542 """Make sure that banned functions are not used."""
543 files = []
544 pattern = input_api.re.compile(r'^#pragma\s+once',
545 input_api.re.MULTILINE)
546 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
547 if not f.LocalPath().endswith('.h'):
548 continue
549 contents = input_api.ReadFile(f)
550 if pattern.search(contents):
551 files.append(f)
553 if files:
554 return [output_api.PresubmitError(
555 'Do not use #pragma once in header files.\n'
556 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
557 files)]
558 return []
561 def _CheckNoTrinaryTrueFalse(input_api, output_api):
562 """Checks to make sure we don't introduce use of foo ? true : false."""
563 problems = []
564 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
565 for f in input_api.AffectedFiles():
566 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
567 continue
569 for line_num, line in f.ChangedContents():
570 if pattern.match(line):
571 problems.append(' %s:%d' % (f.LocalPath(), line_num))
573 if not problems:
574 return []
575 return [output_api.PresubmitPromptWarning(
576 'Please consider avoiding the "? true : false" pattern if possible.\n' +
577 '\n'.join(problems))]
580 def _CheckUnwantedDependencies(input_api, output_api):
581 """Runs checkdeps on #include statements added in this
582 change. Breaking - rules is an error, breaking ! rules is a
583 warning.
584 """
585 import sys
586 # We need to wait until we have an input_api object and use this
587 # roundabout construct to import checkdeps because this file is
588 # eval-ed and thus doesn't have __file__.
589 original_sys_path = sys.path
590 try:
591 sys.path = sys.path + [input_api.os_path.join(
592 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
593 import checkdeps
594 from cpp_checker import CppChecker
595 from rules import Rule
596 finally:
597 # Restore sys.path to what it was before.
598 sys.path = original_sys_path
600 added_includes = []
601 for f in input_api.AffectedFiles():
602 if not CppChecker.IsCppFile(f.LocalPath()):
603 continue
605 changed_lines = [line for line_num, line in f.ChangedContents()]
606 added_includes.append([f.LocalPath(), changed_lines])
608 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
610 error_descriptions = []
611 warning_descriptions = []
612 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
613 added_includes):
614 description_with_path = '%s\n %s' % (path, rule_description)
615 if rule_type == Rule.DISALLOW:
616 error_descriptions.append(description_with_path)
617 else:
618 warning_descriptions.append(description_with_path)
620 results = []
621 if error_descriptions:
622 results.append(output_api.PresubmitError(
623 'You added one or more #includes that violate checkdeps rules.',
624 error_descriptions))
625 if warning_descriptions:
626 results.append(output_api.PresubmitPromptOrNotify(
627 'You added one or more #includes of files that are temporarily\n'
628 'allowed but being removed. Can you avoid introducing the\n'
629 '#include? See relevant DEPS file(s) for details and contacts.',
630 warning_descriptions))
631 return results
634 def _CheckFilePermissions(input_api, output_api):
635 """Check that all files have their permissions properly set."""
636 if input_api.platform == 'win32':
637 return []
638 args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
639 '--root', input_api.change.RepositoryRoot()]
640 for f in input_api.AffectedFiles():
641 args += ['--file', f.LocalPath()]
642 checkperms = input_api.subprocess.Popen(args,
643 stdout=input_api.subprocess.PIPE)
644 errors = checkperms.communicate()[0].strip()
645 if errors:
646 return [output_api.PresubmitError('checkperms.py failed.',
647 errors.splitlines())]
648 return []
651 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
652 """Makes sure we don't include ui/aura/window_property.h
653 in header files.
654 """
655 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
656 errors = []
657 for f in input_api.AffectedFiles():
658 if not f.LocalPath().endswith('.h'):
659 continue
660 for line_num, line in f.ChangedContents():
661 if pattern.match(line):
662 errors.append(' %s:%d' % (f.LocalPath(), line_num))
664 results = []
665 if errors:
666 results.append(output_api.PresubmitError(
667 'Header files should not include ui/aura/window_property.h', errors))
668 return results
671 def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
672 """Checks that the lines in scope occur in the right order.
674 1. C system files in alphabetical order
675 2. C++ system files in alphabetical order
676 3. Project's .h files
677 """
679 c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
680 cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
681 custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
683 C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
685 state = C_SYSTEM_INCLUDES
687 previous_line = ''
688 previous_line_num = 0
689 problem_linenums = []
690 for line_num, line in scope:
691 if c_system_include_pattern.match(line):
692 if state != C_SYSTEM_INCLUDES:
693 problem_linenums.append((line_num, previous_line_num))
694 elif previous_line and previous_line > line:
695 problem_linenums.append((line_num, previous_line_num))
696 elif cpp_system_include_pattern.match(line):
697 if state == C_SYSTEM_INCLUDES:
698 state = CPP_SYSTEM_INCLUDES
699 elif state == CUSTOM_INCLUDES:
700 problem_linenums.append((line_num, previous_line_num))
701 elif previous_line and previous_line > line:
702 problem_linenums.append((line_num, previous_line_num))
703 elif custom_include_pattern.match(line):
704 if state != CUSTOM_INCLUDES:
705 state = CUSTOM_INCLUDES
706 elif previous_line and previous_line > line:
707 problem_linenums.append((line_num, previous_line_num))
708 else:
709 problem_linenums.append(line_num)
710 previous_line = line
711 previous_line_num = line_num
713 warnings = []
714 for (line_num, previous_line_num) in problem_linenums:
715 if line_num in changed_linenums or previous_line_num in changed_linenums:
716 warnings.append(' %s:%d' % (file_path, line_num))
717 return warnings
720 def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
721 """Checks the #include order for the given file f."""
723 system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
724 # Exclude the following includes from the check:
725 # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
726 # specific order.
727 # 2) <atlbase.h>, "build/build_config.h"
728 excluded_include_pattern = input_api.re.compile(
729 r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
730 custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
731 # Match the final or penultimate token if it is xxxtest so we can ignore it
732 # when considering the special first include.
733 test_file_tag_pattern = input_api.re.compile(
734 r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
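# e.g. "foo_view_unittest.cc" becomes "foo_view.cc" and "bar_browsertest_mac.mm"
# becomes "bar_mac.mm" before the special first include is matched (hypothetical
# file names).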
735 if_pattern = input_api.re.compile(
736 r'\s*#\s*(if|elif|else|endif|define|undef).*')
737 # Some files need specialized order of includes; exclude such files from this
738 # check.
739 uncheckable_includes_pattern = input_api.re.compile(
740 r'\s*#include '
741 '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
743 contents = f.NewContents()
744 warnings = []
745 line_num = 0
747 # Handle the special first include. If the first include file is
748 # some/path/file.h, the corresponding including file can be some/path/file.cc,
749 # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
750 # etc. It's also possible that no special first include exists.
751 # If the included file is some/path/file_platform.h the including file could
752 # also be some/path/file_xxxtest_platform.h.
753 including_file_base_name = test_file_tag_pattern.sub(
754 '', input_api.os_path.basename(f.LocalPath()))
756 for line in contents:
757 line_num += 1
758 if system_include_pattern.match(line):
759 # No special first include -> process the line again along with normal
760 # includes.
761 line_num -= 1
762 break
763 match = custom_include_pattern.match(line)
764 if match:
765 match_dict = match.groupdict()
766 header_basename = test_file_tag_pattern.sub(
767 '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
769 if header_basename not in including_file_base_name:
770 # No special first include -> process the line again along with normal
771 # includes.
772 line_num -= 1
773 break
775 # Split into scopes: Each region between #if and #endif is its own scope.
776 scopes = []
777 current_scope = []
778 for line in contents[line_num:]:
779 line_num += 1
780 if uncheckable_includes_pattern.match(line):
781 continue
782 if if_pattern.match(line):
783 scopes.append(current_scope)
784 current_scope = []
785 elif ((system_include_pattern.match(line) or
786 custom_include_pattern.match(line)) and
787 not excluded_include_pattern.match(line)):
788 current_scope.append((line_num, line))
789 scopes.append(current_scope)
791 for scope in scopes:
792 warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
793 changed_linenums))
794 return warnings
797 def _CheckIncludeOrder(input_api, output_api):
798 """Checks that the #include order is correct.
800 1. The corresponding header for source files.
801 2. C system files in alphabetical order
802 3. C++ system files in alphabetical order
803 4. Project's .h files in alphabetical order
805 Each region separated by #if, #elif, #else, #endif, #define and #undef follows
806 these rules separately.
807 """
808 def FileFilterIncludeOrder(affected_file):
809 black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
810 return input_api.FilterSourceFile(affected_file, black_list=black_list)
812 warnings = []
813 for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
814 if f.LocalPath().endswith(('.cc', '.h')):
815 changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
816 warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
818 results = []
819 if warnings:
820 results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
821 warnings))
822 return results
825 def _CheckForVersionControlConflictsInFile(input_api, f):
826 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
827 errors = []
828 for line_num, line in f.ChangedContents():
829 if pattern.match(line):
830 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
831 return errors
834 def _CheckForVersionControlConflicts(input_api, output_api):
835 """Usually this is not intentional and will cause a compile failure."""
836 errors = []
837 for f in input_api.AffectedFiles():
838 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
840 results = []
841 if errors:
842 results.append(output_api.PresubmitError(
843 'Version control conflict markers found, please resolve.', errors))
844 return results
847 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
848 def FilterFile(affected_file):
849 """Filter function for use with input_api.AffectedSourceFiles,
850 below. This filters out everything except non-test files from
851 top-level directories that generally speaking should not hard-code
852 service URLs (e.g. src/android_webview/, src/content/ and others).
853 """
854 return input_api.FilterSourceFile(
855 affected_file,
856 white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
857 black_list=(_EXCLUDED_PATHS +
858 _TEST_CODE_EXCLUDED_PATHS +
859 input_api.DEFAULT_BLACK_LIST))
861 base_pattern = '"[^"]*google\.com[^"]*"'
862 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
863 pattern = input_api.re.compile(base_pattern)
864 problems = [] # items are (filename, line_number, line)
865 for f in input_api.AffectedSourceFiles(FilterFile):
866 for line_num, line in f.ChangedContents():
867 if not comment_pattern.search(line) and pattern.search(line):
868 problems.append((f.LocalPath(), line_num, line))
870 if problems:
871 return [output_api.PresubmitPromptOrNotify(
872 'Most layers below src/chrome/ should not hardcode service URLs.\n'
873 'Are you sure this is correct?',
874 [' %s:%d: %s' % (
875 problem[0], problem[1], problem[2]) for problem in problems])]
876 else:
877 return []
880 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
881 """Makes sure there are no abbreviations in the name of PNG files.
882 The native_client_sdk directory is excluded because it has auto-generated PNG
883 files for documentation.
884 """
885 errors = []
886 white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
887 black_list = (r'^native_client_sdk[\\\/]',)
888 file_filter = lambda f: input_api.FilterSourceFile(
889 f, white_list=white_list, black_list=black_list)
890 for f in input_api.AffectedFiles(include_deletes=False,
891 file_filter=file_filter):
892 errors.append(' %s' % f.LocalPath())
894 results = []
895 if errors:
896 results.append(output_api.PresubmitError(
897 'The name of PNG files should not have abbreviations. \n'
898 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
899 'Contact oshima@chromium.org if you have questions.', errors))
900 return results
903 def _FilesToCheckForIncomingDeps(re, changed_lines):
904 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
905 a set of DEPS entries that we should look up.
907 For a directory (rather than a specific filename) we fake a path to
908 a specific filename by adding /DEPS. This is chosen as a file that
909 will seldom or never be subject to per-file include_rules.
910 """
911 # We ignore deps entries on auto-generated directories.
912 AUTO_GENERATED_DIRS = ['grit', 'jni']
914 # This pattern grabs the path without basename in the first
915 # parentheses, and the basename (if present) in the second. It
916 # relies on the simple heuristic that if there is a basename it will
917 # be a header file ending in ".h".
918 pattern = re.compile(
919 r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
920 results = set()
921 for changed_line in changed_lines:
922 m = pattern.match(changed_line)
923 if m:
924 path = m.group(1)
925 if path.split('/')[0] not in AUTO_GENERATED_DIRS:
926 if m.group(2):
927 results.add('%s%s' % (path, m.group(2)))
928 else:
929 results.add('%s/DEPS' % path)
930 return results
933 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
934 """When a dependency prefixed with + is added to a DEPS file, we
935 want to make sure that the change is reviewed by an OWNER of the
936 target file or directory, to avoid layering violations from being
937 introduced. This check verifies that this happens.
938 """
939 changed_lines = set()
940 for f in input_api.AffectedFiles():
941 filename = input_api.os_path.basename(f.LocalPath())
942 if filename == 'DEPS':
943 changed_lines |= set(line.strip()
944 for line_num, line
945 in f.ChangedContents())
946 if not changed_lines:
947 return []
949 virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
950 changed_lines)
951 if not virtual_depended_on_files:
952 return []
954 if input_api.is_committing:
955 if input_api.tbr:
956 return [output_api.PresubmitNotifyResult(
957 '--tbr was specified, skipping OWNERS check for DEPS additions')]
958 if not input_api.change.issue:
959 return [output_api.PresubmitError(
960 "DEPS approval by OWNERS check failed: this change has "
961 "no Rietveld issue number, so we can't check it for approvals.")]
962 output = output_api.PresubmitError
963 else:
964 output = output_api.PresubmitNotifyResult
966 owners_db = input_api.owners_db
967 owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
968 input_api,
969 owners_db.email_regexp,
970 approval_needed=input_api.is_committing)
972 owner_email = owner_email or input_api.change.author_email
974 reviewers_plus_owner = set(reviewers)
975 if owner_email:
976 reviewers_plus_owner.add(owner_email)
977 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
978 reviewers_plus_owner)
980 # We strip the /DEPS part that was added by
981 # _FilesToCheckForIncomingDeps to fake a path to a file in a
982 # directory.
983 def StripDeps(path):
984 start_deps = path.rfind('/DEPS')
985 if start_deps != -1:
986 return path[:start_deps]
987 else:
988 return path
989 unapproved_dependencies = ["'+%s'," % StripDeps(path)
990 for path in missing_files]
992 if unapproved_dependencies:
993 output_list = [
994 output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
995 '\n '.join(sorted(unapproved_dependencies)))]
996 if not input_api.is_committing:
997 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
998 output_list.append(output(
999 'Suggested missing target path OWNERS:\n %s' %
1000 '\n '.join(suggested_owners or [])))
1001 return output_list
1003 return []
1006 def _CheckSpamLogging(input_api, output_api):
1007 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1008 black_list = (_EXCLUDED_PATHS +
1009 _TEST_CODE_EXCLUDED_PATHS +
1010 input_api.DEFAULT_BLACK_LIST +
1011 (r"^base[\\\/]logging\.h$",
1012 r"^base[\\\/]logging\.cc$",
1013 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
1014 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
1015 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
1016 r"startup_browser_creator\.cc$",
1017 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
1018 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
1019 r"diagnostics_writer\.cc$",
1020 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
1021 r"^chromecast[\\\/]",
1022 r"^cloud_print[\\\/]",
1023 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
1024 r"gl_helper_benchmark\.cc$",
1025 r"^courgette[\\\/]courgette_tool\.cc$",
1026 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
1027 r"^ipc[\\\/]ipc_logging\.cc$",
1028 r"^native_client_sdk[\\\/]",
1029 r"^remoting[\\\/]base[\\\/]logging\.h$",
1030 r"^remoting[\\\/]host[\\\/].*",
1031 r"^sandbox[\\\/]linux[\\\/].*",
1032 r"^tools[\\\/]",
1033 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
1034 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
1035 r"dump_file_system.cc$",))
1036 source_file_filter = lambda x: input_api.FilterSourceFile(
1037 x, white_list=(file_inclusion_pattern,), black_list=black_list)
1039 log_info = []
1040 printf = []
1042 for f in input_api.AffectedSourceFiles(source_file_filter):
1043 contents = input_api.ReadFile(f, 'rb')
1044 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
1045 log_info.append(f.LocalPath())
1046 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
1047 log_info.append(f.LocalPath())
1049 if input_api.re.search(r"\bprintf\(", contents):
1050 printf.append(f.LocalPath())
1051 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
1052 printf.append(f.LocalPath())
1054 if log_info:
1055 return [output_api.PresubmitError(
1056 'These files spam the console log with LOG(INFO):',
1057 items=log_info)]
1058 if printf:
1059 return [output_api.PresubmitError(
1060 'These files spam the console log with printf/fprintf:',
1061 items=printf)]
1062 return []
1065 def _CheckForAnonymousVariables(input_api, output_api):
1066 """These types are all expected to hold locks while in scope and
1067 so should never be anonymous (which causes them to be immediately
1068 destroyed)."""
1069 they_who_must_be_named = [
1070 'base::AutoLock',
1071 'base::AutoReset',
1072 'base::AutoUnlock',
1073 'SkAutoAlphaRestore',
1074 'SkAutoBitmapShaderInstall',
1075 'SkAutoBlitterChoose',
1076 'SkAutoBounderCommit',
1077 'SkAutoCallProc',
1078 'SkAutoCanvasRestore',
1079 'SkAutoCommentBlock',
1080 'SkAutoDescriptor',
1081 'SkAutoDisableDirectionCheck',
1082 'SkAutoDisableOvalCheck',
1083 'SkAutoFree',
1084 'SkAutoGlyphCache',
1085 'SkAutoHDC',
1086 'SkAutoLockColors',
1087 'SkAutoLockPixels',
1088 'SkAutoMalloc',
1089 'SkAutoMaskFreeImage',
1090 'SkAutoMutexAcquire',
1091 'SkAutoPathBoundsUpdate',
1092 'SkAutoPDFRelease',
1093 'SkAutoRasterClipValidate',
1094 'SkAutoRef',
1095 'SkAutoTime',
1096 'SkAutoTrace',
1097 'SkAutoUnref',
1098 ]
1099 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1100 # bad: base::AutoLock(lock.get());
1101 # not bad: base::AutoLock lock(lock.get());
1102 bad_pattern = input_api.re.compile(anonymous)
1103 # good: new base::AutoLock(lock.get())
1104 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1105 errors = []
1107 for f in input_api.AffectedFiles():
1108 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1109 continue
1110 for linenum, line in f.ChangedContents():
1111 if bad_pattern.search(line) and not good_pattern.search(line):
1112 errors.append('%s:%d' % (f.LocalPath(), linenum))
1114 if errors:
1115 return [output_api.PresubmitError(
1116 'These lines create anonymous variables that need to be named:',
1117 items=errors)]
1118 return []
1121 def _CheckCygwinShell(input_api, output_api):
1122 source_file_filter = lambda x: input_api.FilterSourceFile(
1123 x, white_list=(r'.+\.(gyp|gypi)$',))
1124 cygwin_shell = []
1126 for f in input_api.AffectedSourceFiles(source_file_filter):
1127 for linenum, line in f.ChangedContents():
1128 if 'msvs_cygwin_shell' in line:
1129 cygwin_shell.append(f.LocalPath())
1130 break
1132 if cygwin_shell:
1133 return [output_api.PresubmitError(
1134 'These files should not use msvs_cygwin_shell (the default is 0):',
1135 items=cygwin_shell)]
1136 return []
1139 def _CheckUserActionUpdate(input_api, output_api):
1140 """Checks if any new user action has been added."""
1141 if any('actions.xml' == input_api.os_path.basename(f) for f in
1142 input_api.LocalPaths()):
1143 # If actions.xml is already included in the changelist, the PRESUBMIT
1144 # for actions.xml will do a more complete presubmit check.
1145 return []
1147 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1148 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
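# e.g. a changed line containing 'RecordAction(UserMetricsAction("Example_Action"))'
# is matched, and "Example_Action" (hypothetical name) is then looked up in
# tools/metrics/actions/actions.xml below.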
1149 current_actions = None
1150 for f in input_api.AffectedFiles(file_filter=file_filter):
1151 for line_num, line in f.ChangedContents():
1152 match = input_api.re.search(action_re, line)
1153 if match:
1154 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1155 # loaded only once.
1156 if not current_actions:
1157 with open('tools/metrics/actions/actions.xml') as actions_f:
1158 current_actions = actions_f.read()
1159 # Search for the matched user action name in |current_actions|.
1160 for action_name in match.groups():
1161 action = 'name="{0}"'.format(action_name)
1162 if action not in current_actions:
1163 return [output_api.PresubmitPromptWarning(
1164 'File %s line %d: %s is missing in '
1165 'tools/metrics/actions/actions.xml. Please run '
1166 'tools/metrics/actions/extract_actions.py to update.'
1167 % (f.LocalPath(), line_num, action_name))]
1168 return []
1171 def _GetJSONParseError(input_api, filename, eat_comments=True):
1172 try:
1173 contents = input_api.ReadFile(filename)
1174 if eat_comments:
1175 json_comment_eater = input_api.os_path.join(
1176 input_api.PresubmitLocalPath(),
1177 'tools', 'json_comment_eater', 'json_comment_eater.py')
1178 process = input_api.subprocess.Popen(
1179 [input_api.python_executable, json_comment_eater],
1180 stdin=input_api.subprocess.PIPE,
1181 stdout=input_api.subprocess.PIPE,
1182 universal_newlines=True)
1183 (contents, _) = process.communicate(input=contents)
1185 input_api.json.loads(contents)
1186 except ValueError as e:
1187 return e
1188 return None
1191 def _GetIDLParseError(input_api, filename):
1192 try:
1193 contents = input_api.ReadFile(filename)
1194 idl_schema = input_api.os_path.join(
1195 input_api.PresubmitLocalPath(),
1196 'tools', 'json_schema_compiler', 'idl_schema.py')
1197 process = input_api.subprocess.Popen(
1198 [input_api.python_executable, idl_schema],
1199 stdin=input_api.subprocess.PIPE,
1200 stdout=input_api.subprocess.PIPE,
1201 stderr=input_api.subprocess.PIPE,
1202 universal_newlines=True)
1203 (_, error) = process.communicate(input=contents)
1204 return error or None
1205 except ValueError as e:
1206 return e
1209 def _CheckParseErrors(input_api, output_api):
1210 """Check that IDL and JSON files do not contain syntax errors."""
1211 actions = {
1212 '.idl': _GetIDLParseError,
1213 '.json': _GetJSONParseError,
1214 }
1215 # These paths contain test data and other known invalid JSON files.
1216 excluded_patterns = [
1217 r'test[\\\/]data[\\\/]',
1218 r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
1219 ]
1220 # Most JSON files are preprocessed and support comments, but these do not.
1221 json_no_comments_patterns = [
1222 r'^testing[\\\/]',
1223 ]
1224 # Only run IDL checker on files in these directories.
1225 idl_included_patterns = [
1226 r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
1227 r'^extensions[\\\/]common[\\\/]api[\\\/]',
1228 ]
1230 def get_action(affected_file):
1231 filename = affected_file.LocalPath()
1232 return actions.get(input_api.os_path.splitext(filename)[1])
1234 def MatchesFile(patterns, path):
1235 for pattern in patterns:
1236 if input_api.re.search(pattern, path):
1237 return True
1238 return False
1240 def FilterFile(affected_file):
1241 action = get_action(affected_file)
1242 if not action:
1243 return False
1244 path = affected_file.LocalPath()
1246 if MatchesFile(excluded_patterns, path):
1247 return False
1249 if (action == _GetIDLParseError and
1250 not MatchesFile(idl_included_patterns, path)):
1251 return False
1252 return True
1254 results = []
1255 for affected_file in input_api.AffectedFiles(
1256 file_filter=FilterFile, include_deletes=False):
1257 action = get_action(affected_file)
1258 kwargs = {}
1259 if (action == _GetJSONParseError and
1260 MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
1261 kwargs['eat_comments'] = False
1262 parse_error = action(input_api,
1263 affected_file.AbsoluteLocalPath(),
1264 **kwargs)
1265 if parse_error:
1266 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
1267 (affected_file.LocalPath(), parse_error)))
1268 return results
1271 def _CheckJavaStyle(input_api, output_api):
1272 """Runs checkstyle on changed java files and returns errors if any exist."""
1273 import sys
1274 original_sys_path = sys.path
1275 try:
1276 sys.path = sys.path + [input_api.os_path.join(
1277 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
1278 import checkstyle
1279 finally:
1280 # Restore sys.path to what it was before.
1281 sys.path = original_sys_path
1283 return checkstyle.RunCheckstyle(
1284 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
1285 black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
1288 def _CheckForCopyrightedCode(input_api, output_api):
1289 """Verifies that newly added code doesn't contain copyrighted material
1290 and is properly licensed under the standard Chromium license.
1292 As there can be false positives, we maintain a whitelist file. This check
1293 also verifies that the whitelist file is up to date.
1294 """
1295 import sys
1296 original_sys_path = sys.path
1297 try:
1298 sys.path = sys.path + [input_api.os_path.join(
1299 input_api.PresubmitLocalPath(), 'android_webview', 'tools')]
1300 import copyright_scanner
1301 finally:
1302 # Restore sys.path to what it was before.
1303 sys.path = original_sys_path
1305 return copyright_scanner.ScanAtPresubmit(input_api, output_api)
1308 _DEPRECATED_CSS = [
1309 # Values
1310 ( "-webkit-box", "flex" ),
1311 ( "-webkit-inline-box", "inline-flex" ),
1312 ( "-webkit-flex", "flex" ),
1313 ( "-webkit-inline-flex", "inline-flex" ),
1314 ( "-webkit-min-content", "min-content" ),
1315 ( "-webkit-max-content", "max-content" ),
1317 # Properties
1318 ( "-webkit-background-clip", "background-clip" ),
1319 ( "-webkit-background-origin", "background-origin" ),
1320 ( "-webkit-background-size", "background-size" ),
1321 ( "-webkit-box-shadow", "box-shadow" ),
1323 # Functions
1324 ( "-webkit-gradient", "gradient" ),
1325 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1326 ( "-webkit-linear-gradient", "linear-gradient" ),
1327 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1328 ( "-webkit-radial-gradient", "radial-gradient" ),
1329 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
1332 def _CheckNoDeprecatedCSS(input_api, output_api):
1333 """ Make sure that we don't use deprecated CSS
1334 properties, functions or values. Our external
1335 documentation is ignored by the hooks as it
1336 needs to be consumed by WebKit. """
1337 results = []
1338 file_inclusion_pattern = (r".+\.css$",)
1339 black_list = (_EXCLUDED_PATHS +
1340 _TEST_CODE_EXCLUDED_PATHS +
1341 input_api.DEFAULT_BLACK_LIST +
1342 (r"^chrome/common/extensions/docs",
1343 r"^chrome/docs",
1344 r"^native_client_sdk"))
1345 file_filter = lambda f: input_api.FilterSourceFile(
1346 f, white_list=file_inclusion_pattern, black_list=black_list)
1347 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1348 for line_num, line in fpath.ChangedContents():
1349 for (deprecated_value, value) in _DEPRECATED_CSS:
1350 if deprecated_value in line:
1351 results.append(output_api.PresubmitError(
1352 "%s:%d: Use of deprecated CSS %s, use %s instead" %
1353 (fpath.LocalPath(), line_num, deprecated_value, value)))
1354 return results
1357 _DEPRECATED_JS = [
1358 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
1359 ( "__defineGetter__", "Object.defineProperty" ),
1360 ( "__defineSetter__", "Object.defineProperty" ),
1363 def _CheckNoDeprecatedJS(input_api, output_api):
1364 """Make sure that we don't use deprecated JS in Chrome code."""
1365 results = []
1366 file_inclusion_pattern = (r".+\.js$",) # TODO(dbeam): .html?
1367 black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
1368 input_api.DEFAULT_BLACK_LIST)
1369 file_filter = lambda f: input_api.FilterSourceFile(
1370 f, white_list=file_inclusion_pattern, black_list=black_list)
1371 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1372 for lnum, line in fpath.ChangedContents():
1373 for (deprecated, replacement) in _DEPRECATED_JS:
1374 if deprecated in line:
1375 results.append(output_api.PresubmitError(
1376 "%s:%d: Use of deprecated JS %s, use %s instead" %
1377 (fpath.LocalPath(), lnum, deprecated, replacement)))
1378 return results
1381 def _CommonChecks(input_api, output_api):
1382 """Checks common to both upload and commit."""
1383 results = []
1384 results.extend(input_api.canned_checks.PanProjectChecks(
1385 input_api, output_api,
1386 excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
1387 results.extend(_CheckAuthorizedAuthor(input_api, output_api))
1388 results.extend(
1389 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
1390 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
1391 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
1392 results.extend(_CheckNoNewWStrings(input_api, output_api))
1393 results.extend(_CheckNoDEPSGIT(input_api, output_api))
1394 results.extend(_CheckNoBannedFunctions(input_api, output_api))
1395 results.extend(_CheckNoPragmaOnce(input_api, output_api))
1396 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
1397 results.extend(_CheckUnwantedDependencies(input_api, output_api))
1398 results.extend(_CheckFilePermissions(input_api, output_api))
1399 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
1400 results.extend(_CheckIncludeOrder(input_api, output_api))
1401 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
1402 results.extend(_CheckPatchFiles(input_api, output_api))
1403 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
1404 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
1405 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
1406 results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
1407 # TODO(danakj): Remove this when base/move.h is removed.
1408 results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
1409 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
1410 results.extend(
1411 input_api.canned_checks.CheckChangeHasNoTabs(
1412 input_api,
1413 output_api,
1414 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
1415 results.extend(_CheckSpamLogging(input_api, output_api))
1416 results.extend(_CheckForAnonymousVariables(input_api, output_api))
1417 results.extend(_CheckCygwinShell(input_api, output_api))
1418 results.extend(_CheckUserActionUpdate(input_api, output_api))
1419 results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
1420 results.extend(_CheckNoDeprecatedJS(input_api, output_api))
1421 results.extend(_CheckParseErrors(input_api, output_api))
1422 results.extend(_CheckForIPCRules(input_api, output_api))
1423 results.extend(_CheckForCopyrightedCode(input_api, output_api))
1424 results.extend(_CheckForWindowsLineEndings(input_api, output_api))
1426 if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
1427 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
1428 input_api, output_api,
1429 input_api.PresubmitLocalPath(),
1430 whitelist=[r'^PRESUBMIT_test\.py$']))
1431 return results
1434 def _CheckAuthorizedAuthor(input_api, output_api):
1435 """For non-googler/chromites committers, verify the author's email address is
1436 in AUTHORS.
1437 """
1438 # TODO(maruel): Add it to input_api?
1439 import fnmatch
1441 author = input_api.change.author_email
1442 if not author:
1443 input_api.logging.info('No author, skipping AUTHOR check')
1444 return []
1445 authors_path = input_api.os_path.join(
1446 input_api.PresubmitLocalPath(), 'AUTHORS')
1447 valid_authors = (
1448 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1449 for line in open(authors_path))
1450 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1451 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1452 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1453 return [output_api.PresubmitPromptWarning(
1454 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1455 '\n'
1456 'http://www.chromium.org/developers/contributing-code and read the '
1457 '"Legal" section\n'
1458 'If you are a chromite, verify the contributor signed the CLA.') %
1459 author)]
1460 return []
1463 def _CheckPatchFiles(input_api, output_api):
1464 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1465 if f.LocalPath().endswith(('.orig', '.rej'))]
1466 if problems:
1467 return [output_api.PresubmitError(
1468 "Don't commit .rej and .orig files.", problems)]
1469 else:
1470 return []
1473 def _DidYouMeanOSMacro(bad_macro):
1474 try:
1475 return {'A': 'OS_ANDROID',
1476 'B': 'OS_BSD',
1477 'C': 'OS_CHROMEOS',
1478 'F': 'OS_FREEBSD',
1479 'L': 'OS_LINUX',
1480 'M': 'OS_MACOSX',
1481 'N': 'OS_NACL',
1482 'O': 'OS_OPENBSD',
1483 'P': 'OS_POSIX',
1484 'S': 'OS_SOLARIS',
1485 'W': 'OS_WIN'}[bad_macro[3].upper()]
1486 except KeyError:
1487 return ''
1490 def _CheckForInvalidOSMacrosInFile(input_api, f):
1491 """Check for sensible looking, totally invalid OS macros."""
1492 preprocessor_statement = input_api.re.compile(r'^\s*#')
1493 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
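# e.g. '#if defined(OS_MACOS)' is reported as invalid, and _DidYouMeanOSMacro
# suggests OS_MACOSX.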
1494 results = []
1495 for lnum, line in f.ChangedContents():
1496 if preprocessor_statement.search(line):
1497 for match in os_macro.finditer(line):
1498 if not match.group(1) in _VALID_OS_MACROS:
1499 good = _DidYouMeanOSMacro(match.group(1))
1500 did_you_mean = ' (did you mean %s?)' % good if good else ''
1501 results.append(' %s:%d %s%s' % (f.LocalPath(),
1502 lnum,
1503 match.group(1),
1504 did_you_mean))
1505 return results
1508 def _CheckForInvalidOSMacros(input_api, output_api):
1509 """Check all affected files for invalid OS macros."""
1510 bad_macros = []
1511 for f in input_api.AffectedFiles():
1512 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1513 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1515 if not bad_macros:
1516 return []
1518 return [output_api.PresubmitError(
1519 'Possibly invalid OS macro[s] found. Please fix your code\n'
1520 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1523 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
1524 """Check all affected files for invalid "if defined" macros."""
1525 ALWAYS_DEFINED_MACROS = (
1526 "TARGET_CPU_PPC",
1527 "TARGET_CPU_PPC64",
1528 "TARGET_CPU_68K",
1529 "TARGET_CPU_X86",
1530 "TARGET_CPU_ARM",
1531 "TARGET_CPU_MIPS",
1532 "TARGET_CPU_SPARC",
1533 "TARGET_CPU_ALPHA",
1534 "TARGET_IPHONE_SIMULATOR",
1535 "TARGET_OS_EMBEDDED",
1536 "TARGET_OS_IPHONE",
1537 "TARGET_OS_MAC",
1538 "TARGET_OS_UNIX",
1539 "TARGET_OS_WIN32",
1541 ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
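# e.g. '#ifdef TARGET_OS_MAC' and '#if defined(TARGET_OS_MAC)' are both flagged:
# these TARGET_* macros are always defined (to 0 or 1) by the Apple SDK headers,
# so the correct form is '#if TARGET_OS_MAC'.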
1542 results = []
1543 for lnum, line in f.ChangedContents():
1544 for match in ifdef_macro.finditer(line):
1545 if match.group(1) in ALWAYS_DEFINED_MACROS:
1546 always_defined = ' %s is always defined. ' % match.group(1)
1547 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
1548 results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
1549 lnum,
1550 always_defined,
1551 did_you_mean))
1552 return results
1555 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
1556 """Check all affected files for invalid "if defined" macros."""
1557 bad_macros = []
1558 for f in input_api.AffectedFiles():
1559 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1560 bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
1562 if not bad_macros:
1563 return []
1565 return [output_api.PresubmitError(
1566 'Found ifdef check on always-defined macro[s]. Please fix your code\n'
1567 'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
1568 bad_macros)]
1571 def _CheckForUsingSideEffectsOfPass(input_api, output_api):
1572 """Check all affected files for using side effects of Pass."""
1573 errors = []
1574 for f in input_api.AffectedFiles():
1575 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1576 for lnum, line in f.ChangedContents():
1577 # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
1578 if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
1579 errors.append(output_api.PresubmitError(
1580 ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
1581 'See crbug.com/418297.') % (f.LocalPath(), lnum)))
1582 return errors
1585 def _CheckForIPCRules(input_api, output_api):
1586 """Check for same IPC rules described in
1587 http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
1588 """
1589 base_pattern = r'IPC_ENUM_TRAITS\('
1590 inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
1591 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
1593 problems = []
1594 for f in input_api.AffectedSourceFiles(None):
1595 local_path = f.LocalPath()
1596 if not local_path.endswith('.h'):
1597 continue
1598 for line_number, line in f.ChangedContents():
1599 if inclusion_pattern.search(line) and not comment_pattern.search(line):
1600 problems.append(
1601 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1603 if problems:
1604 return [output_api.PresubmitPromptWarning(
1605 _IPC_ENUM_TRAITS_DEPRECATED, problems)]
1606 else:
1607 return []
1610 def _CheckForWindowsLineEndings(input_api, output_api):
1611 """Check source code and known ascii text files for Windows style line
1612 endings.
1613 """
1614 known_text_files = r'.*\.(txt|html|htm|mhtml|py)$'
1616 file_inclusion_pattern = (
1617 known_text_files,
1618 r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1619 )
1621 filter = lambda f: input_api.FilterSourceFile(
1622 f, white_list=file_inclusion_pattern, black_list=None)
1623 files = [f.LocalPath() for f in
1624 input_api.AffectedSourceFiles(filter)]
1626 problems = []
1628 for file in files:
1629 fp = open(file, 'r')
1630 for line in fp:
1631 if line.endswith('\r\n'):
1632 problems.append(file)
1633 break
1634 fp.close()
1636 if problems:
1637 return [output_api.PresubmitPromptWarning('Are you sure that you want '
1638 'these files to contain Windows style line endings?\n' +
1639 '\n'.join(problems))]
1641 return []
1644 def CheckChangeOnUpload(input_api, output_api):
1645 results = []
1646 results.extend(_CommonChecks(input_api, output_api))
1647 results.extend(_CheckValidHostsInDEPS(input_api, output_api))
1648 results.extend(_CheckJavaStyle(input_api, output_api))
1649 results.extend(
1650 input_api.canned_checks.CheckGNFormatted(input_api, output_api))
1651 results.extend(_CheckUmaHistogramChanges(input_api, output_api))
1652 return results
1655 def GetTryServerMasterForBot(bot):
1656 """Returns the Try Server master for the given bot.
1658 It tries to guess the master from the bot name, but may still fail
1659 and return None. There is no longer a default master.
1660 """
1661 # Potentially ambiguous bot names are listed explicitly.
1662 master_map = {
1663 'win_gpu': 'tryserver.chromium.gpu',
1664 'chromium_presubmit': 'tryserver.chromium.linux',
1665 'blink_presubmit': 'tryserver.chromium.linux',
1666 'tools_build_presubmit': 'tryserver.chromium.linux',
1667 }
1668 master = master_map.get(bot)
1669 if not master:
1670 if 'gpu' in bot:
1671 master = 'tryserver.chromium.gpu'
1672 elif 'linux' in bot or 'android' in bot or 'presubmit' in bot:
1673 master = 'tryserver.chromium.linux'
1674 elif 'win' in bot:
1675 master = 'tryserver.chromium.win'
1676 elif 'mac' in bot or 'ios' in bot:
1677 master = 'tryserver.chromium.mac'
1678 return master
1681 def GetDefaultTryConfigs(bots):
1682 """Returns a list of ('bot', set(['tests']), filtered by [bots].
1685 builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)
1687 # Build up the mapping from tryserver master to bot/test.
1688 out = dict()
1689 for bot, tests in builders_and_tests.iteritems():
1690 out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
1691 return out
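# e.g. GetDefaultTryConfigs(['win_chromium_rel_ng', 'mac_chromium_rel_ng']) returns
#   {'tryserver.chromium.win': {'win_chromium_rel_ng': set(['defaulttests'])},
#    'tryserver.chromium.mac': {'mac_chromium_rel_ng': set(['defaulttests'])}}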
1694 def CheckChangeOnCommit(input_api, output_api):
1695 results = []
1696 results.extend(_CommonChecks(input_api, output_api))
1697 # TODO(thestig) temporarily disabled, doesn't work in third_party/
1698 #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
1699 # input_api, output_api, sources))
1700 # Make sure the tree is 'open'.
1701 results.extend(input_api.canned_checks.CheckTreeIsOpen(
1702 input_api,
1703 output_api,
1704 json_url='http://chromium-status.appspot.com/current?format=json'))
1706 results.extend(input_api.canned_checks.CheckChangeHasBugField(
1707 input_api, output_api))
1708 results.extend(input_api.canned_checks.CheckChangeHasDescription(
1709 input_api, output_api))
1710 return results
1713 def GetPreferredTryMasters(project, change):
1714 import re
1715 files = change.LocalPaths()
1717 if not files or all(re.search(r'[\\\/]OWNERS$', f) for f in files):
1718 return {}
1720 if all(re.search(r'\.(m|mm)$|(^|[\\\/_])mac[\\\/_.]', f) for f in files):
1721 return GetDefaultTryConfigs([
1722 'mac_chromium_compile_dbg_ng',
1723 'mac_chromium_rel_ng',
1724 ])
1725 if all(re.search('(^|[/_])win[/_.]', f) for f in files):
1726 return GetDefaultTryConfigs([
1727 'win8_chromium_rel',
1728 'win_chromium_rel_ng',
1729 'win_chromium_x64_rel_ng',
1730 ])
1731 if all(re.search(r'(^|[\\\/_])android[\\\/_.]', f) and
1732 not re.search(r'(^|[\\\/_])devtools[\\\/_.]', f) for f in files):
1733 return GetDefaultTryConfigs([
1734 'android_aosp',
1735 'android_dbg_tests_recipe',
1736 ])
1737 if all(re.search(r'[\\\/_]ios[\\\/_.]', f) for f in files):
1738 return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])
1740 import os
1741 import json
1742 with open(os.path.join(
1743 change.RepositoryRoot(), 'testing', 'commit_queue', 'config.json')) as f:
1744 cq_config = json.load(f)
1745 cq_trybots = cq_config.get('trybots', {})
1746 builders = cq_trybots.get('launched', {})
1747 for master, master_config in cq_trybots.get('triggered', {}).iteritems():
1748 for triggered_bot in master_config:
1749 builders.get(master, {}).pop(triggered_bot, None)
1751 # Explicitly iterate over copies of dicts since we mutate them.
1752 for master in builders.keys():
1753 for builder in builders[master].keys():
1754 # Do not trigger presubmit builders, since they're likely to fail
1755 # (e.g. OWNERS checks before finished code review), and we're
1756 # running local presubmit anyway.
1757 if 'presubmit' in builder:
1758 builders[master].pop(builder)
1760 # Match things like path/aura/file.cc and path/file_aura.cc.
1761 # Same for chromeos.
1762 if any(re.search(r'[\\\/_](aura|chromeos)', f) for f in files):
1763 tryserver_linux = builders.setdefault('tryserver.chromium.linux', {})
1764 tryserver_linux['linux_chromium_chromeos_asan_rel_ng'] = ['defaulttests']
1766 return builders