1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
9 """
12 _EXCLUDED_PATHS = (
13 r"^breakpad[\\\/].*",
14 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
15 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
16 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
17 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
18 r"^skia[\\\/].*",
19 r"^v8[\\\/].*",
20 r".*MakeFile$",
21 r".+_autogen\.h$",
22 r".+[\\\/]pnacl_shim\.c$",
23 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
24 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
27 # The NetscapePlugIn library is excluded from the pan-project checks as it will soon
28 # be deleted together with the rest of the NPAPI and it's not worthwhile to
29 # update the coding style until then.
30 _TESTRUNNER_PATHS = (
31 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
34 # Fragment of a regular expression that matches C++ and Objective-C++
35 # implementation files.
36 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
38 # Regular expression that matches code only used for test binaries
39 # (best effort).
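# For illustration (hypothetical paths, not from this file): base/fake_foo.cc,
# foo_unittest.mm and net/tools/bar.cc would all be treated as test-only code
# by the patterns below.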
40 _TEST_CODE_EXCLUDED_PATHS = (
41 r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
42 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
43 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
44 _IMPLEMENTATION_EXTENSIONS,
45 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.*[\\\/](test|tool(s)?)[\\\/].*',
47 # content_shell is used for running layout tests.
48 r'content[\\\/]shell[\\\/].*',
49 # At request of folks maintaining this folder.
50 r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
51 # Non-production example code.
52 r'mojo[\\\/]examples[\\\/].*',
53 # Launcher for running iOS tests on the simulator.
54 r'testing[\\\/]iossim[\\\/]iossim\.mm$',
57 _TEST_ONLY_WARNING = (
58 'You might be calling functions intended only for testing from\n'
59 'production code. It is OK to ignore this warning if you know what\n'
60 'you are doing, as the heuristics used to detect the situation are\n'
61 'not perfect. The commit queue will not block on this warning.')
64 _INCLUDE_ORDER_WARNING = (
65 'Your #include order seems to be broken. Remember to use the right '
66 'collation (LC_COLLATE=C) and check https://google-styleguide.googlecode'
67 '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')
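# Each entry below is unpacked by _CheckNoBannedFunctions as
# (func_name, message, error): |func_name| is a plain substring to look for,
# or a regular expression when it starts with '/'; |message| is a sequence of
# lines to print; |error| selects a presubmit error (True) or a warning (False).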
69 _BANNED_OBJC_FUNCTIONS = (
71 'addTrackingRect:',
73 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is '
74 'prohibited. Please use CrTrackingArea instead.',
75 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
77 False,
80 r'/NSTrackingArea\W',
82 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
83 'instead.',
84 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
86 False,
89 'convertPointFromBase:',
91 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
92 'Please use |convertPoint:(point) fromView:nil| instead.',
93 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
95 True,
98 'convertPointToBase:',
100 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
101 'Please use |convertPoint:(point) toView:nil| instead.',
102 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
104 True,
107 'convertRectFromBase:',
109 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
110 'Please use |convertRect:(point) fromView:nil| instead.',
111 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
113 True,
116 'convertRectToBase:',
118 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
119 'Please use |convertRect:(point) toView:nil| instead.',
120 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
122 True,
125 'convertSizeFromBase:',
127 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
128 'Please use |convertSize:(point) fromView:nil| instead.',
129 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
131 True,
134 'convertSizeToBase:',
136 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
137 'Please use |convertSize:(point) toView:nil| instead.',
138 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
140 True,
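# _BANNED_CPP_FUNCTIONS entries additionally carry a fourth element,
# |excluded_paths|: a sequence of path regular expressions that are exempt
# from the check (see the second loop in _CheckNoBannedFunctions).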
145 _BANNED_CPP_FUNCTIONS = (
146 # Make sure that gtest's FRIEND_TEST() macro is not used; the
147 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
148 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
150 'FRIEND_TEST(',
152 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
153 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
155 False,
159 'ScopedAllowIO',
161 'New code should not use ScopedAllowIO. Post a task to the blocking',
162 'pool or the FILE thread instead.',
164 True,
166 r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
167 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
168 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
169 "customization_document_browsertest\.cc$",
170 r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
171 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
172 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
173 r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
174 r"simple_platform_shared_buffer_posix\.cc$",
175 r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
176 r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
177 r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
178 "drm_display_host_manager\.cc$",
182 'SkRefPtr',
184 'The use of SkRefPtr is prohibited. ',
185 'Please use skia::RefPtr instead.'
187 True,
191 'SkAutoRef',
193 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
194 'Please use skia::RefPtr instead.'
196 True,
200 'SkAutoTUnref',
202 'The use of SkAutoTUnref is dangerous because it implicitly ',
203 'converts to a raw pointer. Please use skia::RefPtr instead.'
205 True,
209 'SkAutoUnref',
211 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
212 'because it implicitly converts to a raw pointer. ',
213 'Please use skia::RefPtr instead.'
215 True,
219 r'/HANDLE_EINTR\(.*close',
221 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
222 'descriptor will be closed, and it is incorrect to retry the close.',
223 'Either call close directly and ignore its return value, or wrap close',
224 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
226 True,
230 r'/IGNORE_EINTR\((?!.*close)',
232 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
233 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
235 True,
237 # Files that #define IGNORE_EINTR.
238 r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
239 r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
243 r'/v8::Extension\(',
245 'Do not introduce new v8::Extensions into the code base, use',
246 'gin::Wrappable instead. See http://crbug.com/334679',
248 True,
250 r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
254 '\<MessageLoopProxy\>',
256 'MessageLoopProxy is deprecated. ',
257 'Please use SingleThreadTaskRunner or ThreadTaskRunnerHandle instead.'
259 True,
261 # Internal message_loop related code may still use it.
262 r'^base[\\\/]message_loop[\\\/].*',
267 _IPC_ENUM_TRAITS_DEPRECATED = (
268 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
269 'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')
272 _VALID_OS_MACROS = (
273 # Please keep sorted.
274 'OS_ANDROID',
275 'OS_ANDROID_HOST',
276 'OS_BSD',
277 'OS_CAT', # For testing.
278 'OS_CHROMEOS',
279 'OS_FREEBSD',
280 'OS_IOS',
281 'OS_LINUX',
282 'OS_MACOSX',
283 'OS_NACL',
284 'OS_NACL_NONSFI',
285 'OS_NACL_SFI',
286 'OS_OPENBSD',
287 'OS_POSIX',
288 'OS_QNX',
289 'OS_SOLARIS',
290 'OS_WIN',
294 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
295 """Attempts to prevent use of functions intended only for testing in
296 non-testing code. For now this is just a best-effort implementation
297 that ignores header files and may have some false positives. A
298 better implementation would probably need a proper C++ parser.
300 # We only scan .cc files and the like, as the declarations of
301 # for-testing functions in header files are hard to distinguish from
302 # calls to such functions without a proper C++ parser.
303 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
305 base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
306 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
307 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
308 exclusion_pattern = input_api.re.compile(
309 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
310 base_function_pattern, base_function_pattern))
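# Illustrative examples (hypothetical code, not from the tree): a changed line
# such as 'foo->SetBarForTesting(1);' matches |inclusion_pattern| and gets
# flagged, while 'void SetBarForTesting(int x) {' matches |exclusion_pattern|
# (it looks like a definition) and is skipped.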
312 def FilterFile(affected_file):
313 black_list = (_EXCLUDED_PATHS +
314 _TEST_CODE_EXCLUDED_PATHS +
315 input_api.DEFAULT_BLACK_LIST)
316 return input_api.FilterSourceFile(
317 affected_file,
318 white_list=(file_inclusion_pattern, ),
319 black_list=black_list)
321 problems = []
322 for f in input_api.AffectedSourceFiles(FilterFile):
323 local_path = f.LocalPath()
324 for line_number, line in f.ChangedContents():
325 if (inclusion_pattern.search(line) and
326 not comment_pattern.search(line) and
327 not exclusion_pattern.search(line)):
328 problems.append(
329 '%s:%d\n %s' % (local_path, line_number, line.strip()))
331 if problems:
332 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
333 else:
334 return []
337 def _CheckNoIOStreamInHeaders(input_api, output_api):
338 """Checks to make sure no .h files include <iostream>."""
339 files = []
340 pattern = input_api.re.compile(r'^#include\s*<iostream>',
341 input_api.re.MULTILINE)
342 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
343 if not f.LocalPath().endswith('.h'):
344 continue
345 contents = input_api.ReadFile(f)
346 if pattern.search(contents):
347 files.append(f)
349 if len(files):
350 return [ output_api.PresubmitError(
351 'Do not #include <iostream> in header files, since it inserts static '
352 'initialization into every file including the header. Instead, '
353 '#include <ostream>. See http://crbug.com/94794',
354 files) ]
355 return []
358 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
359 """Checks to make sure no source files use UNIT_TEST."""
360 problems = []
361 for f in input_api.AffectedFiles():
362 if (not f.LocalPath().endswith(('.cc', '.mm'))):
363 continue
365 for line_num, line in f.ChangedContents():
366 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
367 problems.append(' %s:%d' % (f.LocalPath(), line_num))
369 if not problems:
370 return []
371 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
372 '\n'.join(problems))]
375 def _FindHistogramNameInLine(histogram_name, line):
376 """Tries to find a histogram name or prefix in a line."""
377 if not "affected-histogram" in line:
378 return histogram_name in line
379 # A histogram_suffixes tag type has an affected-histogram name as a prefix of
380 # the histogram_name.
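# For example (hypothetical), the line '<affected-histogram name="Foo.Bar"/>'
# is treated as a match for the histogram "Foo.Bar.Baz", because "Foo.Bar" is
# a prefix of it.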
381 if not '"' in line:
382 return False
383 histogram_prefix = line.split('\"')[1]
384 return histogram_prefix in histogram_name
387 def _CheckUmaHistogramChanges(input_api, output_api):
388 """Check that UMA histogram names in touched lines can still be found in other
389 lines of the patch or in histograms.xml. Note that this check would not catch
390 the reverse: changes in histograms.xml not matched in the code itself."""
391 touched_histograms = []
392 histograms_xml_modifications = []
393 pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
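# e.g. (hypothetical) 'UMA_HISTOGRAM_TIMES("Startup.LoadTime", elapsed)'
# yields the histogram name "Startup.LoadTime" via group(1).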
394 for f in input_api.AffectedFiles():
395 # If histograms.xml itself is modified, keep the modified lines for later.
396 if f.LocalPath().endswith(('histograms.xml')):
397 histograms_xml_modifications = f.ChangedContents()
398 continue
399 if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
400 continue
401 for line_num, line in f.ChangedContents():
402 found = pattern.search(line)
403 if found:
404 touched_histograms.append([found.group(1), f, line_num])
406 # Search for the touched histogram names in the local modifications to
407 # histograms.xml, and, if not found, in the base histograms.xml file.
408 unmatched_histograms = []
409 for histogram_info in touched_histograms:
410 histogram_name_found = False
411 for line_num, line in histograms_xml_modifications:
412 histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
413 if histogram_name_found:
414 break
415 if not histogram_name_found:
416 unmatched_histograms.append(histogram_info)
418 histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
419 problems = []
420 if unmatched_histograms:
421 with open(histograms_xml_path) as histograms_xml:
422 for histogram_name, f, line_num in unmatched_histograms:
423 histograms_xml.seek(0)
424 histogram_name_found = False
425 for line in histograms_xml:
426 histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
427 if histogram_name_found:
428 break
429 if not histogram_name_found:
430 problems.append(' [%s:%d] %s' %
431 (f.LocalPath(), line_num, histogram_name))
433 if not problems:
434 return []
435 return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
436 'been modified and the associated histogram name has no match in either '
437 '%s or the modifications of it:' % (histograms_xml_path), problems)]
440 def _CheckNoNewWStrings(input_api, output_api):
441 """Checks to make sure we don't introduce use of wstrings."""
442 problems = []
443 for f in input_api.AffectedFiles():
444 if (not f.LocalPath().endswith(('.cc', '.h')) or
445 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
446 '/win/' in f.LocalPath()):
447 continue
449 allowWString = False
450 for line_num, line in f.ChangedContents():
451 if 'presubmit: allow wstring' in line:
452 allowWString = True
453 elif not allowWString and 'wstring' in line:
454 problems.append(' %s:%d' % (f.LocalPath(), line_num))
455 allowWString = False
456 else:
457 allowWString = False
459 if not problems:
460 return []
461 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
462 ' If you are calling a cross-platform API that accepts a wstring, '
463 'fix the API.\n' +
464 '\n'.join(problems))]
467 def _CheckNoDEPSGIT(input_api, output_api):
468 """Make sure .DEPS.git is never modified manually."""
469 if any(f.LocalPath().endswith('.DEPS.git') for f in
470 input_api.AffectedFiles()):
471 return [output_api.PresubmitError(
472 'Never commit changes to .DEPS.git. This file is maintained by an\n'
473 'automated system based on what\'s in DEPS and your changes will be\n'
474 'overwritten.\n'
475 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
476 'for more information')]
477 return []
480 def _CheckValidHostsInDEPS(input_api, output_api):
481 """Checks that DEPS file deps are from allowed_hosts."""
482 # Run only if DEPS file has been modified to annoy fewer bystanders.
483 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
484 return []
485 # Outsource work to gclient verify
486 try:
487 input_api.subprocess.check_output(['gclient', 'verify'])
488 return []
489 except input_api.subprocess.CalledProcessError, error:
490 return [output_api.PresubmitError(
491 'DEPS file must have only git dependencies.',
492 long_text=error.output)]
495 def _CheckNoBannedFunctions(input_api, output_api):
496 """Make sure that banned functions are not used."""
497 warnings = []
498 errors = []
500 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
501 for f in input_api.AffectedFiles(file_filter=file_filter):
502 for line_num, line in f.ChangedContents():
503 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
504 matched = False
505 if func_name[0:1] == '/':
506 regex = func_name[1:]
507 if input_api.re.search(regex, line):
508 matched = True
509 elif func_name in line:
510 matched = True
511 if matched:
512 problems = warnings
513 if error:
514 problems = errors
515 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
516 for message_line in message:
517 problems.append(' %s' % message_line)
519 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
520 for f in input_api.AffectedFiles(file_filter=file_filter):
521 for line_num, line in f.ChangedContents():
522 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
523 def IsBlacklisted(affected_file, blacklist):
524 local_path = affected_file.LocalPath()
525 for item in blacklist:
526 if input_api.re.match(item, local_path):
527 return True
528 return False
529 if IsBlacklisted(f, excluded_paths):
530 continue
531 matched = False
532 if func_name[0:1] == '/':
533 regex = func_name[1:]
534 if input_api.re.search(regex, line):
535 matched = True
536 elif func_name in line:
537 matched = True
538 if matched:
539 problems = warnings
540 if error:
541 problems = errors
542 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
543 for message_line in message:
544 problems.append(' %s' % message_line)
546 result = []
547 if (warnings):
548 result.append(output_api.PresubmitPromptWarning(
549 'Banned functions were used.\n' + '\n'.join(warnings)))
550 if (errors):
551 result.append(output_api.PresubmitError(
552 'Banned functions were used.\n' + '\n'.join(errors)))
553 return result
556 def _CheckNoPragmaOnce(input_api, output_api):
557 """Make sure that #pragma once is not used in header files."""
558 files = []
559 pattern = input_api.re.compile(r'^#pragma\s+once',
560 input_api.re.MULTILINE)
561 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
562 if not f.LocalPath().endswith('.h'):
563 continue
564 contents = input_api.ReadFile(f)
565 if pattern.search(contents):
566 files.append(f)
568 if files:
569 return [output_api.PresubmitError(
570 'Do not use #pragma once in header files.\n'
571 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
572 files)]
573 return []
576 def _CheckNoTrinaryTrueFalse(input_api, output_api):
577 """Checks to make sure we don't introduce use of foo ? true : false."""
578 problems = []
579 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
580 for f in input_api.AffectedFiles():
581 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
582 continue
584 for line_num, line in f.ChangedContents():
585 if pattern.match(line):
586 problems.append(' %s:%d' % (f.LocalPath(), line_num))
588 if not problems:
589 return []
590 return [output_api.PresubmitPromptWarning(
591 'Please consider avoiding the "? true : false" pattern if possible.\n' +
592 '\n'.join(problems))]
595 def _CheckUnwantedDependencies(input_api, output_api):
596 """Runs checkdeps on #include statements added in this
597 change. Breaking - rules is an error, breaking ! rules is a
598 warning.
600 import sys
601 # We need to wait until we have an input_api object and use this
602 # roundabout construct to import checkdeps because this file is
603 # eval-ed and thus doesn't have __file__.
604 original_sys_path = sys.path
605 try:
606 sys.path = sys.path + [input_api.os_path.join(
607 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
608 import checkdeps
609 from cpp_checker import CppChecker
610 from rules import Rule
611 finally:
612 # Restore sys.path to what it was before.
613 sys.path = original_sys_path
615 added_includes = []
616 for f in input_api.AffectedFiles():
617 if not CppChecker.IsCppFile(f.LocalPath()):
618 continue
620 changed_lines = [line for line_num, line in f.ChangedContents()]
621 added_includes.append([f.LocalPath(), changed_lines])
623 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
625 error_descriptions = []
626 warning_descriptions = []
627 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
628 added_includes):
629 description_with_path = '%s\n %s' % (path, rule_description)
630 if rule_type == Rule.DISALLOW:
631 error_descriptions.append(description_with_path)
632 else:
633 warning_descriptions.append(description_with_path)
635 results = []
636 if error_descriptions:
637 results.append(output_api.PresubmitError(
638 'You added one or more #includes that violate checkdeps rules.',
639 error_descriptions))
640 if warning_descriptions:
641 results.append(output_api.PresubmitPromptOrNotify(
642 'You added one or more #includes of files that are temporarily\n'
643 'allowed but being removed. Can you avoid introducing the\n'
644 '#include? See relevant DEPS file(s) for details and contacts.',
645 warning_descriptions))
646 return results
649 def _CheckFilePermissions(input_api, output_api):
650 """Check that all files have their permissions properly set."""
651 if input_api.platform == 'win32':
652 return []
653 args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
654 '--root', input_api.change.RepositoryRoot()]
655 for f in input_api.AffectedFiles():
656 args += ['--file', f.LocalPath()]
657 checkperms = input_api.subprocess.Popen(args,
658 stdout=input_api.subprocess.PIPE)
659 errors = checkperms.communicate()[0].strip()
660 if errors:
661 return [output_api.PresubmitError('checkperms.py failed.',
662 errors.splitlines())]
663 return []
666 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
667 """Makes sure we don't include ui/aura/window_property.h
668 in header files.
670 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
671 errors = []
672 for f in input_api.AffectedFiles():
673 if not f.LocalPath().endswith('.h'):
674 continue
675 for line_num, line in f.ChangedContents():
676 if pattern.match(line):
677 errors.append(' %s:%d' % (f.LocalPath(), line_num))
679 results = []
680 if errors:
681 results.append(output_api.PresubmitError(
682 'Header files should not include ui/aura/window_property.h', errors))
683 return results
686 def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
687 """Checks that the lines in scope occur in the right order.
689 1. C system files in alphabetical order
690 2. C++ system files in alphabetical order
691 3. Project's .h files
694 c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
695 cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
696 custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
698 C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
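# A scope such as the following (hypothetical) passes the check:
#   #include <stddef.h>
#   #include <string.h>
#   #include <map>
#   #include "base/bar.h"
# i.e. only forward transitions between the three states are allowed, and
# lines within each state must be in alphabetical order.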
700 state = C_SYSTEM_INCLUDES
702 previous_line = ''
703 previous_line_num = 0
704 problem_linenums = []
705 for line_num, line in scope:
706 if c_system_include_pattern.match(line):
707 if state != C_SYSTEM_INCLUDES:
708 problem_linenums.append((line_num, previous_line_num))
709 elif previous_line and previous_line > line:
710 problem_linenums.append((line_num, previous_line_num))
711 elif cpp_system_include_pattern.match(line):
712 if state == C_SYSTEM_INCLUDES:
713 state = CPP_SYSTEM_INCLUDES
714 elif state == CUSTOM_INCLUDES:
715 problem_linenums.append((line_num, previous_line_num))
716 elif previous_line and previous_line > line:
717 problem_linenums.append((line_num, previous_line_num))
718 elif custom_include_pattern.match(line):
719 if state != CUSTOM_INCLUDES:
720 state = CUSTOM_INCLUDES
721 elif previous_line and previous_line > line:
722 problem_linenums.append((line_num, previous_line_num))
723 else:
724 problem_linenums.append(line_num)
725 previous_line = line
726 previous_line_num = line_num
728 warnings = []
729 for (line_num, previous_line_num) in problem_linenums:
730 if line_num in changed_linenums or previous_line_num in changed_linenums:
731 warnings.append(' %s:%d' % (file_path, line_num))
732 return warnings
735 def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
736 """Checks the #include order for the given file f."""
738 system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
739 # Exclude the following includes from the check:
740 # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
741 # specific order.
742 # 2) <atlbase.h>, "build/build_config.h"
743 excluded_include_pattern = input_api.re.compile(
744 r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
745 custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
746 # Match the final or penultimate token if it is xxxtest so we can ignore it
747 # when considering the special first include.
748 test_file_tag_pattern = input_api.re.compile(
749 r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
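# e.g. (hypothetical) foo_unittest.cc is reduced to foo.cc, and
# foo_browsertest_mac.cc to foo_mac.cc, before comparing against the first
# #include.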
750 if_pattern = input_api.re.compile(
751 r'\s*#\s*(if|elif|else|endif|define|undef).*')
752 # Some files need specialized order of includes; exclude such files from this
753 # check.
754 uncheckable_includes_pattern = input_api.re.compile(
755 r'\s*#include '
756 '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
758 contents = f.NewContents()
759 warnings = []
760 line_num = 0
762 # Handle the special first include. If the first include file is
763 # some/path/file.h, the corresponding including file can be some/path/file.cc,
764 # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
765 # etc. It's also possible that no special first include exists.
766 # If the included file is some/path/file_platform.h the including file could
767 # also be some/path/file_xxxtest_platform.h.
768 including_file_base_name = test_file_tag_pattern.sub(
769 '', input_api.os_path.basename(f.LocalPath()))
771 for line in contents:
772 line_num += 1
773 if system_include_pattern.match(line):
774 # No special first include -> process the line again along with normal
775 # includes.
776 line_num -= 1
777 break
778 match = custom_include_pattern.match(line)
779 if match:
780 match_dict = match.groupdict()
781 header_basename = test_file_tag_pattern.sub(
782 '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
784 if header_basename not in including_file_base_name:
785 # No special first include -> process the line again along with normal
786 # includes.
787 line_num -= 1
788 break
790 # Split into scopes: Each region between #if and #endif is its own scope.
791 scopes = []
792 current_scope = []
793 for line in contents[line_num:]:
794 line_num += 1
795 if uncheckable_includes_pattern.match(line):
796 continue
797 if if_pattern.match(line):
798 scopes.append(current_scope)
799 current_scope = []
800 elif ((system_include_pattern.match(line) or
801 custom_include_pattern.match(line)) and
802 not excluded_include_pattern.match(line)):
803 current_scope.append((line_num, line))
804 scopes.append(current_scope)
806 for scope in scopes:
807 warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
808 changed_linenums))
809 return warnings
812 def _CheckIncludeOrder(input_api, output_api):
813 """Checks that the #include order is correct.
815 1. The corresponding header for source files.
816 2. C system files in alphabetical order
817 3. C++ system files in alphabetical order
818 4. Project's .h files in alphabetical order
820 Each region separated by #if, #elif, #else, #endif, #define and #undef follows
821 these rules separately.
823 def FileFilterIncludeOrder(affected_file):
824 black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
825 return input_api.FilterSourceFile(affected_file, black_list=black_list)
827 warnings = []
828 for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
829 if f.LocalPath().endswith(('.cc', '.h', '.mm')):
830 changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
831 warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
833 results = []
834 if warnings:
835 results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
836 warnings))
837 return results
840 def _CheckForVersionControlConflictsInFile(input_api, f):
841 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
842 errors = []
843 for line_num, line in f.ChangedContents():
844 if f.LocalPath().endswith('.md'):
845 # First-level headers in markdown look a lot like version control
846 # conflict markers. http://daringfireball.net/projects/markdown/basics
847 continue
848 if pattern.match(line):
849 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
850 return errors
853 def _CheckForVersionControlConflicts(input_api, output_api):
854 """Usually this is not intentional and will cause a compile failure."""
855 errors = []
856 for f in input_api.AffectedFiles():
857 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
859 results = []
860 if errors:
861 results.append(output_api.PresubmitError(
862 'Version control conflict markers found, please resolve.', errors))
863 return results
866 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
867 def FilterFile(affected_file):
868 """Filter function for use with input_api.AffectedSourceFiles,
869 below. This filters out everything except non-test files from
870 top-level directories that generally speaking should not hard-code
871 service URLs (e.g. src/android_webview/, src/content/ and others).
873 return input_api.FilterSourceFile(
874 affected_file,
875 white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
876 black_list=(_EXCLUDED_PATHS +
877 _TEST_CODE_EXCLUDED_PATHS +
878 input_api.DEFAULT_BLACK_LIST))
880 base_pattern = '"[^"]*google\.com[^"]*"'
881 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
882 pattern = input_api.re.compile(base_pattern)
883 problems = [] # items are (filename, line_number, line)
884 for f in input_api.AffectedSourceFiles(FilterFile):
885 for line_num, line in f.ChangedContents():
886 if not comment_pattern.search(line) and pattern.search(line):
887 problems.append((f.LocalPath(), line_num, line))
889 if problems:
890 return [output_api.PresubmitPromptOrNotify(
891 'Most layers below src/chrome/ should not hardcode service URLs.\n'
892 'Are you sure this is correct?',
893 [' %s:%d: %s' % (
894 problem[0], problem[1], problem[2]) for problem in problems])]
895 else:
896 return []
899 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
900 """Makes sure there are no abbreviations in the name of PNG files.
901 The native_client_sdk directory is excluded because it has auto-generated PNG
902 files for documentation.
904 errors = []
905 white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
906 black_list = (r'^native_client_sdk[\\\/]',)
907 file_filter = lambda f: input_api.FilterSourceFile(
908 f, white_list=white_list, black_list=black_list)
909 for f in input_api.AffectedFiles(include_deletes=False,
910 file_filter=file_filter):
911 errors.append(' %s' % f.LocalPath())
913 results = []
914 if errors:
915 results.append(output_api.PresubmitError(
916 'The name of PNG files should not have abbreviations. \n'
917 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
918 'Contact oshima@chromium.org if you have questions.', errors))
919 return results
922 def _FilesToCheckForIncomingDeps(re, changed_lines):
923 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
924 a set of DEPS entries that we should look up.
926 For a directory (rather than a specific filename) we fake a path to
927 a specific filename by adding /DEPS. This is chosen as a file that
928 will seldom or never be subject to per-file include_rules.
930 # We ignore deps entries on auto-generated directories.
931 AUTO_GENERATED_DIRS = ['grit', 'jni']
933 # This pattern grabs the path without basename in the first
934 # parentheses, and the basename (if present) in the second. It
935 # relies on the simple heuristic that if there is a basename it will
936 # be a header file ending in ".h".
937 pattern = re.compile(
938 r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
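# e.g. (hypothetical DEPS lines): '"+third_party/skia/include",' results in
# 'third_party/skia/include/DEPS', while '"+base/memory/ref_counted.h",'
# results in 'base/memory/ref_counted.h'.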
939 results = set()
940 for changed_line in changed_lines:
941 m = pattern.match(changed_line)
942 if m:
943 path = m.group(1)
944 if path.split('/')[0] not in AUTO_GENERATED_DIRS:
945 if m.group(2):
946 results.add('%s%s' % (path, m.group(2)))
947 else:
948 results.add('%s/DEPS' % path)
949 return results
952 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
953 """When a dependency prefixed with + is added to a DEPS file, we
954 want to make sure that the change is reviewed by an OWNER of the
955 target file or directory, to avoid layering violations from being
956 introduced. This check verifies that this happens.
958 changed_lines = set()
959 for f in input_api.AffectedFiles():
960 filename = input_api.os_path.basename(f.LocalPath())
961 if filename == 'DEPS':
962 changed_lines |= set(line.strip()
963 for line_num, line
964 in f.ChangedContents())
965 if not changed_lines:
966 return []
968 virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
969 changed_lines)
970 if not virtual_depended_on_files:
971 return []
973 if input_api.is_committing:
974 if input_api.tbr:
975 return [output_api.PresubmitNotifyResult(
976 '--tbr was specified, skipping OWNERS check for DEPS additions')]
977 if not input_api.change.issue:
978 return [output_api.PresubmitError(
979 "DEPS approval by OWNERS check failed: this change has "
980 "no Rietveld issue number, so we can't check it for approvals.")]
981 output = output_api.PresubmitError
982 else:
983 output = output_api.PresubmitNotifyResult
985 owners_db = input_api.owners_db
986 owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
987 input_api,
988 owners_db.email_regexp,
989 approval_needed=input_api.is_committing)
991 owner_email = owner_email or input_api.change.author_email
993 reviewers_plus_owner = set(reviewers)
994 if owner_email:
995 reviewers_plus_owner.add(owner_email)
996 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
997 reviewers_plus_owner)
999 # We strip the /DEPS part that was added by
1000 # _FilesToCheckForIncomingDeps to fake a path to a file in a
1001 # directory.
1002 def StripDeps(path):
1003 start_deps = path.rfind('/DEPS')
1004 if start_deps != -1:
1005 return path[:start_deps]
1006 else:
1007 return path
1008 unapproved_dependencies = ["'+%s'," % StripDeps(path)
1009 for path in missing_files]
1011 if unapproved_dependencies:
1012 output_list = [
1013 output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
1014 '\n '.join(sorted(unapproved_dependencies)))]
1015 if not input_api.is_committing:
1016 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
1017 output_list.append(output(
1018 'Suggested missing target path OWNERS:\n %s' %
1019 '\n '.join(suggested_owners or [])))
1020 return output_list
1022 return []
1025 def _CheckSpamLogging(input_api, output_api):
1026 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1027 black_list = (_EXCLUDED_PATHS +
1028 _TEST_CODE_EXCLUDED_PATHS +
1029 input_api.DEFAULT_BLACK_LIST +
1030 (r"^base[\\\/]logging\.h$",
1031 r"^base[\\\/]logging\.cc$",
1032 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
1033 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
1034 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
1035 r"startup_browser_creator\.cc$",
1036 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
1037 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
1038 r"diagnostics_writer\.cc$",
1039 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
1040 r"^chromecast[\\\/]",
1041 r"^cloud_print[\\\/]",
1042 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
1043 r"gl_helper_benchmark\.cc$",
1044 r"^courgette[\\\/]courgette_tool\.cc$",
1045 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
1046 r"^ipc[\\\/]ipc_logging\.cc$",
1047 r"^native_client_sdk[\\\/]",
1048 r"^remoting[\\\/]base[\\\/]logging\.h$",
1049 r"^remoting[\\\/]host[\\\/].*",
1050 r"^sandbox[\\\/]linux[\\\/].*",
1051 r"^tools[\\\/]",
1052 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
1053 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
1054 r"dump_file_system.cc$",))
1055 source_file_filter = lambda x: input_api.FilterSourceFile(
1056 x, white_list=(file_inclusion_pattern,), black_list=black_list)
1058 log_info = []
1059 printf = []
1061 for f in input_api.AffectedSourceFiles(source_file_filter):
1062 contents = input_api.ReadFile(f, 'rb')
1063 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
1064 log_info.append(f.LocalPath())
1065 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
1066 log_info.append(f.LocalPath())
1068 if input_api.re.search(r"\bprintf\(", contents):
1069 printf.append(f.LocalPath())
1070 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
1071 printf.append(f.LocalPath())
1073 if log_info:
1074 return [output_api.PresubmitError(
1075 'These files spam the console log with LOG(INFO):',
1076 items=log_info)]
1077 if printf:
1078 return [output_api.PresubmitError(
1079 'These files spam the console log with printf/fprintf:',
1080 items=printf)]
1081 return []
1084 def _CheckForAnonymousVariables(input_api, output_api):
1085 """These types are all expected to hold locks while in scope and
1086 so should never be anonymous (which causes them to be immediately
1087 destroyed)."""
1088 they_who_must_be_named = [
1089 'base::AutoLock',
1090 'base::AutoReset',
1091 'base::AutoUnlock',
1092 'SkAutoAlphaRestore',
1093 'SkAutoBitmapShaderInstall',
1094 'SkAutoBlitterChoose',
1095 'SkAutoBounderCommit',
1096 'SkAutoCallProc',
1097 'SkAutoCanvasRestore',
1098 'SkAutoCommentBlock',
1099 'SkAutoDescriptor',
1100 'SkAutoDisableDirectionCheck',
1101 'SkAutoDisableOvalCheck',
1102 'SkAutoFree',
1103 'SkAutoGlyphCache',
1104 'SkAutoHDC',
1105 'SkAutoLockColors',
1106 'SkAutoLockPixels',
1107 'SkAutoMalloc',
1108 'SkAutoMaskFreeImage',
1109 'SkAutoMutexAcquire',
1110 'SkAutoPathBoundsUpdate',
1111 'SkAutoPDFRelease',
1112 'SkAutoRasterClipValidate',
1113 'SkAutoRef',
1114 'SkAutoTime',
1115 'SkAutoTrace',
1116 'SkAutoUnref',
1118 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1119 # bad: base::AutoLock(lock.get());
1120 # not bad: base::AutoLock lock(lock.get());
1121 bad_pattern = input_api.re.compile(anonymous)
1122 # good: new base::AutoLock(lock.get())
1123 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1124 errors = []
1126 for f in input_api.AffectedFiles():
1127 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1128 continue
1129 for linenum, line in f.ChangedContents():
1130 if bad_pattern.search(line) and not good_pattern.search(line):
1131 errors.append('%s:%d' % (f.LocalPath(), linenum))
1133 if errors:
1134 return [output_api.PresubmitError(
1135 'These lines create anonymous variables that need to be named:',
1136 items=errors)]
1137 return []
1140 def _CheckCygwinShell(input_api, output_api):
1141 source_file_filter = lambda x: input_api.FilterSourceFile(
1142 x, white_list=(r'.+\.(gyp|gypi)$',))
1143 cygwin_shell = []
1145 for f in input_api.AffectedSourceFiles(source_file_filter):
1146 for linenum, line in f.ChangedContents():
1147 if 'msvs_cygwin_shell' in line:
1148 cygwin_shell.append(f.LocalPath())
1149 break
1151 if cygwin_shell:
1152 return [output_api.PresubmitError(
1153 'These files should not use msvs_cygwin_shell (the default is 0):',
1154 items=cygwin_shell)]
1155 return []
1158 def _CheckUserActionUpdate(input_api, output_api):
1159 """Checks if any new user action has been added."""
1160 if any('actions.xml' == input_api.os_path.basename(f) for f in
1161 input_api.LocalPaths()):
1162 # If actions.xml is already included in the changelist, the PRESUBMIT
1163 # for actions.xml will do a more complete presubmit check.
1164 return []
1166 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1167 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
1168 current_actions = None
1169 for f in input_api.AffectedFiles(file_filter=file_filter):
1170 for line_num, line in f.ChangedContents():
1171 match = input_api.re.search(action_re, line)
1172 if match:
1173 # Loads the contents of tools/metrics/actions/actions.xml into memory. It's
1174 # loaded only once.
1175 if not current_actions:
1176 with open('tools/metrics/actions/actions.xml') as actions_f:
1177 current_actions = actions_f.read()
1178 # Search for the matched user action name in |current_actions|.
1179 for action_name in match.groups():
1180 action = 'name="{0}"'.format(action_name)
1181 if action not in current_actions:
1182 return [output_api.PresubmitPromptWarning(
1183 'File %s line %d: %s is missing in '
1184 'tools/metrics/actions/actions.xml. Please run '
1185 'tools/metrics/actions/extract_actions.py to update.'
1186 % (f.LocalPath(), line_num, action_name))]
1187 return []
1190 def _GetJSONParseError(input_api, filename, eat_comments=True):
1191 try:
1192 contents = input_api.ReadFile(filename)
1193 if eat_comments:
1194 json_comment_eater = input_api.os_path.join(
1195 input_api.PresubmitLocalPath(),
1196 'tools', 'json_comment_eater', 'json_comment_eater.py')
1197 process = input_api.subprocess.Popen(
1198 [input_api.python_executable, json_comment_eater],
1199 stdin=input_api.subprocess.PIPE,
1200 stdout=input_api.subprocess.PIPE,
1201 universal_newlines=True)
1202 (contents, _) = process.communicate(input=contents)
1204 input_api.json.loads(contents)
1205 except ValueError as e:
1206 return e
1207 return None
1210 def _GetIDLParseError(input_api, filename):
1211 try:
1212 contents = input_api.ReadFile(filename)
1213 idl_schema = input_api.os_path.join(
1214 input_api.PresubmitLocalPath(),
1215 'tools', 'json_schema_compiler', 'idl_schema.py')
1216 process = input_api.subprocess.Popen(
1217 [input_api.python_executable, idl_schema],
1218 stdin=input_api.subprocess.PIPE,
1219 stdout=input_api.subprocess.PIPE,
1220 stderr=input_api.subprocess.PIPE,
1221 universal_newlines=True)
1222 (_, error) = process.communicate(input=contents)
1223 return error or None
1224 except ValueError as e:
1225 return e
1228 def _CheckParseErrors(input_api, output_api):
1229 """Check that IDL and JSON files do not contain syntax errors."""
1230 actions = {
1231 '.idl': _GetIDLParseError,
1232 '.json': _GetJSONParseError,
1234 # These paths contain test data and other known invalid JSON files.
1235 excluded_patterns = [
1236 r'test[\\\/]data[\\\/]',
1237 r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
1239 # Most JSON files are preprocessed and support comments, but these do not.
1240 json_no_comments_patterns = [
1241 r'^testing[\\\/]',
1243 # Only run IDL checker on files in these directories.
1244 idl_included_patterns = [
1245 r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
1246 r'^extensions[\\\/]common[\\\/]api[\\\/]',
1249 def get_action(affected_file):
1250 filename = affected_file.LocalPath()
1251 return actions.get(input_api.os_path.splitext(filename)[1])
1253 def MatchesFile(patterns, path):
1254 for pattern in patterns:
1255 if input_api.re.search(pattern, path):
1256 return True
1257 return False
1259 def FilterFile(affected_file):
1260 action = get_action(affected_file)
1261 if not action:
1262 return False
1263 path = affected_file.LocalPath()
1265 if MatchesFile(excluded_patterns, path):
1266 return False
1268 if (action == _GetIDLParseError and
1269 not MatchesFile(idl_included_patterns, path)):
1270 return False
1271 return True
1273 results = []
1274 for affected_file in input_api.AffectedFiles(
1275 file_filter=FilterFile, include_deletes=False):
1276 action = get_action(affected_file)
1277 kwargs = {}
1278 if (action == _GetJSONParseError and
1279 MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
1280 kwargs['eat_comments'] = False
1281 parse_error = action(input_api,
1282 affected_file.AbsoluteLocalPath(),
1283 **kwargs)
1284 if parse_error:
1285 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
1286 (affected_file.LocalPath(), parse_error)))
1287 return results
1290 def _CheckJavaStyle(input_api, output_api):
1291 """Runs checkstyle on changed java files and returns errors if any exist."""
1292 import sys
1293 original_sys_path = sys.path
1294 try:
1295 sys.path = sys.path + [input_api.os_path.join(
1296 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
1297 import checkstyle
1298 finally:
1299 # Restore sys.path to what it was before.
1300 sys.path = original_sys_path
1302 return checkstyle.RunCheckstyle(
1303 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
1304 black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
1307 def _CheckAndroidCrLogUsage(input_api, output_api):
1308 """Checks that new logs using org.chromium.base.Log:
1309 - Are using 'TAG' as variable name for the tags (warn)
1310 - Are using the suggested name format for the tags: "cr.<PackageTag>" (warn)
1311 - Are using a tag that is shorter than 23 characters (error)
1313 cr_log_import_pattern = input_api.re.compile(
1314 r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
1315 # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
1316 cr_log_pattern = input_api.re.compile(r'^\s*Log\.\w\((?P<tag>\"?\w+\"?)\,')
1317 log_decl_pattern = input_api.re.compile(
1318 r'^\s*private static final String TAG = "(?P<name>(.*)")',
1319 input_api.re.MULTILINE)
1320 log_name_pattern = input_api.re.compile(r'^cr[.\w]*')
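# e.g. (hypothetical) 'private static final String TAG = "cr.Sync";' satisfies
# log_name_pattern, whereas a tag like "SyncManagerImpl" would be reported
# below as not using the "cr" prefix.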
1322 REF_MSG = ('See base/android/java/src/org/chromium/base/README_logging.md '
1323 'or contact dgn@chromium.org for more info.')
1324 sources = lambda x: input_api.FilterSourceFile(x, white_list=(r'.*\.java$',))
1325 tag_errors = []
1326 tag_decl_errors = []
1327 tag_length_errors = []
1329 for f in input_api.AffectedSourceFiles(sources):
1330 file_content = input_api.ReadFile(f)
1331 has_modified_logs = False
1333 # Per line checks
1334 if cr_log_import_pattern.search(file_content):
1335 for line_num, line in f.ChangedContents():
1337 # Check if the new line is doing some logging
1338 match = cr_log_pattern.search(line)
1339 if match:
1340 has_modified_logs = True
1342 # Make sure it uses "TAG"
1343 if not match.group('tag') == 'TAG':
1344 tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
1346 # Per file checks
1347 if has_modified_logs:
1348 # Make sure the tag is using the "cr" prefix and is not too long
1349 match = log_decl_pattern.search(file_content)
1350 tag_name = match.group('name') if match else ''
1351 if not log_name_pattern.search(tag_name):
1352 tag_decl_errors.append(f.LocalPath())
1353 if len(tag_name) > 23:
1354 tag_length_errors.append(f.LocalPath())
1356 results = []
1357 if tag_decl_errors:
1358 results.append(output_api.PresubmitPromptWarning(
1359 'Please define your tags using the suggested format:\n'
1360 '"private static final String TAG = "cr.<package tag>".\n' + REF_MSG,
1361 tag_decl_errors))
1363 if tag_length_errors:
1364 results.append(output_api.PresubmitError(
1365 'The tag length is restricted by the system to be at most '
1366 '23 characters.\n' + REF_MSG,
1367 tag_length_errors))
1369 if tag_errors:
1370 results.append(output_api.PresubmitPromptWarning(
1371 'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
1372 tag_errors))
1374 return results
1377 # TODO(dgn): refactor with _CheckAndroidCrLogUsage
1378 def _CheckNoNewUtilLogUsage(input_api, output_api):
1379 """Checks that new logs are using org.chromium.base.Log."""
1381 chromium_log_import_pattern = input_api.re.compile(
1382 r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
1383 log_pattern = input_api.re.compile(r'^\s*(android\.util\.)?Log\.\w')
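# e.g. (hypothetical) a new line 'Log.e(TAG, "boom");' or
# 'android.util.Log.w(TAG, msg);' is flagged unless the file already imports
# org.chromium.base.Log.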
1384 sources = lambda x: input_api.FilterSourceFile(x, white_list=(r'.*\.java$',))
1386 errors = []
1388 for f in input_api.AffectedSourceFiles(sources):
1389 if chromium_log_import_pattern.search(input_api.ReadFile(f)) is not None:
1390 # Uses org.chromium.base.Log already
1391 continue
1393 for line_num, line in f.ChangedContents():
1394 if log_pattern.search(line):
1395 errors.append("%s:%d" % (f.LocalPath(), line_num))
1397 results = []
1398 if len(errors):
1399 results.append(output_api.PresubmitPromptWarning(
1400 'Please use org.chromium.base.Log for new logs.\n' +
1401 'See base/android/java/src/org/chromium/base/README_logging.md ' +
1402 'or contact dgn@chromium.org for more info.',
1403 errors))
1404 return results
1407 def _CheckForCopyrightedCode(input_api, output_api):
1408 """Verifies that newly added code doesn't contain copyrighted material
1409 and is properly licensed under the standard Chromium license.
1411 As there can be false positives, we maintain a whitelist file. This check
1412 also verifies that the whitelist file is up to date.
1414 import sys
1415 original_sys_path = sys.path
1416 try:
1417 sys.path = sys.path + [input_api.os_path.join(
1418 input_api.PresubmitLocalPath(), 'android_webview', 'tools')]
1419 import copyright_scanner
1420 finally:
1421 # Restore sys.path to what it was before.
1422 sys.path = original_sys_path
1424 return copyright_scanner.ScanAtPresubmit(input_api, output_api)
1427 def _CheckSingletonInHeaders(input_api, output_api):
1428 """Checks to make sure no header files have |Singleton<|."""
1429 def FileFilter(affected_file):
1430 # It's ok for base/memory/singleton.h to have |Singleton<|.
1431 black_list = (_EXCLUDED_PATHS +
1432 input_api.DEFAULT_BLACK_LIST +
1433 (r"^base[\\\/]memory[\\\/]singleton\.h$",))
1434 return input_api.FilterSourceFile(affected_file, black_list=black_list)
1436 pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
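# e.g. (hypothetical) 'Singleton<MyType>::get()' in a header is reported,
# while 'friend class Singleton<MyType>;' is not, thanks to the
# (?<!class\s) lookbehind.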
1437 files = []
1438 for f in input_api.AffectedSourceFiles(FileFilter):
1439 if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
1440 f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
1441 contents = input_api.ReadFile(f)
1442 for line in contents.splitlines(False):
1443 if (not input_api.re.match(r'//', line) and # Skip C++ comment lines.
1444 pattern.search(line)):
1445 files.append(f)
1446 break
1448 if files:
1449 return [ output_api.PresubmitError(
1450 'Found Singleton<T> in the following header files.\n' +
1451 'Please move them to an appropriate source file so that the ' +
1452 'template gets instantiated in a single compilation unit.',
1453 files) ]
1454 return []
1457 _DEPRECATED_CSS = [
1458 # Values
1459 ( "-webkit-box", "flex" ),
1460 ( "-webkit-inline-box", "inline-flex" ),
1461 ( "-webkit-flex", "flex" ),
1462 ( "-webkit-inline-flex", "inline-flex" ),
1463 ( "-webkit-min-content", "min-content" ),
1464 ( "-webkit-max-content", "max-content" ),
1466 # Properties
1467 ( "-webkit-background-clip", "background-clip" ),
1468 ( "-webkit-background-origin", "background-origin" ),
1469 ( "-webkit-background-size", "background-size" ),
1470 ( "-webkit-box-shadow", "box-shadow" ),
1472 # Functions
1473 ( "-webkit-gradient", "gradient" ),
1474 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1475 ( "-webkit-linear-gradient", "linear-gradient" ),
1476 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1477 ( "-webkit-radial-gradient", "radial-gradient" ),
1478 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
1481 def _CheckNoDeprecatedCSS(input_api, output_api):
1482 """ Make sure that we don't use deprecated CSS
1483 properties, functions or values. Our external
1484 documentation and iOS CSS for dom distiller
1485 (reader mode) are ignored by the hooks as they
1486 need to be consumed by WebKit. """
1487 results = []
1488 file_inclusion_pattern = (r".+\.css$",)
1489 black_list = (_EXCLUDED_PATHS +
1490 _TEST_CODE_EXCLUDED_PATHS +
1491 input_api.DEFAULT_BLACK_LIST +
1492 (r"^chrome/common/extensions/docs",
1493 r"^chrome/docs",
1494 r"^components/dom_distiller/core/css/distilledpage_ios.css",
1495 r"^native_client_sdk"))
1496 file_filter = lambda f: input_api.FilterSourceFile(
1497 f, white_list=file_inclusion_pattern, black_list=black_list)
1498 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1499 for line_num, line in fpath.ChangedContents():
1500 for (deprecated_value, value) in _DEPRECATED_CSS:
1501 if deprecated_value in line:
1502 results.append(output_api.PresubmitError(
1503 "%s:%d: Use of deprecated CSS %s, use %s instead" %
1504 (fpath.LocalPath(), line_num, deprecated_value, value)))
1505 return results
1508 _DEPRECATED_JS = [
1509 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
1510 ( "__defineGetter__", "Object.defineProperty" ),
1511 ( "__defineSetter__", "Object.defineProperty" ),
1514 def _CheckNoDeprecatedJS(input_api, output_api):
1515 """Make sure that we don't use deprecated JS in Chrome code."""
1516 results = []
1517 file_inclusion_pattern = (r".+\.js$",) # TODO(dbeam): .html?
1518 black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
1519 input_api.DEFAULT_BLACK_LIST)
1520 file_filter = lambda f: input_api.FilterSourceFile(
1521 f, white_list=file_inclusion_pattern, black_list=black_list)
1522 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1523 for lnum, line in fpath.ChangedContents():
1524 for (deprecated, replacement) in _DEPRECATED_JS:
1525 if deprecated in line:
1526 results.append(output_api.PresubmitError(
1527 "%s:%d: Use of deprecated JS %s, use %s instead" %
1528 (fpath.LocalPath(), lnum, deprecated, replacement)))
1529 return results
1532 def _AndroidSpecificOnUploadChecks(input_api, output_api):
1533 """Groups checks that target android code."""
1534 results = []
1535 results.extend(_CheckNoNewUtilLogUsage(input_api, output_api))
1536 results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
1537 return results


def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  # TODO(danakj): Remove this when base/move.h is removed.
  results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckNoDeprecatedJS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForCopyrightedCode(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))

  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results


def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-Googler/non-chromite committers, verify the author's email address
  is in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  valid_authors = (
      input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
      for line in open(authors_path))
  valid_authors = [item.group(1).lower() for item in valid_authors if item]
  if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []
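
# Illustration only (not from the original file): the regex above accepts
# AUTHORS lines of the form "Jane Doe <jane@example.com>" and extracts the
# address inside the angle brackets. Because fnmatch is used for the
# comparison, a hypothetical wildcard entry such as
# "Example Org <*@example.com>" would cover every address at that domain.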


def _CheckPatchFiles(input_api, output_api):
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
      if f.LocalPath().endswith(('.orig', '.rej'))]
  if problems:
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
  else:
    return []


def _DidYouMeanOSMacro(bad_macro):
  try:
    return {'A': 'OS_ANDROID',
            'B': 'OS_BSD',
            'C': 'OS_CHROMEOS',
            'F': 'OS_FREEBSD',
            'L': 'OS_LINUX',
            'M': 'OS_MACOSX',
            'N': 'OS_NACL',
            'O': 'OS_OPENBSD',
            'P': 'OS_POSIX',
            'S': 'OS_SOLARIS',
            'W': 'OS_WIN'}[bad_macro[3].upper()]
  except KeyError:
    return ''
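
# Illustration only (not from the original file): the lookup keys off the
# fourth character of the macro name, so _DidYouMeanOSMacro('OS_MAC') returns
# 'OS_MACOSX', while a name with an unrecognized letter, e.g. 'OS_QNX', hits
# the KeyError branch and returns ''.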


def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append(' %s:%d %s%s' % (f.LocalPath(),
                                          lnum,
                                          match.group(1),
                                          did_you_mean))
  return results
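
# Illustration only (not from the original file): a changed preprocessor line
# such as "#if defined(OS_MAC)" would be collected here and reported by
# _CheckForInvalidOSMacros below with the hint "(did you mean OS_MACOSX?)",
# assuming OS_MAC is not listed in _VALID_OS_MACROS.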


def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]


def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
  """Check the given file for "if defined" checks on always-defined macros."""
  ALWAYS_DEFINED_MACROS = (
      "TARGET_CPU_PPC",
      "TARGET_CPU_PPC64",
      "TARGET_CPU_68K",
      "TARGET_CPU_X86",
      "TARGET_CPU_ARM",
      "TARGET_CPU_MIPS",
      "TARGET_CPU_SPARC",
      "TARGET_CPU_ALPHA",
      "TARGET_IPHONE_SIMULATOR",
      "TARGET_OS_EMBEDDED",
      "TARGET_OS_IPHONE",
      "TARGET_OS_MAC",
      "TARGET_OS_UNIX",
      "TARGET_OS_WIN32",
  )
  ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
  results = []
  for lnum, line in f.ChangedContents():
    for match in ifdef_macro.finditer(line):
      if match.group(1) in ALWAYS_DEFINED_MACROS:
        always_defined = ' %s is always defined. ' % match.group(1)
        did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
        results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
                                            lnum,
                                            always_defined,
                                            did_you_mean))
  return results
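
# Illustration only (not from the original file): a changed line like
# "#ifdef TARGET_OS_IPHONE" gets flagged because that macro is always defined
# (the Apple TargetConditionals macros are set to 0 or 1 rather than left
# undefined), and the suggested fix is the value check "#if TARGET_OS_IPHONE".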


def _CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
      bad_macros)]


def _CheckForUsingSideEffectsOfPass(input_api, output_api):
  """Check all affected files for using side effects of Pass."""
  errors = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      for lnum, line in f.ChangedContents():
        # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
        if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
          errors.append(output_api.PresubmitError(
              ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
               'See crbug.com/418297.') % (f.LocalPath(), lnum)))
  return errors
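
# Note (not part of the original file): the pattern above rejects code such as
# "TakeByRef(*thing.Pass());" because Pass() hands ownership to a temporary
# scoped_ptr that is destroyed at the end of the statement, so anything kept
# from the dereference can be left dangling; see crbug.com/418297 for details.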


def _CheckForIPCRules(input_api, output_api):
  """Check for the IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  """
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

  problems = []
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    if not local_path.endswith('.h'):
      continue
    for line_number, line in f.ChangedContents():
      if inclusion_pattern.search(line) and not comment_pattern.search(line):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptWarning(
        _IPC_ENUM_TRAITS_DEPRECATED, problems)]
  else:
    return []
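
# Illustration only (not from the original file): adding a line such as
# "IPC_ENUM_TRAITS(MyEnum)" to a changed header triggers the
# _IPC_ENUM_TRAITS_DEPRECATED prompt, while a commented-out occurrence
# ("// IPC_ENUM_TRAITS(MyEnum)") is deliberately skipped via comment_pattern.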


def _CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  endings.
  """
  known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'

  file_inclusion_pattern = (
      known_text_files,
      r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  )

  filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=None)
  files = [f.LocalPath() for f in
           input_api.AffectedSourceFiles(filter)]

  problems = []

  for file in files:
    fp = open(file, 'r')
    for line in fp:
      if line.endswith('\r\n'):
        problems.append(file)
        break
    fp.close()

  if problems:
    return [output_api.PresubmitPromptWarning('Are you sure that you want '
        'these files to contain Windows style line endings?\n' +
        '\n'.join(problems))]

  return []


def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(_CheckValidHostsInDEPS(input_api, output_api))
  results.extend(_CheckJavaStyle(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckGNFormatted(input_api, output_api))
  results.extend(_CheckUmaHistogramChanges(input_api, output_api))
  results.extend(_AndroidSpecificOnUploadChecks(input_api, output_api))
  return results


def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = master_map.get(bot)
  if not master:
    if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
      master = 'tryserver.chromium.linux'
    elif 'win' in bot:
      master = 'tryserver.chromium.win'
    elif 'mac' in bot or 'ios' in bot:
      master = 'tryserver.chromium.mac'
  return master
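
# Illustration only (not from the original file): 'chromium_presubmit' resolves
# through the explicit map to 'tryserver.chromium.linux', while a hypothetical
# bot name like 'win8_chromium_rel' falls through to the substring heuristics
# and resolves to 'tryserver.chromium.win'.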


def GetDefaultTryConfigs(bots):
  """Returns a dict mapping try server master to {bot: set(['defaulttests'])}
  for the given [bots].
  """
  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  for bot, tests in builders_and_tests.iteritems():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
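
# Illustration only (not from the original file):
#   GetDefaultTryConfigs(['chromium_presubmit'])
# would return
#   {'tryserver.chromium.linux': {'chromium_presubmit': set(['defaulttests'])}}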


def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  return results


def GetPreferredTryMasters(project, change):
  import json
  import os.path
  import platform
  import subprocess

  cq_config_path = os.path.join(
      change.RepositoryRoot(), 'infra', 'config', 'cq.cfg')
  # commit_queue.py below is a script in depot_tools directory, which has a
  # 'builders' command to retrieve a list of CQ builders from the CQ config.
  is_win = platform.system() == 'Windows'
  masters = json.loads(subprocess.check_output(
      ['commit_queue', 'builders', cq_config_path], shell=is_win))

  # Explicitly iterate over copies of keys since we mutate them.
  for master in masters.keys():
    for builder in masters[master].keys():
      # Do not trigger presubmit builders, since they're likely to fail
      # (e.g. OWNERS checks before finished code review), and we're
      # running local presubmit anyway.
      if 'presubmit' in builder:
        masters[master].pop(builder)
      else:
        # Convert testfilter format to the one expected by git-cl-try.
        testfilter = masters[master][builder].get('testfilter', 'defaulttests')
        masters[master][builder] = [testfilter]

  return masters
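
# Illustration only (not from the original file): assuming cq.cfg lists a
# single non-presubmit builder, the value handed back to git cl try would look
# like
#   {'tryserver.chromium.linux': {'linux_chromium_rel_ng': ['defaulttests']}}
# with any builder whose name contains 'presubmit' already removed above.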