Update V8 to version 4.7.1.
[chromium-blink-merge.git] / PRESUBMIT.py
blob7cbfa4474df12fa36aa647673531da79d4d6652f
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
9 """
12 _EXCLUDED_PATHS = (
13 r"^breakpad[\\\/].*",
14 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
15 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
16 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
17 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
18 r"^skia[\\\/].*",
19 r"^v8[\\\/].*",
20 r".*MakeFile$",
21 r".+_autogen\.h$",
22 r".+[\\\/]pnacl_shim\.c$",
23 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
24 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
# The NetscapePlugIn library is excluded from pan-project as it will soon
# be deleted together with the rest of the NPAPI and it's not worthwhile to
# update the coding style until then.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).  Closing paren restored — it was lost in extraction.
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)
# Warning text for _CheckNoProductionCodeUsingTestOnlyFunctions.
_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')


# Warning text for _CheckIncludeOrder.
_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Remember to use the right '
    'collation (LC_COLLATE=C) and check\nhttps://google-styleguide.googlecode'
    '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')
# Banned Objective-C patterns.  Each entry is a 3-tuple:
#   (pattern, message-lines, treat-as-error)
# A pattern starting with '/' is a regular expression; anything else is
# matched as a plain substring (see _CheckNoBannedFunctions).
# Nested tuple delimiters restored — they were lost in extraction.  The first
# message previously concatenated to "isprohibited" (missing space); fixed.
_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is '
       'prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)
# Banned C++ patterns.  Each entry is a 4-tuple:
#   (pattern, message-lines, treat-as-error, excluded-path-regexes)
# A pattern starting with '/' is a regular expression; anything else is
# matched as a plain substring (see _CheckNoBannedFunctions).
# Nested tuple delimiters restored — they were lost in extraction.  Non-raw
# strings containing regex escapes (\. and \<) are now raw strings: same
# byte values, but no invalid-escape DeprecationWarning on Python 3.
_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
        r"customization_document_browsertest\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
        r"simple_platform_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
        r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
        r"drm_display_host_manager\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
    (
      # NOTE(review): this pattern does not start with '/', so it is matched
      # as a literal substring containing backslashes and can only fire on
      # lines that contain a literal "\<".  It was probably meant to be the
      # regex '/\<MessageLoopProxy\>' — TODO confirm before changing.
      r'\<MessageLoopProxy\>',
      (
        'MessageLoopProxy is deprecated. ',
        'Please use SingleThreadTaskRunner or ThreadTaskRunnerHandle instead.'
      ),
      True,
      (
        # Internal message_loop related code may still use it.
        r'^base[\\\/]message_loop[\\\/].*',
      ),
    ),
)
# Warning text for the IPC_ENUM_TRAITS deprecation check.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')


# OS_* macros that are allowed to appear in defined(...) checks.
# Closing paren restored — it was lost in extraction.
_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_NACL_NONSFI',
    'OS_NACL_SFI',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.

  Returns a list with one PresubmitPromptOrNotify result, or [].
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  # Matches foo->ForTesting(...), test::Bar(...), for_testing(...) etc.
  base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # Qualified calls (Foo::BarForTesting) and definitions ("...ForTesting ... {")
  # are not call sites we want to flag.
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    """Limits the check to implementation files outside test directories."""
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>.

  <iostream> injects a static initializer into every including file; headers
  should use <ostream> instead.  Returns [PresubmitError] or [].
  """
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  # Idiomatic truthiness test instead of the original `if len(files):`.
  if files:
    return [output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        files)]
  return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST"""
  problems = []
  for f in input_api.AffectedFiles():
    # UNIT_TEST belongs in headers only; implementation files are flagged.
    if not f.LocalPath().endswith(('.cc', '.mm')):
      continue

    problems.extend(
        ' %s:%d' % (f.LocalPath(), line_num)
        for line_num, line in f.ChangedContents()
        if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
                                            '\n'.join(problems))]
def _FindHistogramNameInLine(histogram_name, line):
  """Tries to find a histogram name or prefix in a line.

  Returns True when |line| contains |histogram_name| directly, or, for
  affected-histogram lines, when the quoted name in |line| is a prefix of
  |histogram_name|.
  """
  # PEP 8 idiom: `x not in y` rather than `not x in y`.
  if "affected-histogram" not in line:
    return histogram_name in line
  # A histogram_suffixes tag type has an affected-histogram name as a prefix of
  # the histogram_name.
  if '"' not in line:
    return False
  histogram_prefix = line.split('\"')[1]
  return histogram_prefix in histogram_name
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself."""
  touched_histograms = []
  histograms_xml_modifications = []
  # Captures the quoted histogram name from e.g. UMA_HISTOGRAM_COUNTS("Foo", n).
  pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
      continue
    if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      continue
    for line_num, line in f.ChangedContents():
      found = pattern.search(line)
      if found:
        # Each entry is [histogram_name, affected_file, line_number].
        touched_histograms.append([found.group(1), f, line_num])

  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, on the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
      if histogram_name_found:
        break
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)

  histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
  problems = []
  if unmatched_histograms:
    with open(histograms_xml_path) as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        # Rewind so each histogram gets a full scan of the file.
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
          if histogram_name_found:
            break
        if not histogram_name_found:
          problems.append(' [%s:%d] %s' %
            (f.LocalPath(), line_num, histogram_name))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
    'been modified and the associated histogram name has no match in either '
    '%s or the modifications of it:' % (histograms_xml_path), problems)]
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    path = f.LocalPath()
    # Windows-specific files (and tests) are allowed to use wstring.
    if (not path.endswith(('.cc', '.h')) or
        path.endswith(('test.cc', '_win.cc', '_win.h')) or
        '/win/' in path):
      continue

    allow_wstring = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        # The annotation exempts exactly the next changed line.
        allow_wstring = True
        continue
      if 'wstring' in line and not allow_wstring:
        problems.append(' %s:%d' % (path, line_num))
      allow_wstring = False

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      ' If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(problems))]
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  touched = [f for f in input_api.AffectedFiles()
             if f.LocalPath().endswith('.DEPS.git')]
  if not touched:
    return []
  return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
def _CheckValidHostsInDEPS(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
    return []
  # Outsource work to gclient verify
  try:
    input_api.subprocess.check_output(['gclient', 'verify'])
    return []
  # `except E as e` replaces the Python-2-only `except E, e` syntax; it is
  # valid on Python 2.6+ and required on Python 3.
  except input_api.subprocess.CalledProcessError as error:
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used.

  Scans changed Objective-C files against _BANNED_OBJC_FUNCTIONS and C++
  files against _BANNED_CPP_FUNCTIONS; returns presubmit warnings/errors.
  """
  warnings = []
  errors = []

  def _Matches(func_name, line):
    """True when |line| uses |func_name| (regex if prefixed with '/')."""
    if func_name[0:1] == '/':
      return bool(input_api.re.search(func_name[1:], line))
    return func_name in line

  def _Report(local_path, line_num, message, error):
    """Appends the location and message lines to errors or warnings."""
    problems = errors if error else warnings
    problems.append(' %s:%d:' % (local_path, line_num))
    for message_line in message:
      problems.append(' %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        if _Matches(func_name, line):
          _Report(f.LocalPath(), line_num, message, error)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        # Some banned patterns are grandfathered in for specific files.
        if any(input_api.re.match(item, f.LocalPath())
               for item in excluded_paths):
          continue
        if _Matches(func_name, line):
          _Report(f.LocalPath(), line_num, message, error)

  result = []
  if warnings:
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files.

  (The previous docstring, "Make sure that banned functions are not used.",
  was a copy-paste error from _CheckNoBannedFunctions.)
  """
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      # search() rather than the original match(): "? true : false" can occur
      # anywhere in a line, while match() only anchors at the start, so the
      # check used to fire only on wrapped conditional expressions.
      if pattern.search(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  import sys
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  # Collect every changed line of every C++ file; checkdeps itself picks the
  # #include lines out of them.
  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  # DISALLOW ('-') rule violations are errors; everything else ('!') is only
  # a prompt/notify warning.
  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  if input_api.platform == 'win32':
    # File permissions are meaningless on Windows; nothing to check.
    return []
  args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
          '--root', input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    args.extend(['--file', f.LocalPath()])
  checkperms = input_api.subprocess.Popen(args,
                                          stdout=input_api.subprocess.PIPE)
  errors = checkperms.communicate()[0].strip()
  if not errors:
    return []
  return [output_api.PresubmitError('checkperms.py failed.',
                                    errors.splitlines())]
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.

  Returns a list with one PresubmitError, or [].
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    # Only header files are checked; .cc files may include it freely.
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append(' %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h', errors))
  return results
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files

  |scope| is a list of (line_num, line) pairs that are all #include lines.
  Returns a list of warning strings for problems touching changed lines.
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  # State machine over the three include blocks: the block kind may only
  # advance, and within a block lines must be sorted (plain string compare,
  # matching LC_COLLATE=C).
  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  out_of_order = " - line belongs before previous line"
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num,
                                 " - C system include file in wrong block"))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num,
                                 out_of_order))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num,
                                 " - c++ system include file in wrong block"))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num, out_of_order))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num, out_of_order))
    else:
      problem_linenums.append((line_num, previous_line_num,
                               "Unknown include type"))
    previous_line = line
    previous_line_num = line_num

  # Only report problems where this CL actually touched one of the two lines
  # involved, to avoid nagging about pre-existing ordering issues.
  warnings = []
  for (line_num, previous_line_num, failure_type) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append(' %s:%d:%s' % (file_path, line_num, failure_type))
  return warnings
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f.

  Skips an optional "special first include" (the file's own header), splits
  the remaining includes into #if/#endif scopes, and checks each scope with
  _CheckIncludeOrderForScope.  Returns a list of warning strings.
  """

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  # specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
        break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    # Respect the global exclusions plus the default presubmit black list.
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    # Only C++/Objective-C++ sources and headers have include-order rules.
    if f.LocalPath().endswith(('.cc', '.h', '.mm')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results
def _CheckForVersionControlConflictsInFile(input_api, f):
  """Returns changed lines of |f| that look like VCS conflict markers."""
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  if f.LocalPath().endswith('.md'):
    # First-level headers in markdown look a lot like version control
    # conflict markers. http://daringfireball.net/projects/markdown/basics
    return []
  return [' %s:%d %s' % (f.LocalPath(), line_num, line)
          for line_num, line in f.ChangedContents()
          if pattern.match(line)]
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors += _CheckForVersionControlConflictsInFile(input_api, f)

  if not errors:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', errors)]
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  """Flags quoted google.com URLs added below src/chrome/."""
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      # Commented-out occurrences are fine.
      if pattern.search(line) and not comment_pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if not problems:
    return []
  return [output_api.PresubmitPromptOrNotify(
      'Most layers below src/chrome/ should not hardcode service URLs.\n'
      'Are you sure this is correct?',
      [' %s:%d: %s' % problem for problem in problems])]
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  errors = []
  # Flags any PNG whose name contains a single-letter word, e.g. icon_h.png.
  white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
  black_list = (r'^native_client_sdk[\\\/]',)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=white_list, black_list=black_list)
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    errors.append(' %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results
def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          # Directory entry: fake a filename so OWNERS lookup has a target.
          results.add('%s/DEPS' % path)
  return results
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  # Gather every changed line from any DEPS file in the change.
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  # Translate the "+path" entries into concrete file paths whose OWNERS
  # must approve the change (directories are faked as path/DEPS).
  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                          changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    # At commit time a missing approval is an error...
    output = output_api.PresubmitError
  else:
    # ...at upload time it is only a notification.
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # If Rietveld does not know the owner yet, fall back to the change author.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
             '\n '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      # During upload, additionally suggest reviewers who could provide the
      # missing approvals.
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n %s' %
          '\n '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Warns about changed implementation files that write to the console log.

  Scans for LOG(INFO)/DLOG(INFO)/LOG_IF(INFO, ...) and bare printf/fprintf
  to stdout/stderr. Files that legitimately log (logging internals,
  command-line tools, etc.) are black-listed below.
  """
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^components[\\\/]html_viewer[\\\/]"
                     r"web_test_delegate_impl\.cc$",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^ipc[\\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []  # Files containing LOG(INFO) / LOG_IF(INFO, ...).
  printf = []    # Files containing printf / fprintf(stdout|stderr, ...).

  for f in input_api.AffectedSourceFiles(source_file_filter):
    # Whole-file scan (binary read), not just the changed lines.
    contents = input_api.ReadFile(f, 'rb')
    if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if input_api.re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
      'These files spam the console log with LOG(INFO):',
      items=log_info)]
  if printf:
    return [output_api.PresubmitError(
      'These files spam the console log with printf/fprintf:',
      items=printf)]
  return []
def _CheckForAnonymousVariables(input_api, output_api):
  """Flags anonymous instances of scoped lock/resource holder types.

  These types are all expected to hold locks while in scope and so should
  never be anonymous (which causes them to be immediately destroyed).
  """
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)

  cpp_extensions = ('.cc', '.h', '.inl', '.m', '.mm')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(cpp_extensions):
      continue
    errors.extend(
        '%s:%d' % (f.LocalPath(), linenum)
        for linenum, line in f.ChangedContents()
        if bad_pattern.search(line) and not good_pattern.search(line))

  if not errors:
    return []
  return [output_api.PresubmitError(
      'These lines create anonymous variables that need to be named:',
      items=errors)]
def _CheckCygwinShell(input_api, output_api):
  """Flags changed .gyp/.gypi files that mention msvs_cygwin_shell."""
  def gyp_filter(affected_file):
    return input_api.FilterSourceFile(
        affected_file, white_list=(r'.+\.(gyp|gypi)$',))

  offenders = []
  for f in input_api.AffectedSourceFiles(gyp_filter):
    # One entry per file, regardless of how many lines mention it.
    if any('msvs_cygwin_shell' in line for _, line in f.ChangedContents()):
      offenders.append(f.LocalPath())

  if not offenders:
    return []
  return [output_api.PresubmitError(
      'These files should not use msvs_cygwin_shell (the default is 0):',
      items=offenders)]
def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  local_basenames = (input_api.os_path.basename(p)
                     for p in input_api.LocalPaths())
  if 'actions.xml' in local_basenames:
    # When actions.xml itself is part of the changelist, its own PRESUBMIT
    # performs a more complete check, so skip this one.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None  # Lazily-loaded contents of actions.xml.
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if not match:
        continue
      # Load tools/metrics/actions/actions.xml into memory only once.
      if not current_actions:
        with open('tools/metrics/actions/actions.xml') as actions_f:
          current_actions = actions_f.read()
      # Each referenced user action name must already be registered.
      for action_name in match.groups():
        if 'name="{0}"'.format(action_name) not in current_actions:
          return [output_api.PresubmitPromptWarning(
              'File %s line %d: %s is missing in '
              'tools/metrics/actions/actions.xml. Please run '
              'tools/metrics/actions/extract_actions.py to update.'
              % (f.LocalPath(), line_num, action_name))]
  return []
def _GetJSONParseError(input_api, filename, eat_comments=True):
  """Tries to parse |filename| as JSON; returns the ValueError on failure.

  Returns None when the file parses cleanly. When |eat_comments| is true,
  //-style comments are stripped via tools/json_comment_eater first.
  """
  def _ImportCommentEater():
    # Temporarily extend sys.path so the checked-in tool can be imported.
    import sys
    saved_path = sys.path
    try:
      sys.path = sys.path + [input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater')]
      import json_comment_eater
    finally:
      sys.path = saved_path
    return json_comment_eater

  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      contents = _ImportCommentEater().Nom(contents)
    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None
def _GetIDLParseError(input_api, filename):
  """Feeds |filename| through the IDL schema compiler.

  Returns the subprocess's stderr output on failure, the caught ValueError
  on a read error, or None when the IDL parses cleanly.
  """
  try:
    idl_text = input_api.ReadFile(filename)
    compiler_path = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    # Run the checked-in parser as a subprocess, piping the IDL to stdin.
    parser = input_api.subprocess.Popen(
        [input_api.python_executable, compiler_path],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    _, stderr_text = parser.communicate(input=idl_text)
    return stderr_text or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def ParserForFile(affected_file):
    # Look up the parse function by file extension; None when unsupported.
    extension = input_api.os_path.splitext(affected_file.LocalPath())[1]
    return actions.get(extension)

  def MatchesAny(patterns, path):
    return any(input_api.re.search(pattern, path) for pattern in patterns)

  def ShouldCheck(affected_file):
    parser = ParserForFile(affected_file)
    if not parser:
      return False
    path = affected_file.LocalPath()
    if MatchesAny(excluded_patterns, path):
      return False
    # IDL files are only checked inside the included directories.
    if (parser == _GetIDLParseError and
        not MatchesAny(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=ShouldCheck, include_deletes=False):
    parser = ParserForFile(affected_file)
    kwargs = {}
    if (parser == _GetJSONParseError and
        MatchesAny(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = parser(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  saved_sys_path = sys.path
  try:
    # Make the checked-in checkstyle wrapper importable for this call only.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    sys.path = saved_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
def _CheckAndroidToastUsage(input_api, output_api):
  """Checks that code uses org.chromium.ui.widget.Toast instead of
  android.widget.Toast (Chromium Toast doesn't force hardware
  acceleration on low-end devices, saving memory).
  """
  toast_import_pattern = input_api.re.compile(
      r'^import android\.widget\.Toast;$')

  def java_sources(affected_file):
    # Java files, excluding chromecast/ and remoting/ on top of the usual
    # excluded and test-code paths.
    return input_api.FilterSourceFile(
        affected_file,
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST +
                    (r'^chromecast[\\\/].*',
                     r'^remoting[\\\/].*')),
        white_list=(r'.*\.java$',))

  errors = ["%s:%d" % (f.LocalPath(), line_num)
            for f in input_api.AffectedSourceFiles(java_sources)
            for line_num, line in f.ChangedContents()
            if toast_import_pattern.search(line)]

  if not errors:
    return []
  return [output_api.PresubmitError(
      'android.widget.Toast usage is detected. Android toasts use hardware'
      ' acceleration, and can be\ncostly on low-end devices. Please use'
      ' org.chromium.ui.widget.Toast instead.\n'
      'Contact dskiba@chromium.org if you have any questions.',
      errors)]
def _CheckAndroidCrLogUsage(input_api, output_api):
  """Checks that new logs using org.chromium.base.Log:
    - Are using 'TAG' as variable name for the tags (warn)
    - Are using the suggested name format for the tags: "cr.<PackageTag>" (warn)
    - Are using a tag that is shorter than 23 characters (error)
  """
  cr_log_import_pattern = input_api.re.compile(
      r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
  class_in_base_pattern = input_api.re.compile(
      r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
  has_some_log_import_pattern = input_api.re.compile(
      r'^import .*\.Log;$', input_api.re.MULTILINE)
  # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
  log_call_pattern = input_api.re.compile(r'^\s*Log\.\w\((?P<tag>\"?\w+\"?)\,')
  # The closing double-quote must stay OUTSIDE the named group; the previous
  # pattern captured it inside 'name', inflating the tag length by one and
  # breaking the 23-character limit check below.
  log_decl_pattern = input_api.re.compile(
      r'^\s*private static final String TAG = "(?P<name>.*)"',
      input_api.re.MULTILINE)
  log_name_pattern = input_api.re.compile(r'^cr[.\w]*')

  REF_MSG = ('See base/android/java/src/org/chromium/base/README_logging.md '
             'or contact dgn@chromium.org for more info.')
  sources = lambda x: input_api.FilterSourceFile(x, white_list=(r'.*\.java$',))

  tag_decl_errors = []    # Tag not declared with the "cr." prefix format.
  tag_length_errors = []  # Tag longer than the 23-character limit.
  tag_errors = []         # Log call not using the TAG variable.
  util_log_errors = []    # Log calls not using org.chromium.base.Log.

  for f in input_api.AffectedSourceFiles(sources):
    file_content = input_api.ReadFile(f)
    has_modified_logs = False

    # Per line checks
    if (cr_log_import_pattern.search(file_content) or
        (class_in_base_pattern.search(file_content) and
            not has_some_log_import_pattern.search(file_content))):
      # Checks to run for files using cr log
      for line_num, line in f.ChangedContents():

        # Check if the new line is doing some logging
        match = log_call_pattern.search(line)
        if match:
          has_modified_logs = True

          # Make sure it uses "TAG"
          if match.group('tag') != 'TAG':
            tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
    else:
      # Report non cr Log function calls in changed lines
      for line_num, line in f.ChangedContents():
        if log_call_pattern.search(line):
          util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))

    # Per file checks
    if has_modified_logs:
      # Make sure the tag is using the "cr" prefix and is not too long
      match = log_decl_pattern.search(file_content)
      tag_name = match.group('name') if match else ''
      if not log_name_pattern.search(tag_name):
        tag_decl_errors.append(f.LocalPath())
      if len(tag_name) > 23:
        tag_length_errors.append(f.LocalPath())

  results = []
  if tag_decl_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please define your tags using the suggested format: .\n'
        '"private static final String TAG = "cr.<package tag>".\n' + REF_MSG,
        tag_decl_errors))

  if tag_length_errors:
    results.append(output_api.PresubmitError(
        'The tag length is restricted by the system to be at most '
        '23 characters.\n' + REF_MSG,
        tag_length_errors))

  if tag_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
        tag_errors))

  if util_log_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
        util_log_errors))

  return results
def _CheckForCopyrightedCode(input_api, output_api):
  """Verifies that newly added code doesn't contain copyrighted material
  and is properly licensed under the standard Chromium license.

  As there can be false positives, we maintain a whitelist file. This check
  also verifies that the whitelist file is up to date.
  """
  import sys
  saved_sys_path = sys.path
  try:
    # The scanner lives under tools/; make it importable for this call only.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools')]
    from copyright_scanner import copyright_scanner
  finally:
    sys.path = saved_sys_path

  return copyright_scanner.ScanAtPresubmit(input_api, output_api)
def _CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    black_list = (_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r"^base[\\\/]memory[\\\/]singleton\.h$",))
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  # |class Singleton<T>| declarations are allowed; instantiations are not.
  pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
  header_extensions = ('.h', '.hxx', '.hpp', '.inl')
  files = []
  for f in input_api.AffectedSourceFiles(FileFilter):
    if not f.LocalPath().endswith(header_extensions):
      continue
    contents = input_api.ReadFile(f)
    for line in contents.splitlines(False):
      # Lines that are entirely C++ comments are skipped.
      if not input_api.re.match(r'//', line) and pattern.search(line):
        files.append(f)
        break

  if not files:
    return []
  return [ output_api.PresubmitError(
      'Found Singleton<T> in the following header files.\n' +
      'Please move them to an appropriate source file so that the ' +
      'template gets instantiated in a single compilation unit.',
      files) ]
1491 _DEPRECATED_CSS = [
1492 # Values
1493 ( "-webkit-box", "flex" ),
1494 ( "-webkit-inline-box", "inline-flex" ),
1495 ( "-webkit-flex", "flex" ),
1496 ( "-webkit-inline-flex", "inline-flex" ),
1497 ( "-webkit-min-content", "min-content" ),
1498 ( "-webkit-max-content", "max-content" ),
1500 # Properties
1501 ( "-webkit-background-clip", "background-clip" ),
1502 ( "-webkit-background-origin", "background-origin" ),
1503 ( "-webkit-background-size", "background-size" ),
1504 ( "-webkit-box-shadow", "box-shadow" ),
1506 # Functions
1507 ( "-webkit-gradient", "gradient" ),
1508 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1509 ( "-webkit-linear-gradient", "linear-gradient" ),
1510 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1511 ( "-webkit-radial-gradient", "radial-gradient" ),
1512 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation and iOS CSS for dom distiller
      (reader mode) are ignored by the hooks as it
      needs to be consumed by WebKit. """
  css_black_list = (_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST +
                    (r"^chrome/common/extensions/docs",
                     r"^chrome/docs",
                     r"^components/dom_distiller/core/css/distilledpage_ios.css",
                     r"^native_client_sdk"))
  css_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=(r".+\.css$",), black_list=css_black_list)

  errors = []
  for fpath in input_api.AffectedFiles(file_filter=css_filter):
    for line_num, line in fpath.ChangedContents():
      # One error per deprecated construct found on the changed line.
      errors.extend(
          output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value))
          for deprecated_value, value in _DEPRECATED_CSS
          if deprecated_value in line)
  return errors
1542 _DEPRECATED_JS = [
1543 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
1544 ( "__defineGetter__", "Object.defineProperty" ),
1545 ( "__defineSetter__", "Object.defineProperty" ),
def _CheckNoDeprecatedJS(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  js_filter = lambda f: input_api.FilterSourceFile(
      f,
      white_list=(r".+\.js$",),  # TODO(dbeam): .html?
      black_list=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST))

  errors = []
  for fpath in input_api.AffectedFiles(file_filter=js_filter):
    for lnum, line in fpath.ChangedContents():
      for deprecated, replacement in _DEPRECATED_JS:
        if deprecated in line:
          errors.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (fpath.LocalPath(), lnum, deprecated, replacement)))
  return errors
def _AndroidSpecificOnUploadChecks(input_api, output_api):
  """Groups checks that target android code."""
  results = []
  # Order matches the original: cr-log usage first, then Toast usage.
  for check in (_CheckAndroidCrLogUsage, _CheckAndroidToastUsage):
    results.extend(check(input_api, output_api))
  return results
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  # Project-wide canned checks from depot_tools.
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  # TODO(danakj): Remove this when base/move.h is removed.
  results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  # The no-tabs canned check is only applied to .grd resource files.
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckNoDeprecatedJS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForCopyrightedCode(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))

  # When this presubmit script itself is modified, also run its unit tests.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-googler/chromites committers, verify the author's email address is
  in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  # Read AUTHORS inside a with-block so the file handle is closed promptly;
  # the previous generator-based code left the handle open until GC.
  with open(authors_path) as authors_file:
    matches = [input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
               for line in authors_file]
  valid_authors = [match.group(1).lower() for match in matches if match]
  if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []
def _CheckPatchFiles(input_api, output_api):
  """Rejects leftover merge/patch artifacts (.orig and .rej files)."""
  leftovers = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.orig', '.rej')):
      leftovers.append(f.LocalPath())
  if not leftovers:
    return []
  return [output_api.PresubmitError(
      "Don't commit .rej and .orig files.", leftovers)]
1667 def _DidYouMeanOSMacro(bad_macro):
1668 try:
1669 return {'A': 'OS_ANDROID',
1670 'B': 'OS_BSD',
1671 'C': 'OS_CHROMEOS',
1672 'F': 'OS_FREEBSD',
1673 'L': 'OS_LINUX',
1674 'M': 'OS_MACOSX',
1675 'N': 'OS_NACL',
1676 'O': 'OS_OPENBSD',
1677 'P': 'OS_POSIX',
1678 'S': 'OS_SOLARIS',
1679 'W': 'OS_WIN'}[bad_macro[3].upper()]
1680 except KeyError:
1681 return ''
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  bad_macros = []
  for lnum, line in f.ChangedContents():
    # Only preprocessor lines can meaningfully test OS macros.
    if not preprocessor_statement.search(line):
      continue
    for match in os_macro.finditer(line):
      macro = match.group(1)
      if macro in _VALID_OS_MACROS:
        continue
      suggestion = _DidYouMeanOSMacro(macro)
      did_you_mean = ' (did you mean %s?)' % suggestion if suggestion else ''
      bad_macros.append(' %s:%d %s%s' % (f.LocalPath(),
                                         lnum,
                                         macro,
                                         did_you_mean))
  return bad_macros
def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  # Non-C/C++ sources are skipped.
  skipped_extensions = ('.py', '.js', '.html', '.css')
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(skipped_extensions):
      continue
    bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
  """Check all affected files for invalid "if defined" macros."""
  # Macros that are always defined, so testing them with #ifdef / defined()
  # is always a bug; the right form is '#if MACRO'.
  ALWAYS_DEFINED_MACROS = (
    "TARGET_CPU_PPC",
    "TARGET_CPU_PPC64",
    "TARGET_CPU_68K",
    "TARGET_CPU_X86",
    "TARGET_CPU_ARM",
    "TARGET_CPU_MIPS",
    "TARGET_CPU_SPARC",
    "TARGET_CPU_ALPHA",
    "TARGET_IPHONE_SIMULATOR",
    "TARGET_OS_EMBEDDED",
    "TARGET_OS_IPHONE",
    "TARGET_OS_MAC",
    "TARGET_OS_UNIX",
    "TARGET_OS_WIN32",
  )
  ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
  problems = []
  for lnum, line in f.ChangedContents():
    for match in ifdef_macro.finditer(line):
      macro = match.group(1)
      if macro not in ALWAYS_DEFINED_MACROS:
        continue
      always_defined = ' %s is always defined. ' % macro
      did_you_mean = 'Did you mean \'#if %s\'?' % macro
      problems.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
                                           lnum,
                                           always_defined,
                                           did_you_mean))
  return problems
def _CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  c_like_extensions = ('.h', '.c', '.cc', '.m', '.mm')
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(c_like_extensions):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
      bad_macros)]
def _CheckForUsingSideEffectsOfPass(input_api, output_api):
  """Check all affected files for using side effects of Pass."""
  # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
  deref_pass_pattern = input_api.re.compile(r'\*[a-zA-Z0-9_]+\.Pass\(\)')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      continue
    for lnum, line in f.ChangedContents():
      if deref_pass_pattern.search(line):
        errors.append(output_api.PresubmitError(
            ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
             'See crbug.com/418297.') % (f.LocalPath(), lnum)))
  return errors
def _CheckForIPCRules(input_api, output_api):
  """Check for same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  """
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

  problems = []
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    if not local_path.endswith('.h'):
      continue
    # Flag uses of the macro except when it only appears in a // comment.
    problems.extend(
        '%s:%d\n %s' % (local_path, line_number, line.strip())
        for line_number, line in f.ChangedContents()
        if inclusion_pattern.search(line) and not comment_pattern.search(line))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      _IPC_ENUM_TRAITS_DEPRECATED, problems)]
def _CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  endings.

  Returns a PresubmitPromptWarning listing every affected text/source file
  that contains at least one CRLF-terminated line, or [] if none do.
  """
  known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'

  file_inclusion_pattern = (
      known_text_files,
      r'.+%s' % _IMPLEMENTATION_EXTENSIONS,
  )

  # Renamed from `filter` so the builtin is not shadowed.
  source_file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=None)
  paths = [f.LocalPath() for f in
           input_api.AffectedSourceFiles(source_file_filter)]

  problems = []

  for path in paths:
    # Open in binary mode: text mode would translate '\r\n' to '\n' on
    # Windows before we could see it, hiding exactly what we check for.
    # `with` also guarantees the handle is closed even if reading raises.
    with open(path, 'rb') as fp:
      for line in fp:
        if line.endswith(b'\r\n'):
          problems.append(path)
          break

  if problems:
    return [output_api.PresubmitPromptWarning('Are you sure that you want '
        'these files to contain Windows style line endings?\n' +
        '\n'.join(problems))]

  return []
def CheckChangeOnUpload(input_api, output_api):
  """Runs the upload-time presubmit checks and aggregates their results."""
  results = []
  for check_results in (
      _CommonChecks(input_api, output_api),
      _CheckValidHostsInDEPS(input_api, output_api),
      _CheckJavaStyle(input_api, output_api),
      input_api.canned_checks.CheckGNFormatted(input_api, output_api),
      _CheckUmaHistogramChanges(input_api, output_api),
      _AndroidSpecificOnUploadChecks(input_api, output_api)):
    results.extend(check_results)
  return results
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  explicit_masters = {
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  explicit = explicit_masters.get(bot)
  if explicit:
    return explicit
  # Otherwise guess the platform from substrings of the bot name; order
  # matters and mirrors the original linux -> win -> mac precedence.
  guesses = (
      (('linux', 'android', 'presubmit'), 'tryserver.chromium.linux'),
      (('win',), 'tryserver.chromium.win'),
      (('mac', 'ios'), 'tryserver.chromium.mac'),
  )
  for keywords, guessed_master in guesses:
    if any(keyword in bot for keyword in keywords):
      return guessed_master
  return None
def GetDefaultTryConfigs(bots):
  """Returns a dict mapping tryserver master to {bot: set(['defaulttests'])}.

  Each bot name in |bots| is assigned the default test set and grouped
  under the master guessed by GetTryServerMasterForBot() (which may be
  None when no guess is possible).
  """
  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  # items() instead of the Python-2-only iteritems(): identical behavior
  # here, and it keeps the file portable. The dict is small, so
  # materializing the item list is free.
  for bot, tests in builders_and_tests.items():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
def CheckChangeOnCommit(input_api, output_api):
  """Runs the commit-time presubmit checks and aggregates their results."""
  canned = input_api.canned_checks
  results = list(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(canned.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(canned.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(canned.CheckChangeHasBugField(input_api, output_api))
  results.extend(canned.CheckChangeHasDescription(input_api, output_api))
  return results
def GetPreferredTryMasters(project, change):
  """Returns the CQ masters/builders to trigger try jobs on for |change|."""
  import json
  import os.path
  import platform
  import subprocess

  cq_config_path = os.path.join(
      change.RepositoryRoot(), 'infra', 'config', 'cq.cfg')
  # commit_queue.py below is a script in depot_tools directory, which has a
  # 'builders' command to retrieve a list of CQ builders from the CQ config.
  is_win = platform.system() == 'Windows'
  masters = json.loads(subprocess.check_output(
      ['commit_queue', 'builders', cq_config_path], shell=is_win))

  try_config = {}
  for master in masters:
    builder_map = try_config.setdefault(master, {})
    for builder in masters[master]:
      # Do not trigger presubmit builders, since they're likely to fail
      # (e.g. OWNERS checks before finished code review), and we're
      # running local presubmit anyway.
      if 'presubmit' in builder:
        continue
      builder_map[builder] = ['defaulttests']

  return try_config