# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for Chromium.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""

_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
)

# The NetscapePlugIn library is excluded from pan-project checks as it will
# soon be deleted together with the rest of NPAPI, and it's not worthwhile to
# update its coding style until then.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)

# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)

_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')


_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Remember to use the right '
    'collation (LC_COLLATE=C) and check\nhttps://google-styleguide.googlecode'
    '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')

_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       ' prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(rect) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(rect) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(size) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(size) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)

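# Note (added commentary, not part of the upstream list): each entry above is
# a 3-tuple of (pattern, message lines, treat_as_error). _CheckNoBannedFunctions
# treats a pattern that starts with '/' (e.g. r'/NSTrackingArea\W') as a
# regular expression and anything else as a plain substring match; the boolean
# decides whether a hit becomes a PresubmitError or only a prompt warning.
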
_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
            "customization_document_browsertest\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
            r"simple_platform_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
        r"^remoting[\\\/]host[\\\/]gnubby_auth_handler_posix\.cc$",
        r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
            "drm_display_host_manager\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623',
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
    (
      '\<MessageLoopProxy\>',
      (
        'MessageLoopProxy is deprecated. ',
        'Please use SingleThreadTaskRunner or ThreadTaskRunnerHandle instead.',
      ),
      True,
      (
        # Internal message_loop related code may still use it.
        r'^base[\\\/]message_loop[\\\/].*',
      ),
    ),
)

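# Note (added commentary, not part of the upstream list): _BANNED_CPP_FUNCTIONS
# entries carry a fourth element, a tuple of path regexps that are exempt from
# the ban; _CheckNoBannedFunctions skips the entry for files whose LocalPath()
# matches any of them. An empty tuple () means the ban applies everywhere.
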
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')

_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',       # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_NACL_NONSFI',
    'OS_NACL_SFI',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)

def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declarations of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []

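# Illustrative sketch (added commentary only; the identifiers below are made
# up): in a changed .cc line such as
#     widget->InitializeForTesting();
# |inclusion_pattern| matches and the line is reported, while
#     void Widget::InitializeForTesting() {
# is skipped because |exclusion_pattern| recognizes it as a definition, and
#     // InitializeForTesting() is only reachable from unit tests.
# is skipped because |comment_pattern| matches the commented mention.
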
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if len(files):
    return [ output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        files) ]
  return []

def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST."""
  problems = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.mm'))):
      continue

    for line_num, line in f.ChangedContents():
      if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
        problems.append('    %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
      '\n'.join(problems))]

def _FindHistogramNameInLine(histogram_name, line):
  """Tries to find a histogram name or prefix in a line."""
  if not "affected-histogram" in line:
    return histogram_name in line
  # A histogram_suffixes tag type has an affected-histogram name as a prefix of
  # the histogram_name.
  if not '"' in line:
    return False
  histogram_prefix = line.split('\"')[1]
  return histogram_prefix in histogram_name

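# Illustrative examples (added commentary only; the histogram names are made
# up):
#   _FindHistogramNameInLine('Foo.Bar', 'UMA_HISTOGRAM_COUNTS("Foo.Bar", x)')
#     -> True, plain substring match on a non-affected-histogram line.
#   _FindHistogramNameInLine('Foo.Bar.Baz',
#                            '<affected-histogram name="Foo.Bar"/>')
#     -> True, the quoted name only has to be a prefix of the histogram name.
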
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself."""
  touched_histograms = []
  histograms_xml_modifications = []
  pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
      continue
    if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      continue
    for line_num, line in f.ChangedContents():
      found = pattern.search(line)
      if found:
        touched_histograms.append([found.group(1), f, line_num])

  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, in the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
      if histogram_name_found:
        break
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)

  histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
  problems = []
  if unmatched_histograms:
    with open(histograms_xml_path) as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
          if histogram_name_found:
            break
        if not histogram_name_found:
          problems.append(' [%s:%d] %s' %
                          (f.LocalPath(), line_num, histogram_name))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
      'been modified and the associated histogram name has no match in either '
      '%s or the modifications of it:' % (histograms_xml_path), problems)]

def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.h')) or
        f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
        '/win/' in f.LocalPath()):
      continue

    allowWString = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        allowWString = True
      elif not allowWString and 'wstring' in line:
        problems.append('    %s:%d' % (f.LocalPath(), line_num))
        allowWString = False
      else:
        allowWString = False

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      '  If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(problems))]

def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  if any(f.LocalPath().endswith('.DEPS.git') for f in
      input_api.AffectedFiles()):
    return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
  return []

def _CheckValidHostsInDEPS(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
    return []
  # Outsource work to gclient verify.
  try:
    input_api.subprocess.check_output(['gclient', 'verify'])
    return []
  except input_api.subprocess.CalledProcessError, error:
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]

def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""
  warnings = []
  errors = []

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        def IsBlacklisted(affected_file, blacklist):
          local_path = affected_file.LocalPath()
          for item in blacklist:
            if input_api.re.match(item, local_path):
              return True
          return False
        if IsBlacklisted(f, excluded_paths):
          continue
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  result = []
  if (warnings):
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if (errors):
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result

def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []

def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        problems.append('    %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]

def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  import sys
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results

def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  if input_api.platform == 'win32':
    return []
  args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
          '--root', input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    args += ['--file', f.LocalPath()]
  checkperms = input_api.subprocess.Popen(args,
                                          stdout=input_api.subprocess.PIPE)
  errors = checkperms.communicate()[0].strip()
  if errors:
    return [output_api.PresubmitError('checkperms.py failed.',
                                      errors.splitlines())]
  return []

def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append('    %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h', errors))
  return results

def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files
  """

  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  out_of_order = " - line belongs before previous line"
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num,
                                " - C system include file in wrong block"))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num,
                                out_of_order))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num,
                                " - c++ system include file in wrong block"))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num, out_of_order))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num, out_of_order))
    else:
      problem_linenums.append((line_num, previous_line_num,
                              "Unknown include type"))
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num, failure_type) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append('    %s:%d:%s' % (file_path, line_num, failure_type))
  return warnings

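# Illustrative sketch (added commentary only): within a single scope the
# checker expects, for example,
#     #include <stddef.h>        // C system block, alphabetical
#     #include <string.h>
#     #include <map>             // C++ system block, alphabetical
#     #include <vector>
#     #include "base/macros.h"   // project block, alphabetical
# A <foo.h>-style include appearing after the C++ system or project block is
# reported as "C system include file in wrong block".
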
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  #    specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings

def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef
  follows these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h', '.mm')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results

def _CheckForVersionControlConflictsInFile(input_api, f):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  errors = []
  for line_num, line in f.ChangedContents():
    if f.LocalPath().endswith('.md'):
      # First-level headers in markdown look a lot like version control
      # conflict markers. http://daringfireball.net/projects/markdown/basics
      continue
    if pattern.match(line):
      errors.append('    %s:%d %s' % (f.LocalPath(), line_num, line))
  return errors


def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Version control conflict markers found, please resolve.', errors))
  return results

def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        ['  %s:%d:  %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []

def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  errors = []
  white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
  black_list = (r'^native_client_sdk[\\\/]',)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=white_list, black_list=black_list)
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    errors.append('    %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results

def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          results.add('%s/DEPS' % path)
  return results

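# Illustrative examples (added commentary only; the paths are made up):
#   '"+content/public/browser",'       -> {'content/public/browser/DEPS'}
#   '"+base/memory/ref_counted.h",'    -> {'base/memory/ref_counted.h'}
#   '"+grit/generated_resources.h",'   -> ignored (auto-generated directory)
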
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n    %s' %
             '\n    '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n    %s' %
          '\n    '.join(suggested_owners or [])))
    return output_list

  return []

def _CheckSpamLogging(input_api, output_api):
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^components[\\\/]html_viewer[\\\/]"
                     r"web_test_delegate_impl\.cc$",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^ipc[\\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if input_api.re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]
  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []

def _CheckForAnonymousVariables(input_api, output_api):
  """These types are all expected to hold locks while in scope and
  so should never be anonymous (which causes them to be immediately
  destroyed)."""
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
  errors = []

  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    for linenum, line in f.ChangedContents():
      if bad_pattern.search(line) and not good_pattern.search(line):
        errors.append('%s:%d' % (f.LocalPath(), linenum))

  if errors:
    return [output_api.PresubmitError(
        'These lines create anonymous variables that need to be named:',
        items=errors)]
  return []

def _CheckCygwinShell(input_api, output_api):
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(r'.+\.(gyp|gypi)$',))
  cygwin_shell = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    for linenum, line in f.ChangedContents():
      if 'msvs_cygwin_shell' in line:
        cygwin_shell.append(f.LocalPath())
        break

  if cygwin_shell:
    return [output_api.PresubmitError(
        'These files should not use msvs_cygwin_shell (the default is 0):',
        items=cygwin_shell)]
  return []

def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if match:
        # Loads contents in tools/metrics/actions/actions.xml to memory. It's
        # loaded only once.
        if not current_actions:
          with open('tools/metrics/actions/actions.xml') as actions_f:
            current_actions = actions_f.read()
        # Search for the matched user action name in |current_actions|.
        for action_name in match.groups():
          action = 'name="{0}"'.format(action_name)
          if action not in current_actions:
            return [output_api.PresubmitPromptWarning(
                'File %s line %d: %s is missing in '
                'tools/metrics/actions/actions.xml. Please run '
                'tools/metrics/actions/extract_actions.py to update.'
                % (f.LocalPath(), line_num, action_name))]
  return []

def _GetJSONParseError(input_api, filename, eat_comments=True):
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      import sys
      original_sys_path = sys.path
      try:
        sys.path = sys.path + [input_api.os_path.join(
            input_api.PresubmitLocalPath(),
            'tools', 'json_comment_eater')]
        import json_comment_eater
      finally:
        sys.path = original_sys_path
      contents = json_comment_eater.Nom(contents)

    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None


def _GetIDLParseError(input_api, filename):
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e

def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results

def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)

def _CheckAndroidToastUsage(input_api, output_api):
  """Checks that code uses org.chromium.ui.widget.Toast instead of
  android.widget.Toast (Chromium Toast doesn't force hardware
  acceleration on low-end devices, saving memory).
  """
  toast_import_pattern = input_api.re.compile(
      r'^import android\.widget\.Toast;$')

  errors = []

  sources = lambda affected_file: input_api.FilterSourceFile(
      affected_file,
      black_list=(_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r'^chromecast[\\\/].*',
                   r'^remoting[\\\/].*')),
      white_list=(r'.*\.java$',))

  for f in input_api.AffectedSourceFiles(sources):
    for line_num, line in f.ChangedContents():
      if toast_import_pattern.search(line):
        errors.append("%s:%d" % (f.LocalPath(), line_num))

  results = []

  if errors:
    results.append(output_api.PresubmitError(
        'android.widget.Toast usage is detected. Android toasts use hardware'
        ' acceleration, and can be\ncostly on low-end devices. Please use'
        ' org.chromium.ui.widget.Toast instead.\n'
        'Contact dskiba@chromium.org if you have any questions.',
        errors))

  return results

def _CheckAndroidCrLogUsage(input_api, output_api):
  """Checks that new logs using org.chromium.base.Log:
    - Are using 'TAG' as variable name for the tags (warn)
    - Are using a tag that is shorter than 20 characters (error)
  """
  cr_log_import_pattern = input_api.re.compile(
      r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
  class_in_base_pattern = input_api.re.compile(
      r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
  has_some_log_import_pattern = input_api.re.compile(
      r'^import .*\.Log;$', input_api.re.MULTILINE)
  # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
  log_call_pattern = input_api.re.compile(r'^\s*Log\.\w\((?P<tag>\"?\w+\"?)\,')
  log_decl_pattern = input_api.re.compile(
      r'^\s*private static final String TAG = "(?P<name>(.*))";',
      input_api.re.MULTILINE)

  REF_MSG = ('See docs/android_logging.md '
             'or contact dgn@chromium.org for more info.')
  sources = lambda x: input_api.FilterSourceFile(x, white_list=(r'.*\.java$',))

  tag_decl_errors = []
  tag_length_errors = []
  tag_errors = []
  tag_with_dot_errors = []
  util_log_errors = []

  for f in input_api.AffectedSourceFiles(sources):
    file_content = input_api.ReadFile(f)
    has_modified_logs = False

    # Per-line checks.
    if (cr_log_import_pattern.search(file_content) or
        (class_in_base_pattern.search(file_content) and
         not has_some_log_import_pattern.search(file_content))):
      # Checks to run for files using cr log.
      for line_num, line in f.ChangedContents():

        # Check if the new line is doing some logging.
        match = log_call_pattern.search(line)
        if match:
          has_modified_logs = True

          # Make sure it uses "TAG".
          if not match.group('tag') == 'TAG':
            tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
    else:
      # Report non-cr Log function calls in changed lines.
      for line_num, line in f.ChangedContents():
        if log_call_pattern.search(line):
          util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))

    # Per-file checks.
    if has_modified_logs:
      # Make sure the tag is using the "cr" prefix and is not too long.
      match = log_decl_pattern.search(file_content)
      tag_name = match.group('name') if match else None
      if not tag_name:
        tag_decl_errors.append(f.LocalPath())
      elif len(tag_name) > 20:
        tag_length_errors.append(f.LocalPath())
      elif '.' in tag_name:
        tag_with_dot_errors.append(f.LocalPath())

  results = []
  if tag_decl_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please define your tags using the suggested format:\n'
        '"private static final String TAG = "<package tag>".\n'
        'They will be prepended with "cr_" automatically.\n' + REF_MSG,
        tag_decl_errors))

  if tag_length_errors:
    results.append(output_api.PresubmitError(
        'The tag length is restricted by the system to be at most '
        '20 characters.\n' + REF_MSG,
        tag_length_errors))

  if tag_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
        tag_errors))

  if util_log_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
        util_log_errors))

  if tag_with_dot_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Dots in log tags cause them to be elided in crash reports.\n'
        + REF_MSG,
        tag_with_dot_errors))

  return results

def _CheckForCopyrightedCode(input_api, output_api):
  """Verifies that newly added code doesn't contain copyrighted material
  and is properly licensed under the standard Chromium license.

  As there can be false positives, we maintain a whitelist file. This check
  also verifies that the whitelist file is up to date.
  """
  import sys
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools')]
    from copyright_scanner import copyright_scanner
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return copyright_scanner.ScanAtPresubmit(input_api, output_api)

def _CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    black_list = (_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r"^base[\\\/]memory[\\\/]singleton\.h$",))
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
  files = []
  for f in input_api.AffectedSourceFiles(FileFilter):
    if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
        f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
      contents = input_api.ReadFile(f)
      for line in contents.splitlines(False):
        if (not input_api.re.match(r'//', line) and  # Strip C++ comment.
            pattern.search(line)):
          files.append(f)
          break

  if files:
    return [ output_api.PresubmitError(
        'Found base::Singleton<T> in the following header files.\n' +
        'Please move them to an appropriate source file so that the ' +
        'template gets instantiated in a single compilation unit.',
        files) ]
  return []

_DEPRECATED_CSS = [
  # Values
  ( "-webkit-box", "flex" ),
  ( "-webkit-inline-box", "inline-flex" ),
  ( "-webkit-flex", "flex" ),
  ( "-webkit-inline-flex", "inline-flex" ),
  ( "-webkit-min-content", "min-content" ),
  ( "-webkit-max-content", "max-content" ),

  # Properties
  ( "-webkit-background-clip", "background-clip" ),
  ( "-webkit-background-origin", "background-origin" ),
  ( "-webkit-background-size", "background-size" ),
  ( "-webkit-box-shadow", "box-shadow" ),

  # Functions
  ( "-webkit-gradient", "gradient" ),
  ( "-webkit-repeating-gradient", "repeating-gradient" ),
  ( "-webkit-linear-gradient", "linear-gradient" ),
  ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
  ( "-webkit-radial-gradient", "radial-gradient" ),
  ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
]

def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation and iOS CSS for dom distiller
      (reader mode) are ignored by the hooks as it
      needs to be consumed by WebKit. """
  results = []
  file_inclusion_pattern = (r".+\.css$",)
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^components/dom_distiller/core/css/distilledpage_ios.css",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if deprecated_value in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
  return results

_DEPRECATED_JS = [
  ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
  ( "__defineGetter__", "Object.defineProperty" ),
  ( "__defineSetter__", "Object.defineProperty" ),
]


def _CheckNoDeprecatedJS(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  results = []
  file_inclusion_pattern = (r".+\.js$",)  # TODO(dbeam): .html?
  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for lnum, line in fpath.ChangedContents():
      for (deprecated, replacement) in _DEPRECATED_JS:
        if deprecated in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (fpath.LocalPath(), lnum, deprecated, replacement)))
  return results

def _AndroidSpecificOnUploadChecks(input_api, output_api):
  """Groups checks that target Android code."""
  results = []
  results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
  results.extend(_CheckAndroidToastUsage(input_api, output_api))
  return results

1582 def _CommonChecks(input_api, output_api):
1583 """Checks common to both upload and commit."""
1584 results = []
1585 results.extend(input_api.canned_checks.PanProjectChecks(
1586 input_api, output_api,
1587 excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
1588 results.extend(_CheckAuthorizedAuthor(input_api, output_api))
1589 results.extend(
1590 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
1591 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
1592 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
1593 results.extend(_CheckNoNewWStrings(input_api, output_api))
1594 results.extend(_CheckNoDEPSGIT(input_api, output_api))
1595 results.extend(_CheckNoBannedFunctions(input_api, output_api))
1596 results.extend(_CheckNoPragmaOnce(input_api, output_api))
1597 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
1598 results.extend(_CheckUnwantedDependencies(input_api, output_api))
1599 results.extend(_CheckFilePermissions(input_api, output_api))
1600 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
1601 results.extend(_CheckIncludeOrder(input_api, output_api))
1602 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
1603 results.extend(_CheckPatchFiles(input_api, output_api))
1604 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
1605 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
1606 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
1607 results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
1608 # TODO(danakj): Remove this when base/move.h is removed.
1609 results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
1610 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
1611 results.extend(
1612 input_api.canned_checks.CheckChangeHasNoTabs(
1613 input_api,
1614 output_api,
1615 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
1616 results.extend(_CheckSpamLogging(input_api, output_api))
1617 results.extend(_CheckForAnonymousVariables(input_api, output_api))
1618 results.extend(_CheckCygwinShell(input_api, output_api))
1619 results.extend(_CheckUserActionUpdate(input_api, output_api))
1620 results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
1621 results.extend(_CheckNoDeprecatedJS(input_api, output_api))
1622 results.extend(_CheckParseErrors(input_api, output_api))
1623 results.extend(_CheckForIPCRules(input_api, output_api))
1624 results.extend(_CheckForCopyrightedCode(input_api, output_api))
1625 results.extend(_CheckForWindowsLineEndings(input_api, output_api))
1626 results.extend(_CheckSingletonInHeaders(input_api, output_api))
1628 if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
1629 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
1630 input_api, output_api,
1631 input_api.PresubmitLocalPath(),
1632 whitelist=[r'^PRESUBMIT_test\.py$']))
1633 return results
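# A minimal sketch of how an additional check is typically wired into
# _CommonChecks; the _CheckNothingScary name and body below are hypothetical,
# not part of this file:
#
#   def _CheckNothingScary(input_api, output_api):
#     """Example check: returns a list of output_api results (empty = pass)."""
#     return []
#
# and then, inside _CommonChecks above:
#
#   results.extend(_CheckNothingScary(input_api, output_api))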
1636 def _CheckAuthorizedAuthor(input_api, output_api):
1637 """For non-googler/chromites committers, verify the author's email address is
1638 in AUTHORS.
1640 # TODO(maruel): Add it to input_api?
1641 import fnmatch
1643 author = input_api.change.author_email
1644 if not author:
1645 input_api.logging.info('No author, skipping AUTHOR check')
1646 return []
1647 authors_path = input_api.os_path.join(
1648 input_api.PresubmitLocalPath(), 'AUTHORS')
1649 valid_authors = (
1650 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1651 for line in open(authors_path))
1652 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1653 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1654 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1655 return [output_api.PresubmitPromptWarning(
1656 ('%s is not in the AUTHORS file. If you are a new contributor, please visit'
1657 '\n'
1658 'http://www.chromium.org/developers/contributing-code and read the '
1659 '"Legal" section.\n'
1660 'If you are a chromite, verify the contributor signed the CLA.') %
1661 author)]
1662 return []
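# Roughly how the AUTHORS matching above behaves (names and addresses are
# hypothetical). An AUTHORS line such as
#   Jane Doe <jane.doe@example.com>
# yields 'jane.doe@example.com' via the regular expression, and fnmatch also
# honors wildcard entries, so a line like
#   Example Org <*@example.org>
# matches any author at that domain. Lines starting with '#' never match
# because of the leading [^#]+ in the pattern.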
1665 def _CheckPatchFiles(input_api, output_api):
1666 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1667 if f.LocalPath().endswith(('.orig', '.rej'))]
1668 if problems:
1669 return [output_api.PresubmitError(
1670 "Don't commit .rej and .orig files.", problems)]
1671 else:
1672 return []
1675 def _DidYouMeanOSMacro(bad_macro):
1676 try:
1677 return {'A': 'OS_ANDROID',
1678 'B': 'OS_BSD',
1679 'C': 'OS_CHROMEOS',
1680 'F': 'OS_FREEBSD',
1681 'L': 'OS_LINUX',
1682 'M': 'OS_MACOSX',
1683 'N': 'OS_NACL',
1684 'O': 'OS_OPENBSD',
1685 'P': 'OS_POSIX',
1686 'S': 'OS_SOLARIS',
1687 'W': 'OS_WIN'}[bad_macro[3].upper()]
1688 except KeyError:
1689 return ''
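# Illustrative behaviour of the helper above (inputs hypothetical):
#   _DidYouMeanOSMacro('OS_MAC')     -> 'OS_MACOSX'
#   _DidYouMeanOSMacro('OS_WINDOWS') -> 'OS_WIN'
#   _DidYouMeanOSMacro('OS_QNX')     -> ''  (no suggestion; 'Q' is not mapped)
# Only the fourth character of the macro name is consulted, so the suggestion
# is a best-effort guess.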
1692 def _CheckForInvalidOSMacrosInFile(input_api, f):
1693 """Check for sensible looking, totally invalid OS macros."""
1694 preprocessor_statement = input_api.re.compile(r'^\s*#')
1695 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
1696 results = []
1697 for lnum, line in f.ChangedContents():
1698 if preprocessor_statement.search(line):
1699 for match in os_macro.finditer(line):
1700 if not match.group(1) in _VALID_OS_MACROS:
1701 good = _DidYouMeanOSMacro(match.group(1))
1702 did_you_mean = ' (did you mean %s?)' % good if good else ''
1703 results.append(' %s:%d %s%s' % (f.LocalPath(),
1704 lnum,
1705 match.group(1),
1706 did_you_mean))
1707 return results
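# For example, a changed preprocessor line such as (hypothetical)
#   #if defined(OS_MAC) && defined(OS_WIN)
# would report OS_MAC with the hint '(did you mean OS_MACOSX?)', assuming
# OS_MAC is not listed in _VALID_OS_MACROS. Lines that mention defined(...)
# outside a preprocessor statement are skipped by the pre-filter above.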
1710 def _CheckForInvalidOSMacros(input_api, output_api):
1711 """Check all affected files for invalid OS macros."""
1712 bad_macros = []
1713 for f in input_api.AffectedFiles():
1714 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1715 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1717 if not bad_macros:
1718 return []
1720 return [output_api.PresubmitError(
1721 'Possibly invalid OS macro[s] found. Please fix your code\n'
1722 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1725 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
1726 """Check all affected files for invalid "if defined" macros."""
1727 ALWAYS_DEFINED_MACROS = (
1728 "TARGET_CPU_PPC",
1729 "TARGET_CPU_PPC64",
1730 "TARGET_CPU_68K",
1731 "TARGET_CPU_X86",
1732 "TARGET_CPU_ARM",
1733 "TARGET_CPU_MIPS",
1734 "TARGET_CPU_SPARC",
1735 "TARGET_CPU_ALPHA",
1736 "TARGET_IPHONE_SIMULATOR",
1737 "TARGET_OS_EMBEDDED",
1738 "TARGET_OS_IPHONE",
1739 "TARGET_OS_MAC",
1740 "TARGET_OS_UNIX",
1741 "TARGET_OS_WIN32",
1743 ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
1744 results = []
1745 for lnum, line in f.ChangedContents():
1746 for match in ifdef_macro.finditer(line):
1747 if match.group(1) in ALWAYS_DEFINED_MACROS:
1748 always_defined = ' %s is always defined. ' % match.group(1)
1749 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
1750 results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
1751 lnum,
1752 always_defined,
1753 did_you_mean))
1754 return results
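# Example of what this flags (the line itself is hypothetical): Apple's
# TargetConditionals.h defines every TARGET_OS_* / TARGET_CPU_* macro to
# either 0 or 1, so a line like
#   #ifdef TARGET_OS_IPHONE
# is always true and gets reported with the suggestion
#   Did you mean '#if TARGET_OS_IPHONE'?
# whereas '#if defined(SOME_OTHER_MACRO)' is left alone.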
1757 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
1758 """Check all affected files for invalid "if defined" macros."""
1759 bad_macros = []
1760 for f in input_api.AffectedFiles():
1761 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1762 bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
1764 if not bad_macros:
1765 return []
1767 return [output_api.PresubmitError(
1768 'Found ifdef check on always-defined macro[s]. Please fix your code\n'
1769 'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
1770 bad_macros)]
1773 def _CheckForUsingSideEffectsOfPass(input_api, output_api):
1774 """Check all affected files for using side effects of Pass."""
1775 errors = []
1776 for f in input_api.AffectedFiles():
1777 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1778 for lnum, line in f.ChangedContents():
1779 # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
1780 if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
1781 errors.append(output_api.PresubmitError(
1782 ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
1783 'See crbug.com/418297.') % (f.LocalPath(), lnum)))
1784 return errors
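# What the regular expression above does and does not flag (the C++ lines are
# for illustration only):
#   Foo(*my_scoped_thing.Pass());   // flagged: dereferences the temporary
#                                   // returned by Pass(), crbug.com/418297
#   Foo(my_scoped_thing.Pass());    // not flagged: ownership is handed over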
1787 def _CheckForIPCRules(input_api, output_api):
1788 """Check for same IPC rules described in
1789 http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
1791 base_pattern = r'IPC_ENUM_TRAITS\('
1792 inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
1793 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
1795 problems = []
1796 for f in input_api.AffectedSourceFiles(None):
1797 local_path = f.LocalPath()
1798 if not local_path.endswith('.h'):
1799 continue
1800 for line_number, line in f.ChangedContents():
1801 if inclusion_pattern.search(line) and not comment_pattern.search(line):
1802 problems.append(
1803 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1805 if problems:
1806 return [output_api.PresubmitPromptWarning(
1807 _IPC_ENUM_TRAITS_DEPRECATED, problems)]
1808 else:
1809 return []
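# Illustrative behaviour of the patterns above (the macro argument is
# hypothetical). A changed line in a .h file reading
#   IPC_ENUM_TRAITS(MyEnum)
# is reported, while a fully commented-out occurrence such as
#   // IPC_ENUM_TRAITS(MyEnum)
# is skipped because it also matches comment_pattern. Only .h files are
# inspected at all.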
1812 def _CheckForWindowsLineEndings(input_api, output_api):
1813 """Check source code and known ascii text files for Windows style line
1814 endings.
1816 known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'
1818 file_inclusion_pattern = (
1819 known_text_files,
1820 r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1821 )
1823 file_filter = lambda f: input_api.FilterSourceFile(
1824 f, white_list=file_inclusion_pattern, black_list=None)
1825 files = [f.LocalPath() for f in
1826 input_api.AffectedSourceFiles(file_filter)]
1828 problems = []
1830 for path in files:
1831 # Read in binary mode so platform newline translation cannot hide '\r\n'.
1832 with open(path, 'rb') as fp:
1833 for line in fp:
1834 if line.endswith('\r\n'):
1835 problems.append(path)
1836 break
1838 if problems:
1839 return [output_api.PresubmitPromptWarning('Are you sure that you want '
1840 'these files to contain Windows style line endings?\n' +
1841 '\n'.join(problems))]
1843 return []
1846 def CheckChangeOnUpload(input_api, output_api):
1847 results = []
1848 results.extend(_CommonChecks(input_api, output_api))
1849 results.extend(_CheckValidHostsInDEPS(input_api, output_api))
1850 results.extend(_CheckJavaStyle(input_api, output_api))
1851 results.extend(
1852 input_api.canned_checks.CheckGNFormatted(input_api, output_api))
1853 results.extend(_CheckUmaHistogramChanges(input_api, output_api))
1854 results.extend(_AndroidSpecificOnUploadChecks(input_api, output_api))
1855 return results
1858 def GetTryServerMasterForBot(bot):
1859 """Returns the Try Server master for the given bot.
1861 It tries to guess the master from the bot name, but may still fail
1862 and return None. There is no longer a default master.
1863 """
1864 # Potentially ambiguous bot names are listed explicitly.
1865 master_map = {
1866 'chromium_presubmit': 'tryserver.chromium.linux',
1867 'blink_presubmit': 'tryserver.chromium.linux',
1868 'tools_build_presubmit': 'tryserver.chromium.linux',
1869 }
1870 master = master_map.get(bot)
1871 if not master:
1872 if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
1873 master = 'tryserver.chromium.linux'
1874 elif 'win' in bot:
1875 master = 'tryserver.chromium.win'
1876 elif 'mac' in bot or 'ios' in bot:
1877 master = 'tryserver.chromium.mac'
1878 return master
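# Illustrative guesses made by the fallback logic above (the bot names are
# hypothetical):
#   GetTryServerMasterForBot('linux_chromium_rel') -> 'tryserver.chromium.linux'
#   GetTryServerMasterForBot('win_chromium_x64')   -> 'tryserver.chromium.win'
#   GetTryServerMasterForBot('some_unknown_bot')   -> None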
1881 def GetDefaultTryConfigs(bots):
1882 """Returns a list of ('bot', set(['tests']), filtered by [bots].
1885 builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)
1887 # Build up the mapping from tryserver master to bot/test.
1888 out = dict()
1889 for bot, tests in builders_and_tests.iteritems():
1890 out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
1891 return out
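# Shape of the returned mapping for two bots, one taken from master_map above
# and one hypothetical name resolved by the fallback heuristics:
#   GetDefaultTryConfigs(['chromium_presubmit', 'win_chromium_x64']) ==
#     {'tryserver.chromium.linux': {'chromium_presubmit': set(['defaulttests'])},
#      'tryserver.chromium.win': {'win_chromium_x64': set(['defaulttests'])}}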
1894 def CheckChangeOnCommit(input_api, output_api):
1895 results = []
1896 results.extend(_CommonChecks(input_api, output_api))
1897 # TODO(thestig) temporarily disabled, doesn't work in third_party/
1898 #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
1899 # input_api, output_api, sources))
1900 # Make sure the tree is 'open'.
1901 results.extend(input_api.canned_checks.CheckTreeIsOpen(
1902 input_api,
1903 output_api,
1904 json_url='http://chromium-status.appspot.com/current?format=json'))
1906 results.extend(input_api.canned_checks.CheckChangeHasBugField(
1907 input_api, output_api))
1908 results.extend(input_api.canned_checks.CheckChangeHasDescription(
1909 input_api, output_api))
1910 return results
1913 def GetPreferredTryMasters(project, change):
1914 import json
1915 import os.path
1916 import platform
1917 import subprocess
1919 cq_config_path = os.path.join(
1920 change.RepositoryRoot(), 'infra', 'config', 'cq.cfg')
1921 # commit_queue.py below is a script in the depot_tools directory, which has a
1922 # 'builders' command to retrieve a list of CQ builders from the CQ config.
1923 is_win = platform.system() == 'Windows'
1924 masters = json.loads(subprocess.check_output(
1925 ['commit_queue', 'builders', cq_config_path], shell=is_win))
1927 try_config = {}
1928 for master in masters:
1929 try_config.setdefault(master, {})
1930 for builder in masters[master]:
1931 # Do not trigger presubmit builders, since they're likely to fail
1932 # (e.g. OWNERS checks before code review is finished), and we're
1933 # running the local presubmit anyway.
1934 if 'presubmit' not in builder:
1935 try_config[master][builder] = ['defaulttests']
1937 return try_config
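# The resulting try_config has the same master -> {builder: tests} shape as
# GetDefaultTryConfigs' output, except that the test values here are plain
# lists, e.g. (master and builder names hypothetical):
#   {'tryserver.chromium.linux': {'linux_chromium_rel': ['defaulttests']}}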