# Commit subject: Move declarative api files to extensions/browser/api/declarative.
# Repository: chromium-blink-merge.git / PRESUBMIT.py
# Blob: cd8956c4068138f5c35030969b7af81ce2d09209
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into gcl.
9 """
12 import re
13 import sys
# Paths excluded from the pan-project presubmit checks (third-party,
# generated, or style-exempt code).
_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
)

# TestRunner and NetscapePlugIn library is temporarily excluded from pan-project
# checks until it's transitioned to chromium coding style.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)

# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)

_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')


_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Send mail to\n'
    'marja@chromium.org if this is not the case.')


# Each entry is a 3-tuple: (function name or, with a leading '/', a regex;
# tuple of message lines; treat-as-error flag).
_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       'prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)


# Each entry is a 4-tuple: (function name or, with a leading '/', a regex;
# tuple of message lines; treat-as-error flag; tuple of excluded-path regexes).
_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
)


# OS_* preprocessor macros that _CheckForInvalidOSMacros accepts.
_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # A definition (ends with '{') or a qualified name is not a call site.
  exclusion_pattern = input_api.re.compile(
    r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
      base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    """Restricts the check to implementation files outside test code."""
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
      affected_file,
      white_list=(file_inclusion_pattern, ),
      black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      # Flag a call unless it is inside a // comment or matches the
      # exclusion pattern (declaration/definition rather than a call).
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
          '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  iostream_re = input_api.re.compile(r'^#include\s*<iostream>',
                                     input_api.re.MULTILINE)
  offenders = []
  for header in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not header.LocalPath().endswith('.h'):
      continue
    # Scan the whole file, not just the diff: an existing <iostream>
    # include in a touched header should also be reported.
    if iostream_re.search(input_api.ReadFile(header)):
      offenders.append(header)

  if offenders:
    return [ output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        offenders) ]
  return []
339 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
340 """Checks to make sure no source files use UNIT_TEST"""
341 problems = []
342 for f in input_api.AffectedFiles():
343 if (not f.LocalPath().endswith(('.cc', '.mm'))):
344 continue
346 for line_num, line in f.ChangedContents():
347 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
348 problems.append(' %s:%d' % (f.LocalPath(), line_num))
350 if not problems:
351 return []
352 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
353 '\n'.join(problems))]
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  offenders = []
  for affected in input_api.AffectedFiles():
    path = affected.LocalPath()
    # Windows-specific files and tests may legitimately use wstrings.
    if (not path.endswith(('.cc', '.h')) or
        path.endswith(('test.cc', '_win.cc', '_win.h'))):
      continue

    # A 'presubmit: allow wstring' marker line waives exactly the next line.
    waive_next = False
    for num, text in affected.ChangedContents():
      if 'presubmit: allow wstring' in text:
        waive_next = True
        continue
      if 'wstring' in text and not waive_next:
        offenders.append('    %s:%d' % (path, num))
      waive_next = False

  if not offenders:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      ' If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(offenders))]
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  touched = [f for f in input_api.AffectedFiles()
             if f.LocalPath().endswith('.DEPS.git')]
  if not touched:
    return []
  return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""
  warnings = []
  errors = []

  def MatchesBanned(func_name, line):
    """True if |line| uses the banned entry (leading '/' marks a regex)."""
    if func_name[0:1] == '/':
      return input_api.re.search(func_name[1:], line) is not None
    return func_name in line

  def AppendProblem(f, line_num, message, error):
    """Records the location plus the message lines in the right bucket."""
    problems = errors if error else warnings
    problems.append('    %s:%d:' % (f.LocalPath(), line_num))
    for message_line in message:
      problems.append('      %s' % message_line)

  # Objective-C(++) and header files.
  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        if MatchesBanned(func_name, line):
          AppendProblem(f, line_num, message, error)

  # C++ (and Objective-C++) files; these entries carry per-path exclusions.
  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    local_path = f.LocalPath()
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        if any(input_api.re.match(item, local_path)
               for item in excluded_paths):
          continue
        if MatchesBanned(func_name, line):
          AppendProblem(f, line_num, message, error)

  result = []
  if warnings:
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  # NOTE: the original docstring ("Make sure that banned functions are not
  # used.") was copy-pasted from _CheckNoBannedFunctions; fixed here.
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    # Scan the full file contents, not just the changed lines.
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  trinary_re = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  offenders = []
  for affected in input_api.AffectedFiles():
    if not affected.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    # Note: .match() anchors at the start of the changed line.
    offenders.extend(
        '    %s:%d' % (affected.LocalPath(), num)
        for num, text in affected.ChangedContents()
        if trinary_re.match(text))

  if not offenders:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(offenders))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  # Permission bits are not meaningful on Windows.
  if input_api.platform == 'win32':
    return []
  args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
          input_api.change.RepositoryRoot()]
  for affected in input_api.AffectedFiles():
    args.extend(['--file', affected.LocalPath()])
  proc = input_api.subprocess.Popen(args,
                                    stdout=input_api.subprocess.PIPE)
  output = proc.communicate()[0].strip()
  if output:
    return [output_api.PresubmitError('checkperms.py failed.',
                                      output.splitlines())]
  return []
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append('    %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h', errors))
  return results
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      # BUG FIX: this branch used to append a bare line number, which would
      # crash the tuple-unpacking loop below if ever reached. Append a
      # (line, previous line) pair like every other branch.
      problem_linenums.append((line_num, previous_line_num))
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append('    %s:%d' % (file_path, line_num))
  return warnings
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f.

  Returns a list of warning strings produced by _CheckIncludeOrderForScope,
  one per include line whose ordering looks wrong.
  """
  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  # specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  # This loop only decides where the orderable includes begin (line_num);
  # the actual ordering check happens on the scopes built below.
  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      # Whether or not it was the special first include, stop scanning here.
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    """Skips excluded paths; per-file ordering is checked elsewhere."""
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results
def _CheckForVersionControlConflictsInFile(input_api, f):
  # Conflict markers sit at column zero: '<<<<<<< ', '>>>>>>> ', or a
  # bare '=======' line.
  marker_re = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  found = []
  for num, text in f.ChangedContents():
    if marker_re.match(text):
      found.append('    %s:%d %s' % (f.LocalPath(), num, text))
  return found
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  all_errors = []
  for affected in input_api.AffectedFiles():
    all_errors += _CheckForVersionControlConflictsInFile(input_api, affected)

  if not all_errors:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', all_errors)]
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
      affected_file,
      white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
      black_list=(_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST))

  # Flag quoted strings containing google.com, unless the whole thing is
  # inside a // comment.
  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        ['  %s:%d:  %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  """
  # Flags names with a single-letter component, e.g. foo_h.png or foo_h_x.png.
  pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
  errors = []
  for f in input_api.AffectedFiles(include_deletes=False):
    if pattern.match(f.LocalPath()):
      errors.append('    %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results
def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          results.add('%s/DEPS' % path)
  return results
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    # Missing approvals block the commit; at upload time they only notify.
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # If we can't determine the owner from Rietveld, fall back to the
  # change's author.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n    %s' %
             '\n    '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n    %s' %
          '\n    '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Flags new LOG(INFO)/LOG_IF(INFO) and printf/fprintf(stdout|stderr)
  in production implementation files."""
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  # Paths that are allowed to log to the console (logging machinery itself,
  # developer tools, benchmarks, ...).
  spam_black_list = (_EXCLUDED_PATHS +
                     _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_BLACK_LIST +
                     (r"^base[\\\/]logging\.h$",
                      r"^base[\\\/]logging\.cc$",
                      r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                      r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                      r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                          r"startup_browser_creator\.cc$",
                      r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                      r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                          r"diagnostics_writer\.cc$",
                      r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                      r"^chromecast[\\\/]",
                      r"^cloud_print[\\\/]",
                      r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                          r"gl_helper_benchmark\.cc$",
                      r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                      r"^native_client_sdk[\\\/]",
                      r"^remoting[\\\/]base[\\\/]logging\.h$",
                      r"^remoting[\\\/]host[\\\/].*",
                      r"^sandbox[\\\/]linux[\\\/].*",
                      r"^tools[\\\/]",
                      r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                      r"^webkit[\\\/]browser[\\\/]fileapi[\\\/]" +
                          r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=spam_black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if (re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents) or
        re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents)):
      log_info.append(f.LocalPath())
    if (re.search(r"\bprintf\(", contents) or
        re.search(r"\bfprintf\((stdout|stderr)", contents)):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]
  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []
def _CheckForAnonymousVariables(input_api, output_api):
  """These types are all expected to hold locks while in scope and
  so should never be anonymous (which causes them to be immediately
  destroyed).

  Returns a list with at most one PresubmitError naming the offending
  file:line locations.
  """
  # Scoped-holder types that must be bound to a named variable.
  # (The list terminator was missing; restored.)
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
  errors = []

  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    for linenum, line in f.ChangedContents():
      if bad_pattern.search(line) and not good_pattern.search(line):
        errors.append('%s:%d' % (f.LocalPath(), linenum))

  if errors:
    return [output_api.PresubmitError(
        'These lines create anonymous variables that need to be named:',
        items=errors)]
  return []
def _CheckCygwinShell(input_api, output_api):
  """Flags .gyp/.gypi files that touch msvs_cygwin_shell (default is 0)."""
  def gyp_file_filter(affected):
    return input_api.FilterSourceFile(
        affected, white_list=(r'.+\.(gyp|gypi)$',))

  offenders = []
  for f in input_api.AffectedSourceFiles(gyp_file_filter):
    # One report per file is enough, even if several lines match.
    if any('msvs_cygwin_shell' in line for _, line in f.ChangedContents()):
      offenders.append(f.LocalPath())

  if not offenders:
    return []
  return [output_api.PresubmitError(
      'These files should not use msvs_cygwin_shell (the default is 0):',
      items=offenders)]
def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added.

  Any UserMetricsAction("...") name introduced in a .cc/.mm file must already
  exist in tools/metrics/actions/actions.xml; otherwise a prompt warning is
  returned telling the author to regenerate it.
  """
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if match:
        # Loads contents in tools/metrics/actions/actions.xml to memory. It's
        # loaded only once. Use 'is None' (not truthiness) so an empty
        # actions.xml is not re-opened for every matched action.
        if current_actions is None:
          with open('tools/metrics/actions/actions.xml') as actions_f:
            current_actions = actions_f.read()
        # Search for the matched user action name in |current_actions|.
        for action_name in match.groups():
          action = 'name="{0}"'.format(action_name)
          if action not in current_actions:
            return [output_api.PresubmitPromptWarning(
              'File %s line %d: %s is missing in '
              'tools/metrics/actions/actions.xml. Please run '
              'tools/metrics/actions/extract_actions.py to update.'
              % (f.LocalPath(), line_num, action_name))]
  return []
def _GetJSONParseError(input_api, filename, eat_comments=True):
  """Returns the ValueError raised while parsing |filename| as JSON, or None.

  When |eat_comments| is true the file is first piped through the project's
  json_comment_eater tool so that //-style comments do not trip the parser.
  """
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      comment_eater = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      proc = input_api.subprocess.Popen(
          [input_api.python_executable, comment_eater],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      contents = proc.communicate(input=contents)[0]
    input_api.json.loads(contents)
    return None
  except ValueError as e:
    return e
def _GetIDLParseError(input_api, filename):
  """Feeds |filename| to the IDL schema compiler and returns its error.

  Returns the tool's stderr text when parsing fails, the ValueError from
  reading the file, or None when the IDL is valid.
  """
  try:
    contents = input_api.ReadFile(filename)
    idl_schema_tool = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    proc = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema_tool],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    error_text = proc.communicate(input=contents)[1]
    return error_text or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors.

  Restores the literal terminators that were truncated from the actions
  dict and the three pattern lists. Returns one PresubmitError per
  unparseable affected file.
  """
  # Maps file extension to the parse-checking helper for that format.
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  original_sys_path = sys.path
  checkstyle_dir = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')
  try:
    # Temporarily extend sys.path (a new list, so the restore below is a
    # genuine rollback) so the in-tree checkstyle wrapper can be imported.
    sys.path = original_sys_path + [checkstyle_dir]
    import checkstyle
  finally:
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')
# Map of deprecated -webkit-prefixed CSS tokens to their standard
# replacements; consumed by _CheckNoDeprecatedCSS. (The list's closing
# bracket was truncated; restored.)
_DEPRECATED_CSS = [
  # Values
  ( "-webkit-box", "flex" ),
  ( "-webkit-inline-box", "inline-flex" ),
  ( "-webkit-flex", "flex" ),
  ( "-webkit-inline-flex", "inline-flex" ),
  ( "-webkit-min-content", "min-content" ),
  ( "-webkit-max-content", "max-content" ),

  # Properties
  ( "-webkit-background-clip", "background-clip" ),
  ( "-webkit-background-origin", "background-origin" ),
  ( "-webkit-background-size", "background-size" ),
  ( "-webkit-box-shadow", "box-shadow" ),

  # Functions
  ( "-webkit-gradient", "gradient" ),
  ( "-webkit-repeating-gradient", "repeating-gradient" ),
  ( "-webkit-linear-gradient", "linear-gradient" ),
  ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
  ( "-webkit-radial-gradient", "radial-gradient" ),
  ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
]
def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation is ignored by the hooks as it
      needs to be consumed by WebKit. """
  results = []
  file_inclusion_pattern = (r".+\.css$")
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  # BUG FIX: white_list must be a sequence of regexes. Passing the bare
  # string made FilterSourceFile iterate over its characters, and the '.'
  # character matches every path — so this check ran on ALL affected files
  # instead of only .css files. Wrap it in a 1-tuple (same convention as
  # _CheckSpamLogging).
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=(file_inclusion_pattern,), black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if input_api.re.search(deprecated_value, line):
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
  return results
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))

  # Project-specific checks taking (input_api, output_api), run in order.
  for check in (_CheckAuthorizedAuthor,
                _CheckNoProductionCodeUsingTestOnlyFunctions,
                _CheckNoIOStreamInHeaders,
                _CheckNoUNIT_TESTInSourceFiles,
                _CheckNoNewWStrings,
                _CheckNoDEPSGIT,
                _CheckNoBannedFunctions,
                _CheckNoPragmaOnce,
                _CheckNoTrinaryTrueFalse,
                _CheckUnwantedDependencies,
                _CheckFilePermissions,
                _CheckNoAuraWindowPropertyHInHeaders,
                _CheckIncludeOrder,
                _CheckForVersionControlConflicts,
                _CheckPatchFiles,
                _CheckHardcodedGoogleHostsInLowerLayers,
                _CheckNoAbbreviationInPngFileName,
                _CheckForInvalidOSMacros,
                _CheckAddedDepsHaveTargetApprovals):
    results.extend(check(input_api, output_api))

  # Only .grd files are subject to the no-tabs rule.
  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
      input_api,
      output_api,
      source_file_filter=lambda x: x.LocalPath().endswith('.grd')))

  for check in (_CheckSpamLogging,
                _CheckForAnonymousVariables,
                _CheckCygwinShell,
                _CheckUserActionUpdate,
                _CheckNoDeprecatedCSS,
                _CheckParseErrors):
    results.extend(check(input_api, output_api))

  # Edits to this script also run its own unit tests.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-googler/chromites committers, verify the author's email address is
  in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  # Read AUTHORS inside a with-block: the old code iterated a generator over
  # a bare open() and leaked the file handle.
  with open(authors_path) as authors_file:
    matches = (input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
               for line in authors_file)
    valid_authors = [match.group(1).lower() for match in matches if match]
  if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []
def _CheckPatchFiles(input_api, output_api):
  """Rejects leftover patch artifacts (.orig/.rej files) in the change."""
  leftover_exts = ('.orig', '.rej')
  problems = []
  for f in input_api.AffectedFiles():
    path = f.LocalPath()
    if path.endswith(leftover_exts):
      problems.append(path)
  if not problems:
    return []
  return [output_api.PresubmitError(
      "Don't commit .rej and .orig files.", problems)]
def _DidYouMeanOSMacro(bad_macro):
  """Suggests the intended OS_* macro for a misspelled one.

  Matches on the first letter after the 'OS_' prefix (e.g. 'OS_WINDOWS'
  -> 'OS_WIN'). Returns '' when there is no plausible suggestion.
  """
  suggestions = {'A': 'OS_ANDROID',
                 'B': 'OS_BSD',
                 'C': 'OS_CHROMEOS',
                 'F': 'OS_FREEBSD',
                 'L': 'OS_LINUX',
                 'M': 'OS_MACOSX',
                 'N': 'OS_NACL',
                 'O': 'OS_OPENBSD',
                 'P': 'OS_POSIX',
                 'S': 'OS_SOLARIS',
                 'W': 'OS_WIN'}
  try:
    return suggestions[bad_macro[3].upper()]
  except (KeyError, IndexError):
    # IndexError: macro shorter than four characters (e.g. a bare 'OS_')
    # previously escaped this handler and crashed the presubmit.
    return ''
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_re = input_api.re.compile(r'^\s*#')
  os_macro_re = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    # Only preprocessor lines can legitimately reference OS_* macros.
    if not preprocessor_re.search(line):
      continue
    for match in os_macro_re.finditer(line):
      macro = match.group(1)
      if macro in _VALID_OS_MACROS:
        continue
      suggestion = _DidYouMeanOSMacro(macro)
      hint = ' (did you mean %s?)' % suggestion if suggestion else ''
      results.append(' %s:%d %s%s' % (f.LocalPath(), lnum, macro, hint))
  return results
def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  # Non-C/C++ sources can't use the defined(OS_*) preprocessor idiom.
  skipped_extensions = ('.py', '.js', '.html', '.css')
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(skipped_extensions):
      continue
    bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
  if bad_macros:
    return [output_api.PresubmitError(
        'Possibly invalid OS macro[s] found. Please fix your code\n'
        'or add your macro to src/PRESUBMIT.py.', bad_macros)]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run at upload time: common checks + Java style."""
  return (_CommonChecks(input_api, output_api) +
          _CheckJavaStyle(input_api, output_api))
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None.  There is no longer a default master.
  (The docstring terminator and the map's closing brace were truncated;
  restored.)
  """
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
      'linux_gpu': 'tryserver.chromium.gpu',
      'mac_gpu': 'tryserver.chromium.gpu',
      'win_gpu': 'tryserver.chromium.gpu',
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = master_map.get(bot)
  if not master:
    # Fall back to substring heuristics; 'gpu' takes priority over the
    # platform names.
    if 'gpu' in bot:
      master = 'tryserver.chromium.gpu'
    elif 'linux' in bot or 'android' in bot or 'presubmit' in bot:
      master = 'tryserver.chromium.linux'
    elif 'win' in bot:
      master = 'tryserver.chromium.win'
    elif 'mac' in bot or 'ios' in bot:
      master = 'tryserver.chromium.mac'
  return master
def GetDefaultTryConfigs(bots=None):
  """Returns a list of ('bot', set(['tests']), optionally filtered by [bots].

  To add tests to this list, they MUST be in the the corresponding master's
  gatekeeper config. For example, anything on master.chromium would be closed by
  tools/build/masters/master.chromium/master_gatekeeper_cfg.py.

  If 'bots' is specified, will only return configurations for bots in that list.
  """
  standard_tests = [
    'base_unittests',
    'browser_tests',
    'cacheinvalidation_unittests',
    'check_deps',
    'check_deps2git',
    'content_browsertests',
    'content_unittests',
    'crypto_unittests',
    'gpu_unittests',
    'interactive_ui_tests',
    'ipc_tests',
    'jingle_unittests',
    'media_unittests',
    'net_unittests',
    'ppapi_unittests',
    'printing_unittests',
    'sql_unittests',
    'sync_unit_tests',
    'unit_tests',
    # Broken in release.
    #'url_unittests',
    #'webkit_unit_tests',
  ]

  builders_and_tests = {
    # TODO(maruel): Figure out a way to run 'sizes' where people can
    # effectively update the perf expectation correctly.  This requires a
    # clobber=True build running 'sizes'.  'sizes' is not accurate with
    # incremental build.  Reference:
    # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
    # TODO(maruel): An option would be to run 'sizes' but not count a failure
    # of this step as a try job failure.
    'android_aosp': ['compile'],
    'android_chromium_gn_compile_rel': ['compile'],
    'android_clang_dbg': ['slave_steps'],
    'android_dbg_tests_recipe': ['slave_steps'],
    'cros_x86': ['defaulttests'],
    'ios_dbg_simulator': [
      'compile',
      'base_unittests',
      'content_unittests',
      'crypto_unittests',
      'url_unittests',
      'net_unittests',
      'sql_unittests',
      'ui_unittests',
    ],
    'ios_rel_device': ['compile'],
    'linux_asan': ['compile'],
    'mac_asan': ['compile'],
    #TODO(stip): Change the name of this builder to reflect that it's release.
    'linux_gtk': standard_tests,
    'linux_chromeos_asan': ['compile'],
    'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
    'linux_chromium_chromeos_rel_swarming': ['defaulttests'],
    'linux_chromium_compile_dbg': ['defaulttests'],
    'linux_chromium_gn_rel': ['defaulttests'],
    'linux_chromium_rel_swarming': ['defaulttests'],
    'linux_chromium_clang_dbg': ['defaulttests'],
    'linux_gpu': ['defaulttests'],
    'linux_nacl_sdk_build': ['compile'],
    'mac_chromium_compile_dbg': ['defaulttests'],
    'mac_chromium_rel_swarming': ['defaulttests'],
    'mac_gpu': ['defaulttests'],
    'mac_nacl_sdk_build': ['compile'],
    'win_chromium_compile_dbg': ['defaulttests'],
    'win_chromium_dbg': ['defaulttests'],
    'win_chromium_rel_swarming': ['defaulttests'],
    'win_chromium_x64_rel_swarming': ['defaulttests'],
    'win_gpu': ['defaulttests'],
    'win_nacl_sdk_build': ['compile'],
  }

  if bots:
    # Unknown bot names raise KeyError, which surfaces typos immediately.
    filtered_builders_and_tests = dict((bot, set(builders_and_tests[bot]))
                                       for bot in bots)
  else:
    # items() instead of the Python-2-only iteritems(): identical behavior
    # here and keeps the script importable under Python 3.
    filtered_builders_and_tests = dict(
        (bot, set(tests))
        for bot, tests in builders_and_tests.items())

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  for bot, tests in filtered_builders_and_tests.items():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point run at commit time."""
  canned = input_api.canned_checks
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(canned.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(canned.CheckChangeHasBugField(input_api, output_api))
  results.extend(canned.CheckChangeHasDescription(input_api, output_api))
  return results
def GetPreferredTryMasters(project, change):
  """Maps the files in |change| to the set of try masters/bots to run.

  Platform-specific changes get only that platform's bots; everything else
  gets the default builder list, augmented for aura/chromeos/gyp changes.
  (Several truncated '])' list terminators restored.)
  """
  files = change.LocalPaths()

  # OWNERS-only changes need no try jobs at all.
  if not files or all(re.search(r'[\\\/]OWNERS$', f) for f in files):
    return {}

  if all(re.search(r'\.(m|mm)$|(^|[\\\/_])mac[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'mac_chromium_compile_dbg',
        'mac_chromium_rel_swarming',
    ])
  # Match both path separators, like the mac/android/ios branches (the old
  # pattern only recognized '/', so Windows-style paths slipped through).
  if all(re.search(r'(^|[\\\/_])win[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'win_chromium_dbg',
        'win_chromium_rel_swarming',
    ])
  if all(re.search(r'(^|[\\\/_])android[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'android_aosp',
        'android_clang_dbg',
        'android_dbg_tests_recipe',
    ])
  if all(re.search(r'[\\\/_]ios[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])

  builders = [
      'android_chromium_gn_compile_rel',
      'android_clang_dbg',
      'android_dbg_tests_recipe',
      'ios_dbg_simulator',
      'ios_rel_device',
      'linux_chromium_chromeos_rel_swarming',
      'linux_chromium_clang_dbg',
      'linux_chromium_gn_rel',
      'linux_chromium_rel_swarming',
      'linux_gpu',
      'mac_chromium_compile_dbg',
      'mac_chromium_rel_swarming',
      'mac_gpu',
      'win_chromium_compile_dbg',
      'win_chromium_rel_swarming',
      'win_chromium_x64_rel_swarming',
      'win_gpu',
  ]

  # Match things like path/aura/file.cc and path/file_aura.cc.
  # Same for chromeos.
  if any(re.search(r'[\\\/_](aura|chromeos)', f) for f in files):
    builders.extend([
        'linux_chromeos_asan',
        'linux_chromium_chromeos_clang_dbg',
    ])

  # If there are gyp changes to base, build, or chromeos, run a full cros build
  # in addition to the shorter linux_chromeos build. Changes to high level gyp
  # files have a much higher chance of breaking the cros build, which is
  # different from the linux_chromeos build that most chrome developers test
  # with.
  if any(re.search(r'^(base|build|chromeos).*\.gypi?$', f) for f in files):
    builders.extend(['cros_x86'])

  # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
  # unless they're .gyp(i) files as changes to those files can break the gyp
  # step on that bot.
  if (not all(re.search('^chrome', f) for f in files) or
      any(re.search(r'\.gypi?$', f) for f in files)):
    builders.extend(['android_aosp'])

  return GetDefaultTryConfigs(builders)