# Roll libjingle/source/talk to r7007.
# [chromium-blink-merge.git] / PRESUBMIT.py
# blob: 79187b30045564e3d19e6d2d5a5195df5950d3fb
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into gcl.
9 """
12 import re
13 import sys
# Paths excluded from all presubmit checks in this file (third-party-style
# trees, generated files, and files with their own conventions).
_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
)
# TestRunner and NetscapePlugIn library is temporarily excluded from pan-project
# checks until it's transitioned to chromium coding style.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
# Interpolated (via %) into the inclusion/exclusion patterns below.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)
# Warning emitted by _CheckNoProductionCodeUsingTestOnlyFunctions; advisory
# only (surfaced via PresubmitPromptOrNotify, never a hard error).
_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')
# Warning emitted by _CheckIncludeOrder when a changed #include block does not
# follow the expected ordering.
_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Send mail to\n'
    'marja@chromium.org if this is not the case.')
# Banned Objective-C selectors/patterns, consumed by _CheckNoBannedFunctions.
# Each entry is a 3-tuple:
#   (substring, or regexp when prefixed with '/',
#    tuple of message lines to print on a hit,
#    treat-as-error flag — False means warn only).
_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       ' prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)
# Banned C++ functions/patterns, consumed by _CheckNoBannedFunctions.
# Each entry is a 4-tuple:
#   (substring, or regexp when prefixed with '/',
#    tuple of message lines to print on a hit,
#    treat-as-error flag — False means warn only,
#    tuple of path regexps excluded from this rule).
_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
       'The use of SkRefPtr is prohibited. ',
       'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
       'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
       'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
       'The use of SkAutoTUnref is dangerous because it implicitly ',
       'converts to a raw pointer. Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
       'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
       'because it implicitly converts to a raw pointer. ',
       'Please use skia::RefPtr instead.',
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623',
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
       'Do not introduce new v8::Extensions into the code base, use',
       'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
)
# OS_* macros that are allowed to appear in #if defined(...) checks.
_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  # Names that look like test-only entry points: anything in a test::
  # namespace, or ending in ForTest(ing)/for_test(ing).
  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # Qualified names and definitions (pattern followed by a '{' body opener)
  # are not calls; skip them.
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    # Restrict the scan to implementation files, minus the global and
    # test-code exclusions.
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      # Flag a line only if it looks like a call and is neither inside a
      # // comment nor excluded as a definition/qualified name.
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>.

  Per the error message below, <iostream> inserts a static initializer
  into every translation unit that includes it; headers should use
  <ostream> instead.
  """
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:  # Truthiness check instead of len().
    return [output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        files)]
  return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST"""
  offenders = []
  for affected in input_api.AffectedFiles():
    if not affected.LocalPath().endswith(('.cc', '.mm')):
      continue

    for num, text in affected.ChangedContents():
      # Match 'UNIT_TEST' as a whole token: followed by a space, or ending
      # the line.
      if 'UNIT_TEST ' in text or text.endswith('UNIT_TEST'):
        offenders.append(' %s:%d' % (affected.LocalPath(), num))

  if offenders:
    return [output_api.PresubmitPromptWarning(
        'UNIT_TEST is only for headers.\n' + '\n'.join(offenders))]
  return []
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  offenders = []
  for affected in input_api.AffectedFiles():
    path = affected.LocalPath()
    # Only C++ sources/headers; Windows-specific and test files are exempt.
    eligible = (path.endswith(('.cc', '.h')) and
                not path.endswith(('test.cc', '_win.cc', '_win.h')))
    if not eligible:
      continue

    # A 'presubmit: allow wstring' marker suppresses the check for the
    # immediately following changed line only.
    allow_next = False
    for num, text in affected.ChangedContents():
      if 'presubmit: allow wstring' in text:
        allow_next = True
      else:
        if 'wstring' in text and not allow_next:
          offenders.append(' %s:%d' % (path, num))
        allow_next = False

  if offenders:
    return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
        ' If you are calling a cross-platform API that accepts a wstring, '
        'fix the API.\n' +
        '\n'.join(offenders))]
  return []
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  touched = [f for f in input_api.AffectedFiles()
             if f.LocalPath().endswith('.DEPS.git')]
  if not touched:
    return []
  return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used.

  Scans Objective-C files against _BANNED_OBJC_FUNCTIONS and C++ files
  against _BANNED_CPP_FUNCTIONS. Each hit is reported as a warning or an
  error depending on the entry's error flag.
  """
  warnings = []
  errors = []

  def IsBlacklisted(affected_file, blacklist):
    # True if the file's path matches any regexp in |blacklist|.
    local_path = affected_file.LocalPath()
    for item in blacklist:
      if input_api.re.match(item, local_path):
        return True
    return False

  def CheckForMatch(f, line_num, line, func_name, message, error):
    # A leading '/' marks |func_name| as a regexp; otherwise it is a plain
    # substring match. Appends the formatted report to warnings or errors.
    matched = False
    if func_name[0:1] == '/':
      regex = func_name[1:]
      if input_api.re.search(regex, line):
        matched = True
    elif func_name in line:
      matched = True
    if matched:
      problems = errors if error else warnings
      problems.append(' %s:%d:' % (f.LocalPath(), line_num))
      for message_line in message:
        problems.append(' %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        if IsBlacklisted(f, excluded_paths):
          continue
        CheckForMatch(f, line_num, line, func_name, message, error)

  result = []
  if warnings:
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  # NOTE: the original docstring was copy-pasted from the banned-functions
  # check; this function actually scans headers for '#pragma once'.
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      # Use search() rather than match(): the '? true : false' expression
      # can appear anywhere in the line, not only at column 0, and match()
      # anchors at the start of the string.
      if pattern.search(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  # Collect the changed lines of every affected C++ file, keyed by path.
  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  # DISALLOW rule violations become errors; everything else reported by
  # checkdeps is only a warning.
  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  if input_api.platform == 'win32':
    # Nothing to verify on Windows.
    return []
  cmd = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
         input_api.change.RepositoryRoot()]
  for affected in input_api.AffectedFiles():
    cmd.extend(['--file', affected.LocalPath()])
  proc = input_api.subprocess.Popen(cmd,
                                    stdout=input_api.subprocess.PIPE)
  output = proc.communicate()[0].strip()
  if not output:
    return []
  return [output_api.PresubmitError('checkperms.py failed.',
                                    output.splitlines())]
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append(' %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h', errors))
  return results
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      # Fix: append a (line, previous line) pair like every other branch;
      # the original appended a bare int, which would crash the tuple
      # unpacking in the loop below if this branch were ever taken.
      problem_linenums.append((line_num, previous_line_num))
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append(' %s:%d' % (file_path, line_num))
  return warnings
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  # specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  # Preprocessor directives that start a new ordering scope.
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  # Each scope is checked independently against the ordering rules.
  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results
def _CheckForVersionControlConflictsInFile(input_api, f):
  # Lines beginning with '<<<<<<< ' or '>>>>>>> ', or consisting solely of
  # '=======', are leftover merge-conflict markers.
  marker = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  found = []
  for num, text in f.ChangedContents():
    if marker.match(text):
      found.append(' %s:%d %s' % (f.LocalPath(), num, text))
  return found
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  all_errors = []
  for affected in input_api.AffectedFiles():
    all_errors.extend(
        _CheckForVersionControlConflictsInFile(input_api, affected))

  if not all_errors:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', all_errors)]
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  """Warns when a quoted google.com URL is added to low-level directories."""
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      # URLs inside // comments are not flagged.
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        [' %s:%d: %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  """
  # Flags names containing a single-letter token, e.g. icon_h.png or
  # icon_h_hover.png.
  pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
  errors = []
  for f in input_api.AffectedFiles(include_deletes=False):
    if pattern.match(f.LocalPath()):
      errors.append(' %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results
def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          results.add('%s/DEPS' % path)
  return results
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  # Gather the stripped changed lines of every affected DEPS file.
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  # At commit time a missing approval is an error; before commit it only
  # notifies.
  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # Fall back to the change's author when Rietveld reports no owner.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
        output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
               '\n '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n %s' %
          '\n '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Flags implementation files that add console spam.

  Errors on LOG(INFO)/DLOG(INFO), LOG_IF(INFO, ...)/DLOG_IF(INFO, ...),
  printf(...), and fprintf(stdout/stderr, ...), except in the excluded
  paths below where console output is part of the tool's job.
  """
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  # Uses the module-level re (imported at the top of this file) to scan the
  # whole file contents, not just the changed lines.
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]
  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []
969 def _CheckForAnonymousVariables(input_api, output_api):
970 """These types are all expected to hold locks while in scope and
971 so should never be anonymous (which causes them to be immediately
972 destroyed)."""
973 they_who_must_be_named = [
974 'base::AutoLock',
975 'base::AutoReset',
976 'base::AutoUnlock',
977 'SkAutoAlphaRestore',
978 'SkAutoBitmapShaderInstall',
979 'SkAutoBlitterChoose',
980 'SkAutoBounderCommit',
981 'SkAutoCallProc',
982 'SkAutoCanvasRestore',
983 'SkAutoCommentBlock',
984 'SkAutoDescriptor',
985 'SkAutoDisableDirectionCheck',
986 'SkAutoDisableOvalCheck',
987 'SkAutoFree',
988 'SkAutoGlyphCache',
989 'SkAutoHDC',
990 'SkAutoLockColors',
991 'SkAutoLockPixels',
992 'SkAutoMalloc',
993 'SkAutoMaskFreeImage',
994 'SkAutoMutexAcquire',
995 'SkAutoPathBoundsUpdate',
996 'SkAutoPDFRelease',
997 'SkAutoRasterClipValidate',
998 'SkAutoRef',
999 'SkAutoTime',
1000 'SkAutoTrace',
1001 'SkAutoUnref',
1003 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1004 # bad: base::AutoLock(lock.get());
1005 # not bad: base::AutoLock lock(lock.get());
1006 bad_pattern = input_api.re.compile(anonymous)
1007 # good: new base::AutoLock(lock.get())
1008 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1009 errors = []
1011 for f in input_api.AffectedFiles():
1012 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1013 continue
1014 for linenum, line in f.ChangedContents():
1015 if bad_pattern.search(line) and not good_pattern.search(line):
1016 errors.append('%s:%d' % (f.LocalPath(), linenum))
1018 if errors:
1019 return [output_api.PresubmitError(
1020 'These lines create anonymous variables that need to be named:',
1021 items=errors)]
1022 return []
1025 def _CheckCygwinShell(input_api, output_api):
1026 source_file_filter = lambda x: input_api.FilterSourceFile(
1027 x, white_list=(r'.+\.(gyp|gypi)$',))
1028 cygwin_shell = []
1030 for f in input_api.AffectedSourceFiles(source_file_filter):
1031 for linenum, line in f.ChangedContents():
1032 if 'msvs_cygwin_shell' in line:
1033 cygwin_shell.append(f.LocalPath())
1034 break
1036 if cygwin_shell:
1037 return [output_api.PresubmitError(
1038 'These files should not use msvs_cygwin_shell (the default is 0):',
1039 items=cygwin_shell)]
1040 return []
1043 def _CheckUserActionUpdate(input_api, output_api):
1044 """Checks if any new user action has been added."""
1045 if any('actions.xml' == input_api.os_path.basename(f) for f in
1046 input_api.LocalPaths()):
1047 # If actions.xml is already included in the changelist, the PRESUBMIT
1048 # for actions.xml will do a more complete presubmit check.
1049 return []
1051 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1052 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
1053 current_actions = None
1054 for f in input_api.AffectedFiles(file_filter=file_filter):
1055 for line_num, line in f.ChangedContents():
1056 match = input_api.re.search(action_re, line)
1057 if match:
1058 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1059 # loaded only once.
1060 if not current_actions:
1061 with open('tools/metrics/actions/actions.xml') as actions_f:
1062 current_actions = actions_f.read()
1063 # Search for the matched user action name in |current_actions|.
1064 for action_name in match.groups():
1065 action = 'name="{0}"'.format(action_name)
1066 if action not in current_actions:
1067 return [output_api.PresubmitPromptWarning(
1068 'File %s line %d: %s is missing in '
1069 'tools/metrics/actions/actions.xml. Please run '
1070 'tools/metrics/actions/extract_actions.py to update.'
1071 % (f.LocalPath(), line_num, action_name))]
1072 return []
def _GetJSONParseError(input_api, filename, eat_comments=True):
  """Attempts to parse |filename| as JSON.

  Args:
    input_api: the presubmit InputApi.
    filename: absolute path of the file to check.
    eat_comments: when True, contents are first piped through
        tools/json_comment_eater to strip '//' comments, since most
        Chromium JSON files are preprocessed and allow them.

  Returns:
    The ValueError raised while reading/parsing, or None on success.
  """
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      json_comment_eater = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      # Pipe the contents through the comment-stripping helper script.
      process = input_api.subprocess.Popen(
          [input_api.python_executable, json_comment_eater],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      (contents, _) = process.communicate(input=contents)

    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None
def _GetIDLParseError(input_api, filename):
  """Attempts to parse |filename| with the extensions IDL schema compiler.

  Returns:
    The compiler's stderr output when parsing failed, the ValueError raised
    while reading the file, or None on success.
  """
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    # idl_schema.py reports problems on stderr; empty stderr means success.
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def _ActionFor(affected_file):
    # Pick the parse helper based on the file extension, or None.
    extension = input_api.os_path.splitext(affected_file.LocalPath())[1]
    return actions.get(extension)

  def _Matches(patterns, path):
    return any(input_api.re.search(pattern, path) for pattern in patterns)

  def _ShouldCheck(affected_file):
    checker = _ActionFor(affected_file)
    if not checker:
      return False
    path = affected_file.LocalPath()
    if _Matches(excluded_patterns, path):
      return False
    # IDL parsing is only meaningful for the extension API directories.
    if (checker == _GetIDLParseError and
        not _Matches(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=_ShouldCheck, include_deletes=False):
    checker = _ActionFor(affected_file)
    kwargs = {}
    if (checker == _GetJSONParseError and
        _Matches(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = checker(input_api,
                          affected_file.AbsoluteLocalPath(),
                          **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  original_sys_path = sys.path
  try:
    # Temporarily extend sys.path so the checkstyle wrapper module under
    # tools/android/checkstyle can be imported.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')
1190 _DEPRECATED_CSS = [
1191 # Values
1192 ( "-webkit-box", "flex" ),
1193 ( "-webkit-inline-box", "inline-flex" ),
1194 ( "-webkit-flex", "flex" ),
1195 ( "-webkit-inline-flex", "inline-flex" ),
1196 ( "-webkit-min-content", "min-content" ),
1197 ( "-webkit-max-content", "max-content" ),
1199 # Properties
1200 ( "-webkit-background-clip", "background-clip" ),
1201 ( "-webkit-background-origin", "background-origin" ),
1202 ( "-webkit-background-size", "background-size" ),
1203 ( "-webkit-box-shadow", "box-shadow" ),
1205 # Functions
1206 ( "-webkit-gradient", "gradient" ),
1207 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1208 ( "-webkit-linear-gradient", "linear-gradient" ),
1209 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1210 ( "-webkit-radial-gradient", "radial-gradient" ),
1211 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
def _CheckNoDeprecatedCSS(input_api, output_api):
  """Errors on changed .css lines that use deprecated CSS listed in
  _DEPRECATED_CSS (prefixed properties, functions or values).  External
  documentation is excluded via the black list as it still needs to be
  consumed by WebKit."""
  results = []
  file_inclusion_pattern = (r".+\.css$")
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for affected in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in affected.ChangedContents():
      for deprecated_value, replacement in _DEPRECATED_CSS:
        if input_api.re.search(deprecated_value, line):
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (affected.LocalPath(), line_num, deprecated_value, replacement)))
  return results
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  # Project-wide canned checks, with third-party and not-yet-migrated
  # TestRunner/NetscapePlugIn paths excluded.
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  # The tab check is restricted to .grd files here.
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))

  # When this script itself is modified, also run its own unit tests.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
1284 def _CheckAuthorizedAuthor(input_api, output_api):
1285 """For non-googler/chromites committers, verify the author's email address is
1286 in AUTHORS.
1288 # TODO(maruel): Add it to input_api?
1289 import fnmatch
1291 author = input_api.change.author_email
1292 if not author:
1293 input_api.logging.info('No author, skipping AUTHOR check')
1294 return []
1295 authors_path = input_api.os_path.join(
1296 input_api.PresubmitLocalPath(), 'AUTHORS')
1297 valid_authors = (
1298 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1299 for line in open(authors_path))
1300 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1301 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1302 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1303 return [output_api.PresubmitPromptWarning(
1304 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1305 '\n'
1306 'http://www.chromium.org/developers/contributing-code and read the '
1307 '"Legal" section\n'
1308 'If you are a chromite, verify the contributor signed the CLA.') %
1309 author)]
1310 return []
1313 def _CheckPatchFiles(input_api, output_api):
1314 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1315 if f.LocalPath().endswith(('.orig', '.rej'))]
1316 if problems:
1317 return [output_api.PresubmitError(
1318 "Don't commit .rej and .orig files.", problems)]
1319 else:
1320 return []
1323 def _DidYouMeanOSMacro(bad_macro):
1324 try:
1325 return {'A': 'OS_ANDROID',
1326 'B': 'OS_BSD',
1327 'C': 'OS_CHROMEOS',
1328 'F': 'OS_FREEBSD',
1329 'L': 'OS_LINUX',
1330 'M': 'OS_MACOSX',
1331 'N': 'OS_NACL',
1332 'O': 'OS_OPENBSD',
1333 'P': 'OS_POSIX',
1334 'S': 'OS_SOLARIS',
1335 'W': 'OS_WIN'}[bad_macro[3].upper()]
1336 except KeyError:
1337 return ''
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros.

  Scans the changed lines of |f| for preprocessor lines testing
  defined(OS_...) with a macro not in _VALID_OS_MACROS, and returns a list
  of human-readable 'path:line macro (did you mean ...?)' strings.
  """
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    # Only preprocessor lines can contain #if defined(OS_...) tests.
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          # Offer the closest valid macro, when one can be guessed.
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append(' %s:%d %s%s' % (f.LocalPath(),
                                          lnum,
                                          match.group(1),
                                          did_you_mean))
  return results
1358 def _CheckForInvalidOSMacros(input_api, output_api):
1359 """Check all affected files for invalid OS macros."""
1360 bad_macros = []
1361 for f in input_api.AffectedFiles():
1362 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1363 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1365 if not bad_macros:
1366 return []
1368 return [output_api.PresubmitError(
1369 'Possibly invalid OS macro[s] found. Please fix your code\n'
1370 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run at upload time."""
  return (_CommonChecks(input_api, output_api) +
          _CheckJavaStyle(input_api, output_api))
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None.  There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
      'linux_gpu': 'tryserver.chromium.gpu',
      'mac_gpu': 'tryserver.chromium.gpu',
      'win_gpu': 'tryserver.chromium.gpu',
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  explicit = master_map.get(bot)
  if explicit:
    return explicit
  # Fall back to substring heuristics, checked in priority order.
  if 'gpu' in bot:
    return 'tryserver.chromium.gpu'
  if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
    return 'tryserver.chromium.linux'
  if 'win' in bot:
    return 'tryserver.chromium.win'
  if 'mac' in bot or 'ios' in bot:
    return 'tryserver.chromium.mac'
  return None
def GetDefaultTryConfigs(bots=None):
  """Returns a dict mapping try server master to {bot: set(tests)}.

  To add tests to this list, they MUST be in the corresponding master's
  gatekeeper config. For example, anything on master.chromium would be closed by
  tools/build/masters/master.chromium/master_gatekeeper_cfg.py.

  If 'bots' is specified, will only return configurations for bots in that
  list; an unknown bot name raises KeyError.
  """
  standard_tests = [
      'base_unittests',
      'browser_tests',
      'cacheinvalidation_unittests',
      'check_deps',
      'check_deps2git',
      'content_browsertests',
      'content_unittests',
      'crypto_unittests',
      'gpu_unittests',
      'interactive_ui_tests',
      'ipc_tests',
      'jingle_unittests',
      'media_unittests',
      'net_unittests',
      'ppapi_unittests',
      'printing_unittests',
      'sql_unittests',
      'sync_unit_tests',
      'unit_tests',
      # Broken in release.
      #'url_unittests',
      #'webkit_unit_tests',
  ]

  builders_and_tests = {
      # TODO(maruel): Figure out a way to run 'sizes' where people can
      # effectively update the perf expectation correctly.  This requires a
      # clobber=True build running 'sizes'.  'sizes' is not accurate with
      # incremental build.  Reference:
      # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
      # TODO(maruel): An option would be to run 'sizes' but not count a failure
      # of this step as a try job failure.
      'android_aosp': ['compile'],
      'android_chromium_gn_compile_rel': ['compile'],
      'android_clang_dbg': ['slave_steps'],
      'android_dbg_tests_recipe': ['slave_steps'],
      'cros_x86': ['defaulttests'],
      'ios_dbg_simulator': [
          'compile',
          'base_unittests',
          'content_unittests',
          'crypto_unittests',
          'url_unittests',
          'net_unittests',
          'sql_unittests',
          'ui_unittests',
      ],
      'ios_rel_device': ['compile'],
      'linux_asan': ['compile'],
      'mac_asan': ['compile'],
      #TODO(stip): Change the name of this builder to reflect that it's release.
      'linux_gtk': standard_tests,
      'linux_chromeos_asan': ['compile'],
      'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
      'linux_chromium_chromeos_rel_swarming': ['defaulttests'],
      'linux_chromium_compile_dbg': ['defaulttests'],
      'linux_chromium_gn_rel': ['defaulttests'],
      'linux_chromium_rel_swarming': ['defaulttests'],
      'linux_chromium_clang_dbg': ['defaulttests'],
      'linux_gpu': ['defaulttests'],
      'linux_nacl_sdk_build': ['compile'],
      'mac_chromium_compile_dbg': ['defaulttests'],
      'mac_chromium_rel_swarming': ['defaulttests'],
      'mac_gpu': ['defaulttests'],
      'mac_nacl_sdk_build': ['compile'],
      'win_chromium_compile_dbg': ['defaulttests'],
      'win_chromium_dbg': ['defaulttests'],
      'win_chromium_rel_swarming': ['defaulttests'],
      'win_chromium_x64_rel_swarming': ['defaulttests'],
      'win_gpu': ['defaulttests'],
      'win_nacl_sdk_build': ['compile'],
  }

  if bots:
    filtered_builders_and_tests = dict((bot, set(builders_and_tests[bot]))
                                       for bot in bots)
  else:
    filtered_builders_and_tests = dict(
        (bot, set(tests))
        # items() rather than the Python-2-only iteritems(): equivalent
        # behavior on this small dict and portable to Python 3.
        for bot, tests in builders_and_tests.items())

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  for bot, tests in filtered_builders_and_tests.items():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point run at commit time."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  # input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  # Commit-only requirements: a BUG= field and a non-empty description.
  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  return results
def GetPreferredTryMasters(project, change):
  """Returns the default try masters/builders for |change|.

  Changes touching only one platform (mac/win/android/ios) get just that
  platform's builders; everything else gets the full default matrix, with
  extra builders added for aura/chromeos and gyp changes.
  """
  files = change.LocalPaths()

  # OWNERS-only changes need no try jobs at all.
  if not files or all(re.search(r'[\\\/]OWNERS$', f) for f in files):
    return {}

  if all(re.search(r'\.(m|mm)$|(^|[\\\/_])mac[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'mac_chromium_compile_dbg',
        'mac_chromium_rel_swarming',
    ])
  # Accept both path separators, consistent with the mac/android patterns
  # (the previous pattern '(^|[/_])win[/_.]' missed backslash-separated
  # paths).
  if all(re.search(r'(^|[\\\/_])win[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'win_chromium_dbg',
        'win_chromium_rel_swarming',
    ])
  if all(re.search(r'(^|[\\\/_])android[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'android_aosp',
        'android_clang_dbg',
        'android_dbg_tests_recipe',
    ])
  if all(re.search(r'[\\\/_]ios[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])

  builders = [
      'android_chromium_gn_compile_rel',
      'android_clang_dbg',
      'android_dbg_tests_recipe',
      'ios_dbg_simulator',
      'ios_rel_device',
      'linux_chromium_chromeos_rel_swarming',
      'linux_chromium_clang_dbg',
      'linux_chromium_gn_rel',
      'linux_chromium_rel_swarming',
      'linux_gpu',
      'mac_chromium_compile_dbg',
      'mac_chromium_rel_swarming',
      'mac_gpu',
      'win_chromium_compile_dbg',
      'win_chromium_rel_swarming',
      'win_chromium_x64_rel_swarming',
      'win_gpu',
  ]

  # Match things like path/aura/file.cc and path/file_aura.cc.
  # Same for chromeos.
  if any(re.search(r'[\\\/_](aura|chromeos)', f) for f in files):
    builders.extend([
        'linux_chromeos_asan',
        'linux_chromium_chromeos_clang_dbg'
    ])

  # If there are gyp changes to base, build, or chromeos, run a full cros build
  # in addition to the shorter linux_chromeos build. Changes to high level gyp
  # files have a much higher chance of breaking the cros build, which is
  # different from the linux_chromeos build that most chrome developers test
  # with.
  if any(re.search(r'^(base|build|chromeos).*\.gypi?$', f) for f in files):
    builders.extend(['cros_x86'])

  # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
  # unless they're .gyp(i) files as changes to those files can break the gyp
  # step on that bot.
  if (not all(re.search('^chrome', f) for f in files) or
      any(re.search(r'\.gypi?$', f) for f in files)):
    builders.extend(['android_aosp'])

  return GetDefaultTryConfigs(builders)