Properly decode IDN in interstitials
[chromium-blink-merge.git] / PRESUBMIT.py
blob3c76e2b5a65edc967f6dbf9376208349724cd535
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
9 """
12 _EXCLUDED_PATHS = (
13 r"^breakpad[\\\/].*",
14 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
15 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
16 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
17 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
18 r"^skia[\\\/].*",
19 r"^v8[\\\/].*",
20 r".*MakeFile$",
21 r".+_autogen\.h$",
22 r".+[\\\/]pnacl_shim\.c$",
23 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
24 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
27 # The NetscapePlugIn library is excluded from pan-project as it will soon
28 # be deleted together with the rest of the NPAPI and it's not worthwhile to
29 # update the coding style until then.
30 _TESTRUNNER_PATHS = (
31 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files. Interpolated (via %) into larger path regexes,
# e.g. _TEST_CODE_EXCLUDED_PATHS below.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
38 # Regular expression that matches code only used for test binaries
39 # (best effort).
40 _TEST_CODE_EXCLUDED_PATHS = (
41 r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
42 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
43 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
44 _IMPLEMENTATION_EXTENSIONS,
45 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.*[\\\/](test|tool(s)?)[\\\/].*',
47 # content_shell is used for running layout tests.
48 r'content[\\\/]shell[\\\/].*',
49 # At request of folks maintaining this folder.
50 r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
51 # Non-production example code.
52 r'mojo[\\\/]examples[\\\/].*',
53 # Launcher for running iOS tests on the simulator.
54 r'testing[\\\/]iossim[\\\/]iossim\.mm$',
# Warning text used by _CheckNoProductionCodeUsingTestOnlyFunctions. It is a
# prompt/notify rather than an error because the detection is heuristic.
_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')
# Message attached by _CheckIncludeOrder when an #include block is mis-sorted.
_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Remember to use the right '
    'collation (LC_COLLATE=C) and check https://google-styleguide.googlecode'
    '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')
69 _BANNED_OBJC_FUNCTIONS = (
71 'addTrackingRect:',
73 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
74 'prohibited. Please use CrTrackingArea instead.',
75 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
77 False,
80 r'/NSTrackingArea\W',
82 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
83 'instead.',
84 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
86 False,
89 'convertPointFromBase:',
91 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
92 'Please use |convertPoint:(point) fromView:nil| instead.',
93 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
95 True,
98 'convertPointToBase:',
100 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
101 'Please use |convertPoint:(point) toView:nil| instead.',
102 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
104 True,
107 'convertRectFromBase:',
109 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
110 'Please use |convertRect:(point) fromView:nil| instead.',
111 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
113 True,
116 'convertRectToBase:',
118 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
119 'Please use |convertRect:(point) toView:nil| instead.',
120 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
122 True,
125 'convertSizeFromBase:',
127 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
128 'Please use |convertSize:(point) fromView:nil| instead.',
129 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
131 True,
134 'convertSizeToBase:',
136 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
137 'Please use |convertSize:(point) toView:nil| instead.',
138 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
140 True,
145 _BANNED_CPP_FUNCTIONS = (
146 # Make sure that gtest's FRIEND_TEST() macro is not used; the
147 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
148 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
150 'FRIEND_TEST(',
152 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
153 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
155 False,
159 'ScopedAllowIO',
161 'New code should not use ScopedAllowIO. Post a task to the blocking',
162 'pool or the FILE thread instead.',
164 True,
166 r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
167 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
168 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
169 "customization_document_browsertest\.cc$",
170 r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
171 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
172 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
173 r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
174 r"simple_platform_shared_buffer_posix\.cc$",
175 r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
176 r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
177 r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
178 "drm_native_display_delegate\.cc$",
182 'SkRefPtr',
184 'The use of SkRefPtr is prohibited. ',
185 'Please use skia::RefPtr instead.'
187 True,
191 'SkAutoRef',
193 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
194 'Please use skia::RefPtr instead.'
196 True,
200 'SkAutoTUnref',
202 'The use of SkAutoTUnref is dangerous because it implicitly ',
203 'converts to a raw pointer. Please use skia::RefPtr instead.'
205 True,
209 'SkAutoUnref',
211 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
212 'because it implicitly converts to a raw pointer. ',
213 'Please use skia::RefPtr instead.'
215 True,
219 r'/HANDLE_EINTR\(.*close',
221 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
222 'descriptor will be closed, and it is incorrect to retry the close.',
223 'Either call close directly and ignore its return value, or wrap close',
224 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
226 True,
230 r'/IGNORE_EINTR\((?!.*close)',
232 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
233 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
235 True,
237 # Files that #define IGNORE_EINTR.
238 r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
239 r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
243 r'/v8::Extension\(',
245 'Do not introduce new v8::Extensions into the code base, use',
246 'gin::Wrappable instead. See http://crbug.com/334679',
248 True,
250 r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
# Warning text for uses of the deprecated IPC_ENUM_TRAITS() macro.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')
260 _VALID_OS_MACROS = (
261 # Please keep sorted.
262 'OS_ANDROID',
263 'OS_ANDROID_HOST',
264 'OS_BSD',
265 'OS_CAT', # For testing.
266 'OS_CHROMEOS',
267 'OS_FREEBSD',
268 'OS_IOS',
269 'OS_LINUX',
270 'OS_MACOSX',
271 'OS_NACL',
272 'OS_NACL_NONSFI',
273 'OS_NACL_SFI',
274 'OS_OPENBSD',
275 'OS_POSIX',
276 'OS_QNX',
277 'OS_SOLARIS',
278 'OS_WIN',
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  # Matches names of the form test::Foo, FooForTest(ing) or foo_for_test(ing).
  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  # Only calls (name followed by '(') are flagged.
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  # Occurrences inside a // comment are ignored.
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # Qualified names (::FooForTesting) and definitions (pattern followed by a
  # function body '{') are not treated as calls.
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    # Scan only implementation files that are neither excluded paths nor
    # test code.
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  # Collect every affected header whose new contents pull in <iostream>.
  offenders = [
      f for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile)
      if f.LocalPath().endswith('.h') and pattern.search(input_api.ReadFile(f))
  ]
  if offenders:
    return [ output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        offenders) ]
  return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST"""
  problems = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.mm')):
      continue
    # Flag both 'UNIT_TEST ' mid-line and a bare 'UNIT_TEST' at end of line.
    problems.extend(
        ' %s:%d' % (f.LocalPath(), line_num)
        for line_num, line in f.ChangedContents()
        if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'))

  if problems:
    return [output_api.PresubmitPromptWarning(
        'UNIT_TEST is only for headers.\n' + '\n'.join(problems))]
  return []
def _FindHistogramNameInLine(histogram_name, line):
  """Tries to find a histogram name or prefix in a line."""
  if "affected-histogram" not in line:
    # Ordinary line: a plain substring check suffices.
    return histogram_name in line
  # A histogram_suffixes tag type has an affected-histogram name as a prefix of
  # the histogram_name; the prefix is the first double-quoted token.
  if '"' not in line:
    return False
  return line.split('"')[1] in histogram_name
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself.
  """
  touched_histograms = []  # [histogram_name, affected_file, line_num] triples.
  histograms_xml_modifications = []
  # Captures the quoted name argument of any UMA_HISTOGRAM* macro call.
  pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
      continue
    if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      continue
    for line_num, line in f.ChangedContents():
      found = pattern.search(line)
      if found:
        touched_histograms.append([found.group(1), f, line_num])

  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, on the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
      if histogram_name_found:
        break
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)

  problems = []
  if unmatched_histograms:
    # Fall back to scanning the checked-in histograms.xml; the file is rewound
    # and re-read once per still-unmatched histogram name.
    with open('tools/metrics/histograms/histograms.xml') as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
          if histogram_name_found:
            break
        if not histogram_name_found:
          problems.append(' [%s:%d] %s' %
                          (f.LocalPath(), line_num, histogram_name))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
      'been modified and the associated histogram name has no match in either '
      'metrics/histograms.xml or the modifications of it:', problems)]
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    path = f.LocalPath()
    # Windows-specific sources and tests are allowed to use wstring.
    if (not path.endswith(('.cc', '.h')) or
        path.endswith(('test.cc', '_win.cc', '_win.h')) or
        '/win/' in path):
      continue

    # A 'presubmit: allow wstring' marker line whitelists the next line only.
    allow_next_line = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        allow_next_line = True
        continue
      if 'wstring' in line and not allow_next_line:
        problems.append(' %s:%d' % (path, line_num))
      allow_next_line = False

  if problems:
    return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
        ' If you are calling a cross-platform API that accepts a wstring, '
        'fix the API.\n' +
        '\n'.join(problems))]
  return []
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.DEPS.git'):
      continue
    return [output_api.PresubmitError(
        'Never commit changes to .DEPS.git. This file is maintained by an\n'
        'automated system based on what\'s in DEPS and your changes will be\n'
        'overwritten.\n'
        'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
        'for more information')]
  return []
def _CheckValidHostsInDEPS(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
    return []
  # Outsource work to gclient verify
  try:
    input_api.subprocess.check_output(['gclient', 'verify'])
    return []
  except input_api.subprocess.CalledProcessError as error:
    # Fixed: the old 'except X, error' spelling is Python-2-only; the 'as'
    # form is valid on Python 2.6+ and Python 3.
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used.

  Scans changed Objective-C files against _BANNED_OBJC_FUNCTIONS and changed
  C++ files against _BANNED_CPP_FUNCTIONS, producing prompt warnings for
  non-fatal entries and presubmit errors for fatal ones.
  """
  warnings = []
  errors = []

  def _Matches(func_name, line):
    # An entry starting with '/' holds a regular expression after the slash;
    # anything else is matched as a plain substring.
    if func_name[0:1] == '/':
      return input_api.re.search(func_name[1:], line) is not None
    return func_name in line

  def _Report(f, line_num, message, error):
    # Route the report to |errors| or |warnings| depending on severity.
    problems = errors if error else warnings
    problems.append(' %s:%d:' % (f.LocalPath(), line_num))
    for message_line in message:
      problems.append(' %s' % message_line)

  def _IsBlacklisted(affected_file, blacklist):
    local_path = affected_file.LocalPath()
    return any(input_api.re.match(item, local_path) for item in blacklist)

  # Objective-C(++) sources and headers.
  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        if _Matches(func_name, line):
          _Report(f, line_num, message, error)

  # C++ sources and headers; entries may carry per-path exclusions.
  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        if _IsBlacklisted(f, excluded_paths):
          continue
        if _Matches(func_name, line):
          _Report(f, line_num, message, error)

  result = []
  if warnings:
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files.

  Fixed: the previous docstring was copy-pasted from the banned-functions
  check and described the wrong behavior.
  """
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      # Fixed: use search() rather than match(). The ternary almost never
      # starts at column zero, so anchoring at line start missed nearly all
      # real occurrences of the pattern this check documents.
      if pattern.search(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  import sys
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  # [local_path, [changed lines]] pairs, the input shape CheckAddedCppIncludes
  # consumes.
  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      # Broken '-' rules are hard failures (see docstring).
      error_descriptions.append(description_with_path)
    else:
      # Broken '!' rules are only surfaced as warnings.
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  # Permission bits are not meaningful on Windows.
  if input_api.platform == 'win32':
    return []
  args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
          '--root', input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    args.extend(['--file', f.LocalPath()])
  checkperms = input_api.subprocess.Popen(args,
                                          stdout=input_api.subprocess.PIPE)
  errors = checkperms.communicate()[0].strip()
  if not errors:
    return []
  return [output_api.PresubmitError('checkperms.py failed.',
                                    errors.splitlines())]
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = [
      ' %s:%d' % (f.LocalPath(), line_num)
      for f in input_api.AffectedFiles()
      if f.LocalPath().endswith('.h')
      for line_num, line in f.ChangedContents()
      if pattern.match(line)
  ]

  if not errors:
    return []
  return [output_api.PresubmitError(
      'Header files should not include ui/aura/window_property.h', errors)]
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files

  Returns warning strings for out-of-order lines that touch
  |changed_linenums|.
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  # State machine: the include kind may only advance C -> C++ -> project;
  # within one kind, lines must be alphabetically sorted.
  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      # Fixed: the old code appended a bare |line_num| here, which would have
      # crashed the tuple unpacking below had this branch ever fired. It is
      # unreachable for scopes built from include lines only, but keep the
      # entry shape consistent to be safe.
      problem_linenums.append((line_num, previous_line_num))
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append(' %s:%d' % (file_path, line_num))
  return warnings
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  # specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0  # 1-based once incremented; doubles as the scan resume point.

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
        break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      # A preprocessor directive closes the current scope.
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      # Only include lines participate in the ordering check.
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if not f.LocalPath().endswith(('.cc', '.h')):
      continue
    changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
    warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  if not warnings:
    return []
  return [output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                             warnings)]
def _CheckForVersionControlConflictsInFile(input_api, f):
  """Returns one error string per changed line of |f| that still carries a
  version-control conflict marker (<<<<<<<, >>>>>>> or =======)."""
  marker_pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  return [' %s:%d %s' % (f.LocalPath(), line_num, line)
          for line_num, line in f.ChangedContents()
          if marker_pattern.match(line)]
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  if not errors:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', errors)]
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  # Each problem is a (filename, line_number, line) triple; commented-out
  # occurrences are not reported.
  problems = [
      (f.LocalPath(), line_num, line)
      for f in input_api.AffectedSourceFiles(FilterFile)
      for line_num, line in f.ChangedContents()
      if not comment_pattern.search(line) and pattern.search(line)
  ]

  if not problems:
    return []
  return [output_api.PresubmitPromptOrNotify(
      'Most layers below src/chrome/ should not hardcode service URLs.\n'
      'Are you sure this is correct?',
      [' %s:%d: %s' % problem for problem in problems])]
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  # Matches names containing a single-letter word, e.g. foo_h.png, foo_h_bar.png.
  white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
  black_list = (r'^native_client_sdk[\\\/]',)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=white_list, black_list=black_list)
  errors = [' %s' % f.LocalPath()
            for f in input_api.AffectedFiles(include_deletes=False,
                                             file_filter=file_filter)]

  if not errors:
    return []
  return [output_api.PresubmitError(
      'The name of PNG files should not have abbreviations. \n'
      'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
      'Contact oshima@chromium.org if you have questions.', errors)]
def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")

  results = set()
  for changed_line in changed_lines:
    match = pattern.match(changed_line)
    if not match:
      continue
    path = match.group(1)
    if path.split('/')[0] in AUTO_GENERATED_DIRS:
      continue
    basename = match.group(2)
    if basename:
      results.add('%s%s' % (path, basename))
    else:
      results.add('%s/DEPS' % path)
  return results
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  # Collect the stripped text of every changed line in any DEPS file.
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  # Turn "+path" entries into concrete file paths whose OWNERS must approve.
  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                          changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    # Missing approvals block a commit...
    output = output_api.PresubmitError
  else:
    # ...but are only informational at upload time.
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # Fall back to the CL author when Rietveld does not know the owner.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
             '\n '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n %s' %
          '\n '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Warns on changed implementation files that write to the console log.

  Flags LOG(INFO)/DLOG(INFO)/[D]LOG_IF(INFO, ...) and printf/fprintf to
  stdout/stderr.  Files whose purpose is logging or console output (logging
  internals, diagnostics, tools, benchmarks, installers) are black-listed.
  """
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^ipc[\\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    # Scan the whole file, not just the changed lines.
    contents = input_api.ReadFile(f, 'rb')
    if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if input_api.re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]
  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []
1067 def _CheckForAnonymousVariables(input_api, output_api):
1068 """These types are all expected to hold locks while in scope and
1069 so should never be anonymous (which causes them to be immediately
1070 destroyed)."""
1071 they_who_must_be_named = [
1072 'base::AutoLock',
1073 'base::AutoReset',
1074 'base::AutoUnlock',
1075 'SkAutoAlphaRestore',
1076 'SkAutoBitmapShaderInstall',
1077 'SkAutoBlitterChoose',
1078 'SkAutoBounderCommit',
1079 'SkAutoCallProc',
1080 'SkAutoCanvasRestore',
1081 'SkAutoCommentBlock',
1082 'SkAutoDescriptor',
1083 'SkAutoDisableDirectionCheck',
1084 'SkAutoDisableOvalCheck',
1085 'SkAutoFree',
1086 'SkAutoGlyphCache',
1087 'SkAutoHDC',
1088 'SkAutoLockColors',
1089 'SkAutoLockPixels',
1090 'SkAutoMalloc',
1091 'SkAutoMaskFreeImage',
1092 'SkAutoMutexAcquire',
1093 'SkAutoPathBoundsUpdate',
1094 'SkAutoPDFRelease',
1095 'SkAutoRasterClipValidate',
1096 'SkAutoRef',
1097 'SkAutoTime',
1098 'SkAutoTrace',
1099 'SkAutoUnref',
1101 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1102 # bad: base::AutoLock(lock.get());
1103 # not bad: base::AutoLock lock(lock.get());
1104 bad_pattern = input_api.re.compile(anonymous)
1105 # good: new base::AutoLock(lock.get())
1106 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1107 errors = []
1109 for f in input_api.AffectedFiles():
1110 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1111 continue
1112 for linenum, line in f.ChangedContents():
1113 if bad_pattern.search(line) and not good_pattern.search(line):
1114 errors.append('%s:%d' % (f.LocalPath(), linenum))
1116 if errors:
1117 return [output_api.PresubmitError(
1118 'These lines create anonymous variables that need to be named:',
1119 items=errors)]
1120 return []
1123 def _CheckCygwinShell(input_api, output_api):
1124 source_file_filter = lambda x: input_api.FilterSourceFile(
1125 x, white_list=(r'.+\.(gyp|gypi)$',))
1126 cygwin_shell = []
1128 for f in input_api.AffectedSourceFiles(source_file_filter):
1129 for linenum, line in f.ChangedContents():
1130 if 'msvs_cygwin_shell' in line:
1131 cygwin_shell.append(f.LocalPath())
1132 break
1134 if cygwin_shell:
1135 return [output_api.PresubmitError(
1136 'These files should not use msvs_cygwin_shell (the default is 0):',
1137 items=cygwin_shell)]
1138 return []
1141 def _CheckUserActionUpdate(input_api, output_api):
1142 """Checks if any new user action has been added."""
1143 if any('actions.xml' == input_api.os_path.basename(f) for f in
1144 input_api.LocalPaths()):
1145 # If actions.xml is already included in the changelist, the PRESUBMIT
1146 # for actions.xml will do a more complete presubmit check.
1147 return []
1149 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1150 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
1151 current_actions = None
1152 for f in input_api.AffectedFiles(file_filter=file_filter):
1153 for line_num, line in f.ChangedContents():
1154 match = input_api.re.search(action_re, line)
1155 if match:
1156 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1157 # loaded only once.
1158 if not current_actions:
1159 with open('tools/metrics/actions/actions.xml') as actions_f:
1160 current_actions = actions_f.read()
1161 # Search for the matched user action name in |current_actions|.
1162 for action_name in match.groups():
1163 action = 'name="{0}"'.format(action_name)
1164 if action not in current_actions:
1165 return [output_api.PresubmitPromptWarning(
1166 'File %s line %d: %s is missing in '
1167 'tools/metrics/actions/actions.xml. Please run '
1168 'tools/metrics/actions/extract_actions.py to update.'
1169 % (f.LocalPath(), line_num, action_name))]
1170 return []
1173 def _GetJSONParseError(input_api, filename, eat_comments=True):
1174 try:
1175 contents = input_api.ReadFile(filename)
1176 if eat_comments:
1177 json_comment_eater = input_api.os_path.join(
1178 input_api.PresubmitLocalPath(),
1179 'tools', 'json_comment_eater', 'json_comment_eater.py')
1180 process = input_api.subprocess.Popen(
1181 [input_api.python_executable, json_comment_eater],
1182 stdin=input_api.subprocess.PIPE,
1183 stdout=input_api.subprocess.PIPE,
1184 universal_newlines=True)
1185 (contents, _) = process.communicate(input=contents)
1187 input_api.json.loads(contents)
1188 except ValueError as e:
1189 return e
1190 return None
def _GetIDLParseError(input_api, filename):
  """Returns the IDL schema compiler's error output for |filename|, or
  None when the file parses cleanly."""
  try:
    contents = input_api.ReadFile(filename)
    schema_tool = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    checker = input_api.subprocess.Popen(
        [input_api.python_executable, schema_tool],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    _, error = checker.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  # Parser to run, keyed by file extension.
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  saved_sys_path = sys.path
  try:
    # The checkstyle wrapper lives outside the import path; extend it
    # temporarily so the module can be imported.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = saved_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
def _CheckForCopyrightedCode(input_api, output_api):
  """Verifies that newly added code doesn't contain copyrighted material
  and is properly licensed under the standard Chromium license.

  As there can be false positives, we maintain a whitelist file. This check
  also verifies that the whitelist file is up to date.
  """
  import sys
  original_sys_path = sys.path
  try:
    # The scanner lives outside the import path; extend it temporarily.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'android_webview', 'tools')]
    import copyright_scanner
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return copyright_scanner.ScanAtPresubmit(input_api, output_api)
1310 def _CheckSingletonInHeaders(input_api, output_api):
1311 """Checks to make sure no header files have |Singleton<|."""
1312 def FileFilter(affected_file):
1313 # It's ok for base/memory/singleton.h to have |Singleton<|.
1314 black_list = (_EXCLUDED_PATHS +
1315 input_api.DEFAULT_BLACK_LIST +
1316 (r"^base[\\\/]memory[\\\/]singleton\.h$",))
1317 return input_api.FilterSourceFile(affected_file, black_list=black_list)
1319 pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
1320 files = []
1321 for f in input_api.AffectedSourceFiles(FileFilter):
1322 if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
1323 f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
1324 contents = input_api.ReadFile(f)
1325 for line in contents.splitlines(False):
1326 if (not input_api.re.match(r'//', line) and # Strip C++ comment.
1327 pattern.search(line)):
1328 files.append(f)
1329 break
1331 if files:
1332 return [ output_api.PresubmitError(
1333 'Found Singleton<T> in the following header files.\n' +
1334 'Please move them to an appropriate source file so that the ' +
1335 'template gets instantiated in a single compilation unit.',
1336 files) ]
1337 return []
1340 _DEPRECATED_CSS = [
1341 # Values
1342 ( "-webkit-box", "flex" ),
1343 ( "-webkit-inline-box", "inline-flex" ),
1344 ( "-webkit-flex", "flex" ),
1345 ( "-webkit-inline-flex", "inline-flex" ),
1346 ( "-webkit-min-content", "min-content" ),
1347 ( "-webkit-max-content", "max-content" ),
1349 # Properties
1350 ( "-webkit-background-clip", "background-clip" ),
1351 ( "-webkit-background-origin", "background-origin" ),
1352 ( "-webkit-background-size", "background-size" ),
1353 ( "-webkit-box-shadow", "box-shadow" ),
1355 # Functions
1356 ( "-webkit-gradient", "gradient" ),
1357 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1358 ( "-webkit-linear-gradient", "linear-gradient" ),
1359 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1360 ( "-webkit-radial-gradient", "radial-gradient" ),
1361 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation is ignored by the hooks as it
      needs to be consumed by WebKit. """
  results = []
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  css_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=(r".+\.css$",), black_list=black_list)
  for affected in input_api.AffectedFiles(file_filter=css_filter):
    for line_num, line in affected.ChangedContents():
      results.extend(
          output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (affected.LocalPath(), line_num, deprecated, replacement))
          for deprecated, replacement in _DEPRECATED_CSS
          if deprecated in line)
  return results
1389 _DEPRECATED_JS = [
1390 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
1391 ( "__defineGetter__", "Object.defineProperty" ),
1392 ( "__defineSetter__", "Object.defineProperty" ),
def _CheckNoDeprecatedJS(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  results = []
  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST)
  # TODO(dbeam): also scan .html files?
  js_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=(r".+\.js$",), black_list=black_list)
  for affected in input_api.AffectedFiles(file_filter=js_filter):
    for lnum, line in affected.ChangedContents():
      results.extend(
          output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (affected.LocalPath(), lnum, deprecated, replacement))
          for deprecated, replacement in _DEPRECATED_JS
          if deprecated in line)
  return results
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  # Generic depot_tools project-wide checks (license headers, long lines...).
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  # TODO(danakj): Remove this when base/move.h is removed.
  results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckNoDeprecatedJS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForCopyrightedCode(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))

  # When this file itself is modified, also run its own unit tests.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
1467 def _CheckAuthorizedAuthor(input_api, output_api):
1468 """For non-googler/chromites committers, verify the author's email address is
1469 in AUTHORS.
1471 # TODO(maruel): Add it to input_api?
1472 import fnmatch
1474 author = input_api.change.author_email
1475 if not author:
1476 input_api.logging.info('No author, skipping AUTHOR check')
1477 return []
1478 authors_path = input_api.os_path.join(
1479 input_api.PresubmitLocalPath(), 'AUTHORS')
1480 valid_authors = (
1481 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1482 for line in open(authors_path))
1483 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1484 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1485 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1486 return [output_api.PresubmitPromptWarning(
1487 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1488 '\n'
1489 'http://www.chromium.org/developers/contributing-code and read the '
1490 '"Legal" section\n'
1491 'If you are a chromite, verify the contributor signed the CLA.') %
1492 author)]
1493 return []
1496 def _CheckPatchFiles(input_api, output_api):
1497 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1498 if f.LocalPath().endswith(('.orig', '.rej'))]
1499 if problems:
1500 return [output_api.PresubmitError(
1501 "Don't commit .rej and .orig files.", problems)]
1502 else:
1503 return []
1506 def _DidYouMeanOSMacro(bad_macro):
1507 try:
1508 return {'A': 'OS_ANDROID',
1509 'B': 'OS_BSD',
1510 'C': 'OS_CHROMEOS',
1511 'F': 'OS_FREEBSD',
1512 'L': 'OS_LINUX',
1513 'M': 'OS_MACOSX',
1514 'N': 'OS_NACL',
1515 'O': 'OS_OPENBSD',
1516 'P': 'OS_POSIX',
1517 'S': 'OS_SOLARIS',
1518 'W': 'OS_WIN'}[bad_macro[3].upper()]
1519 except KeyError:
1520 return ''
1523 def _CheckForInvalidOSMacrosInFile(input_api, f):
1524 """Check for sensible looking, totally invalid OS macros."""
1525 preprocessor_statement = input_api.re.compile(r'^\s*#')
1526 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
1527 results = []
1528 for lnum, line in f.ChangedContents():
1529 if preprocessor_statement.search(line):
1530 for match in os_macro.finditer(line):
1531 if not match.group(1) in _VALID_OS_MACROS:
1532 good = _DidYouMeanOSMacro(match.group(1))
1533 did_you_mean = ' (did you mean %s?)' % good if good else ''
1534 results.append(' %s:%d %s%s' % (f.LocalPath(),
1535 lnum,
1536 match.group(1),
1537 did_you_mean))
1538 return results
1541 def _CheckForInvalidOSMacros(input_api, output_api):
1542 """Check all affected files for invalid OS macros."""
1543 bad_macros = []
1544 for f in input_api.AffectedFiles():
1545 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1546 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1548 if not bad_macros:
1549 return []
1551 return [output_api.PresubmitError(
1552 'Possibly invalid OS macro[s] found. Please fix your code\n'
1553 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1556 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
1557 """Check all affected files for invalid "if defined" macros."""
1558 ALWAYS_DEFINED_MACROS = (
1559 "TARGET_CPU_PPC",
1560 "TARGET_CPU_PPC64",
1561 "TARGET_CPU_68K",
1562 "TARGET_CPU_X86",
1563 "TARGET_CPU_ARM",
1564 "TARGET_CPU_MIPS",
1565 "TARGET_CPU_SPARC",
1566 "TARGET_CPU_ALPHA",
1567 "TARGET_IPHONE_SIMULATOR",
1568 "TARGET_OS_EMBEDDED",
1569 "TARGET_OS_IPHONE",
1570 "TARGET_OS_MAC",
1571 "TARGET_OS_UNIX",
1572 "TARGET_OS_WIN32",
1574 ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
1575 results = []
1576 for lnum, line in f.ChangedContents():
1577 for match in ifdef_macro.finditer(line):
1578 if match.group(1) in ALWAYS_DEFINED_MACROS:
1579 always_defined = ' %s is always defined. ' % match.group(1)
1580 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
1581 results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
1582 lnum,
1583 always_defined,
1584 did_you_mean))
1585 return results
1588 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
1589 """Check all affected files for invalid "if defined" macros."""
1590 bad_macros = []
1591 for f in input_api.AffectedFiles():
1592 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1593 bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
1595 if not bad_macros:
1596 return []
1598 return [output_api.PresubmitError(
1599 'Found ifdef check on always-defined macro[s]. Please fix your code\n'
1600 'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
1601 bad_macros)]
1604 def _CheckForUsingSideEffectsOfPass(input_api, output_api):
1605 """Check all affected files for using side effects of Pass."""
1606 errors = []
1607 for f in input_api.AffectedFiles():
1608 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1609 for lnum, line in f.ChangedContents():
1610 # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
1611 if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
1612 errors.append(output_api.PresubmitError(
1613 ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
1614 'See crbug.com/418297.') % (f.LocalPath(), lnum)))
1615 return errors
1618 def _CheckForIPCRules(input_api, output_api):
1619 """Check for same IPC rules described in
1620 http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
1622 base_pattern = r'IPC_ENUM_TRAITS\('
1623 inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
1624 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
1626 problems = []
1627 for f in input_api.AffectedSourceFiles(None):
1628 local_path = f.LocalPath()
1629 if not local_path.endswith('.h'):
1630 continue
1631 for line_number, line in f.ChangedContents():
1632 if inclusion_pattern.search(line) and not comment_pattern.search(line):
1633 problems.append(
1634 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1636 if problems:
1637 return [output_api.PresubmitPromptWarning(
1638 _IPC_ENUM_TRAITS_DEPRECATED, problems)]
1639 else:
1640 return []
1643 def _CheckForWindowsLineEndings(input_api, output_api):
1644 """Check source code and known ascii text files for Windows style line
1645 endings.
1647 known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'
1649 file_inclusion_pattern = (
1650 known_text_files,
1651 r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1654 filter = lambda f: input_api.FilterSourceFile(
1655 f, white_list=file_inclusion_pattern, black_list=None)
1656 files = [f.LocalPath() for f in
1657 input_api.AffectedSourceFiles(filter)]
1659 problems = []
1661 for file in files:
1662 fp = open(file, 'r')
1663 for line in fp:
1664 if line.endswith('\r\n'):
1665 problems.append(file)
1666 break
1667 fp.close()
1669 if problems:
1670 return [output_api.PresubmitPromptWarning('Are you sure that you want '
1671 'these files to contain Windows style line endings?\n' +
1672 '\n'.join(problems))]
1674 return []
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks run at upload time, on top of _CommonChecks."""
  upload_checks = (
      _CommonChecks,
      _CheckValidHostsInDEPS,
      _CheckJavaStyle,
      input_api.canned_checks.CheckGNFormatted,
      _CheckUmaHistogramChanges,
  )
  results = []
  for check in upload_checks:
    results.extend(check(input_api, output_api))
  return results
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = master_map.get(bot)
  if not master:
    # Fall back to substring heuristics on the bot name.
    if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
      master = 'tryserver.chromium.linux'
    elif 'win' in bot:
      master = 'tryserver.chromium.win'
    elif 'mac' in bot or 'ios' in bot:
      master = 'tryserver.chromium.mac'
  return master
def GetDefaultTryConfigs(bots):
  """Returns a list of ('bot', set(['tests']), filtered by [bots].
  """
  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  # items() (rather than the Python-2-only iteritems()) works on both
  # Python 2 and Python 3.
  for bot, tests in builders_and_tests.items():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks run at commit time, on top of _CommonChecks."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  canned = input_api.canned_checks
  # Make sure the tree is 'open'.
  results.extend(canned.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(canned.CheckChangeHasBugField(input_api, output_api))
  results.extend(canned.CheckChangeHasDescription(input_api, output_api))
  return results
def GetPreferredTryMasters(project, change):
  """Returns the CQ try bots to trigger, grouped by try server master.

  Reads the commit queue's config.json, takes its launched try bots, and
  removes both bots the CQ triggers off other bots and presubmit builders.
  """
  import os
  import json
  # (The old unused |import re| and |files| local were dropped.)
  with open(os.path.join(
      change.RepositoryRoot(), 'testing', 'commit_queue', 'config.json')) as f:
    cq_config = json.load(f)
  cq_verifiers = cq_config.get('verifiers_no_patch', {})
  cq_try_jobs = cq_verifiers.get('try_job_verifier', {})
  builders = cq_try_jobs.get('launched', {})

  # Bots that are triggered by other bots must not be scheduled directly.
  for master, master_config in cq_try_jobs.get('triggered', {}).items():
    for triggered_bot in master_config:
      builders.get(master, {}).pop(triggered_bot, None)

  # Explicitly iterate over copies of the key lists since we mutate the
  # dicts while looping (a live dict view would raise on Python 3).
  for master in list(builders.keys()):
    for builder in list(builders[master].keys()):
      # Do not trigger presubmit builders, since they're likely to fail
      # (e.g. OWNERS checks before finished code review), and we're
      # running local presubmit anyway.
      if 'presubmit' in builder:
        builders[master].pop(builder)

  return builders