#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""MB - the Meta-Build wrapper around GYP and GN

MB is a wrapper script for GYP and GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""
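
# Illustrative invocations (a sketch, not an exhaustive list; the config,
# master, and builder names below are hypothetical -- the real ones live in
# //tools/mb/mb_config.pyl):
#
#   mb gen -c some_gn_config //out/Default
#   mb gen -m some.master -b 'Some Builder' //out/Release
#   mb analyze -c some_gyp_config //out/Release input.json output.json
#   mb lookup -m some.master -b 'Some Builder'
#   mb validate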

from __future__ import print_function

import argparse
import ast
import errno
import json
import os
import pipes
import pprint
import shlex
import shutil
import sys
import subprocess
import tempfile

def main(args):
  mbw = MetaBuildWrapper()
  mbw.ParseArgs(args)
  return mbw.args.func()


class MetaBuildWrapper(object):
  def __init__(self):
    p = os.path
    d = os.path.dirname
    self.chromium_src_dir = p.normpath(d(d(d(p.abspath(__file__)))))
    self.default_config = p.join(self.chromium_src_dir, 'tools', 'mb',
                                 'mb_config.pyl')
    self.platform = sys.platform
    self.args = argparse.Namespace()
    self.configs = {}
    self.masters = {}
    self.mixins = {}
    self.private_configs = []
    self.common_dev_configs = []
    self.unsupported_configs = []

  def ParseArgs(self, argv):
    def AddCommonOptions(subp):
      subp.add_argument('-b', '--builder',
                        help='builder name to look up config from')
      subp.add_argument('-m', '--master',
                        help='master name to look up config from')
      subp.add_argument('-c', '--config',
                        help='configuration to analyze')
      subp.add_argument('-f', '--config-file', metavar='PATH',
                        default=self.default_config,
                        help='path to config file '
                             '(default is //tools/mb/mb_config.pyl)')
      subp.add_argument('-g', '--goma-dir', default=self.ExpandUser('~/goma'),
                        help='path to goma directory (default is %(default)s).')
      subp.add_argument('-n', '--dryrun', action='store_true',
                        help='Do a dry run (i.e., do nothing, just print '
                             'the commands that will run)')
      subp.add_argument('-v', '--verbose', action='store_true',
                        help='verbose logging')

    parser = argparse.ArgumentParser(prog='mb')
    subps = parser.add_subparsers()

    subp = subps.add_parser('analyze',
                            help='analyze whether changes to a set of files '
                                 'will cause a set of binaries to be rebuilt.')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in file.')
    subp.add_argument('path', nargs=1,
                      help='path build was generated into.')
    subp.add_argument('input_path', nargs=1,
                      help='path to a file containing the input arguments '
                           'as a JSON object.')
    subp.add_argument('output_path', nargs=1,
                      help='path to a file containing the output arguments '
                           'as a JSON object.')
    subp.set_defaults(func=self.CmdAnalyze)

    subp = subps.add_parser('gen',
                            help='generate a new set of build files')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in file.')
    subp.add_argument('path', nargs=1,
                      help='path to generate build into')
    subp.set_defaults(func=self.CmdGen)

    subp = subps.add_parser('lookup',
                            help='look up the command for a given config or '
                                 'builder')
    AddCommonOptions(subp)
    subp.set_defaults(func=self.CmdLookup)

    subp = subps.add_parser('validate',
                            help='validate the config file')
    subp.add_argument('-f', '--config-file', metavar='PATH',
                      default=self.default_config,
                      help='path to config file '
                           '(default is //tools/mb/mb_config.pyl)')
    subp.set_defaults(func=self.CmdValidate)

    subp = subps.add_parser('help',
                            help='Get help on a subcommand.')
    subp.add_argument(nargs='?', action='store', dest='subcommand',
                      help='The command to get help for.')
    subp.set_defaults(func=self.CmdHelp)

    self.args = parser.parse_args(argv)

  def CmdAnalyze(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      return self.RunGNAnalyze(vals)
    elif vals['type'] == 'gyp':
      return self.RunGYPAnalyze(vals)
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdGen(self):
    vals = self.GetConfig()

    self.ClobberIfNeeded(vals)

    if vals['type'] == 'gn':
      return self.RunGNGen(vals)
    if vals['type'] == 'gyp':
      return self.RunGYPGen(vals)

    raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdLookup(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      cmd = self.GNCmd('gen', '<path>', vals['gn_args'])
    elif vals['type'] == 'gyp':
      if vals['gyp_crosscompile']:
        self.Print('GYP_CROSSCOMPILE=1')
      cmd = self.GYPCmd('<path>', vals['gyp_defines'], vals['gyp_config'])
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

    self.PrintCmd(cmd)
    return 0

  def CmdHelp(self):
    if self.args.subcommand:
      self.ParseArgs([self.args.subcommand, '--help'])
    else:
      self.ParseArgs(['--help'])

  def CmdValidate(self):
    errs = []

    # Read the file to make sure it parses.
    self.ReadConfigFile()

    # Figure out the whole list of configs and ensure that no config is
    # listed in more than one category.
    all_configs = {}
    for config in self.common_dev_configs:
      all_configs[config] = 'common_dev_configs'
    for config in self.private_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "private_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      else:
        all_configs[config] = 'private_configs'
    for config in self.unsupported_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "unsupported_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      else:
        all_configs[config] = 'unsupported_configs'

    for master in self.masters:
      for builder in self.masters[master]:
        config = self.masters[master][builder]
        if config in all_configs and all_configs[config] not in self.masters:
          errs.append('Config "%s" used by a bot is also listed in "%s".' %
                      (config, all_configs[config]))
        else:
          all_configs[config] = master

    # Check that every referenced config actually exists.
    for config, loc in all_configs.items():
      if not config in self.configs:
        errs.append('Unknown config "%s" referenced from "%s".' %
                    (config, loc))

    # Check that every actual config is actually referenced.
    for config in self.configs:
      if not config in all_configs:
        errs.append('Unused config "%s".' % config)

    # Figure out the whole list of mixins, and check that every mixin
    # listed by a config or another mixin actually exists.
    referenced_mixins = set()
    for config, mixins in self.configs.items():
      for mixin in mixins:
        if not mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by config "%s".' %
                      (mixin, config))
        referenced_mixins.add(mixin)

    for mixin in self.mixins:
      for sub_mixin in self.mixins[mixin].get('mixins', []):
        if not sub_mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
                      (sub_mixin, mixin))
        referenced_mixins.add(sub_mixin)

    # Check that every mixin defined is actually referenced somewhere.
    for mixin in self.mixins:
      if not mixin in referenced_mixins:
        errs.append('Unreferenced mixin "%s".' % mixin)

    if errs:
      raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
                  '\n ' + '\n '.join(errs))

    self.Print('mb config file %s looks ok.' % self.args.config_file)
    return 0

  def GetConfig(self):
    self.ReadConfigFile()
    config = self.ConfigFromArgs()
    if not config in self.configs:
      raise MBErr('Config "%s" not found in %s' %
                  (config, self.args.config_file))

    return self.FlattenConfig(config)

  def ReadConfigFile(self):
    if not self.Exists(self.args.config_file):
      raise MBErr('config file not found at %s' % self.args.config_file)

    try:
      contents = ast.literal_eval(self.ReadFile(self.args.config_file))
    except SyntaxError as e:
      raise MBErr('Failed to parse config file "%s": %s' %
                  (self.args.config_file, e))

    self.common_dev_configs = contents['common_dev_configs']
    self.configs = contents['configs']
    self.masters = contents['masters']
    self.mixins = contents['mixins']
    self.private_configs = contents['private_configs']
    self.unsupported_configs = contents['unsupported_configs']
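
  # For reference, mb_config.pyl is a Python literal with the keys read
  # above. A minimal sketch (the config and mixin names here are made up,
  # not real entries):
  #
  #   {
  #     'common_dev_configs': ['gn_debug_example'],
  #     'masters': {
  #       'some.master': {'Some Builder': 'gn_debug_example'},
  #     },
  #     'configs': {
  #       'gn_debug_example': ['gn', 'debug_mixin'],
  #     },
  #     'mixins': {
  #       'gn': {'type': 'gn'},
  #       'debug_mixin': {'gn_args': 'is_debug=true'},
  #     },
  #     'private_configs': [],
  #     'unsupported_configs': [],
  #   }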

  def ConfigFromArgs(self):
    if self.args.config:
      if self.args.master or self.args.builder:
        raise MBErr('Can not specify both -c/--config and -m/--master or '
                    '-b/--builder')

      return self.args.config

    if not self.args.master or not self.args.builder:
      raise MBErr('Must specify either -c/--config or '
                  '(-m/--master and -b/--builder)')

    if not self.args.master in self.masters:
      raise MBErr('Master name "%s" not found in "%s"' %
                  (self.args.master, self.args.config_file))

    if not self.args.builder in self.masters[self.args.master]:
      raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
                  (self.args.builder, self.args.master, self.args.config_file))

    return self.masters[self.args.master][self.args.builder]

  def FlattenConfig(self, config):
    mixins = self.configs[config]
    vals = {
      'type': None,
      'gn_args': [],
      'gyp_config': [],
      'gyp_defines': '',
      'gyp_crosscompile': False,
    }

    visited = []
    self.FlattenMixins(mixins, vals, visited)
    return vals
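
  # Note on flattening (illustrative, with hypothetical mixin names): a config
  # like ['gn', 'debug_mixin', 'goma_mixin'] is walked in order; scalar values
  # such as 'type' are overwritten by later mixins, string values like
  # 'gn_args' and 'gyp_defines' are concatenated with spaces, and nested
  # 'mixins' lists are flattened recursively.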

  def FlattenMixins(self, mixins, vals, visited):
    for m in mixins:
      if m not in self.mixins:
        raise MBErr('Unknown mixin "%s"' % m)

      # TODO: check for cycles in mixins.

      visited.append(m)

      mixin_vals = self.mixins[m]
      if 'type' in mixin_vals:
        vals['type'] = mixin_vals['type']
      if 'gn_args' in mixin_vals:
        if vals['gn_args']:
          vals['gn_args'] += ' ' + mixin_vals['gn_args']
        else:
          vals['gn_args'] = mixin_vals['gn_args']
      if 'gyp_config' in mixin_vals:
        vals['gyp_config'] = mixin_vals['gyp_config']
      if 'gyp_crosscompile' in mixin_vals:
        vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
      if 'gyp_defines' in mixin_vals:
        if vals['gyp_defines']:
          vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
        else:
          vals['gyp_defines'] = mixin_vals['gyp_defines']
      if 'mixins' in mixin_vals:
        self.FlattenMixins(mixin_vals['mixins'], vals, visited)
    return vals

  def ClobberIfNeeded(self, vals):
    path = self.args.path[0]
    build_dir = self.ToAbsPath(path)
    mb_type_path = os.path.join(build_dir, 'mb_type')
    needs_clobber = False
    new_mb_type = vals['type']
    if self.Exists(build_dir):
      if self.Exists(mb_type_path):
        old_mb_type = self.ReadFile(mb_type_path)
        if old_mb_type != new_mb_type:
          self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
                     (old_mb_type, new_mb_type, path))
          needs_clobber = True
      else:
        # There is no 'mb_type' file in the build directory, so this probably
        # means that the prior build(s) were not done through mb, and we
        # have no idea if this was a GYP build or a GN build. Clobber it
        # to be safe.
        self.Print("%s/mb_type missing, clobbering to be safe" % path)
        needs_clobber = True

    if needs_clobber:
      self.RemoveDirectory(build_dir)

    self.MaybeMakeDirectory(build_dir)
    self.WriteFile(mb_type_path, new_mb_type)

  def RunGNGen(self, vals):
    path = self.args.path[0]

    cmd = self.GNCmd('gen', path, vals['gn_args'])

    swarming_targets = []
    if self.args.swarming_targets_file:
      # We need GN to generate the list of runtime dependencies for
      # the compile targets listed (one per line) in the file so
      # we can run them via swarming. We use ninja_to_gn.pyl to convert
      # the compile targets to the matching GN labels.
      contents = self.ReadFile(self.args.swarming_targets_file)
      swarming_targets = contents.splitlines()
      gn_isolate_map = ast.literal_eval(self.ReadFile(os.path.join(
          self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
      gn_labels = []
      for target in swarming_targets:
        if not target in gn_isolate_map:
          raise MBErr('test target "%s" not found in %s' %
                      (target, '//testing/buildbot/gn_isolate_map.pyl'))
        gn_labels.append(gn_isolate_map[target]['label'])

      gn_runtime_deps_path = self.ToAbsPath(path, 'runtime_deps')

      # Since GN hasn't run yet, the build directory may not even exist.
      self.MaybeMakeDirectory(self.ToAbsPath(path))

      self.WriteFile(gn_runtime_deps_path, '\n'.join(gn_labels) + '\n')
      cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)

    ret, _, _ = self.Run(cmd)
    if ret:
      # If `gn gen` failed, we should exit early rather than trying to
      # generate isolates. Run() will have already logged any error output.
      self.Print('GN gen failed: %d' % ret)
      return ret

    for target in swarming_targets:
      if gn_isolate_map[target]['type'] == 'gpu_browser_test':
        runtime_deps_target = 'browser_tests'
      elif gn_isolate_map[target]['type'] == 'script':
        # For script targets, the build target is usually a group,
        # for which gn generates the runtime_deps next to the stamp file
        # for the label, which lives under the obj/ directory.
        label = gn_isolate_map[target]['label']
        runtime_deps_target = 'obj/%s.stamp' % label.replace(':', '/')
      else:
        runtime_deps_target = target
      if sys.platform == 'win32':
        deps_path = self.ToAbsPath(path,
                                   runtime_deps_target + '.exe.runtime_deps')
      else:
        deps_path = self.ToAbsPath(path,
                                   runtime_deps_target + '.runtime_deps')
      if not self.Exists(deps_path):
        raise MBErr('did not generate %s' % deps_path)

      command, extra_files = self.GetIsolateCommand(target, vals,
                                                    gn_isolate_map)

      runtime_deps = self.ReadFile(deps_path).splitlines()

      isolate_path = self.ToAbsPath(path, target + '.isolate')
      self.WriteFile(isolate_path,
        pprint.pformat({
          'variables': {
            'command': command,
            'files': sorted(runtime_deps + extra_files),
          },
        }) + '\n')

      self.WriteJSON(
        {
          'args': [
            '--isolated',
            self.ToSrcRelPath('%s%s%s.isolated' % (path, os.sep, target)),
            '--isolate',
            self.ToSrcRelPath('%s%s%s.isolate' % (path, os.sep, target)),
          ],
          'dir': self.chromium_src_dir,
          'version': 1,
        },
        isolate_path + 'd.gen.json',
      )

    return ret

  def GNCmd(self, subcommand, path, gn_args=''):
    if self.platform == 'linux2':
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'linux64',
                             'gn')
    elif self.platform == 'darwin':
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'mac',
                             'gn')
    else:
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'win',
                             'gn.exe')

    cmd = [gn_path, subcommand, path]
    gn_args = gn_args.replace("$(goma_dir)", self.args.goma_dir)
    if gn_args:
      cmd.append('--args=%s' % gn_args)
    return cmd
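
  # For example, on Linux a call like GNCmd('gen', '//out/Debug',
  # 'is_debug=true use_goma=true goma_dir=$(goma_dir)') would return roughly
  # (the gn_args and goma path here are made up for illustration):
  #
  #   ['<src>/buildtools/linux64/gn', 'gen', '//out/Debug',
  #    '--args=is_debug=true use_goma=true goma_dir=/home/user/goma']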

  def RunGYPGen(self, vals):
    path = self.args.path[0]

    output_dir, gyp_config = self.ParseGYPConfigPath(path)
    if gyp_config != vals['gyp_config']:
      raise MBErr('The last component of the path (%s) must match the '
                  'GYP configuration specified in the config (%s), and '
                  'it does not.' % (gyp_config, vals['gyp_config']))
    cmd = self.GYPCmd(output_dir, vals['gyp_defines'], config=gyp_config)
    env = None
    if vals['gyp_crosscompile']:
      if self.args.verbose:
        self.Print('Setting GYP_CROSSCOMPILE=1 in the environment')
      env = os.environ.copy()
      env['GYP_CROSSCOMPILE'] = '1'
    ret, _, _ = self.Run(cmd, env=env)
    return ret

  def RunGYPAnalyze(self, vals):
    output_dir, gyp_config = self.ParseGYPConfigPath(self.args.path[0])
    if gyp_config != vals['gyp_config']:
      raise MBErr('The last component of the path (%s) must match the '
                  'GYP configuration specified in the config (%s), and '
                  'it does not.' % (gyp_config, vals['gyp_config']))
    if self.args.verbose:
      inp = self.ReadInputJSON(['files', 'targets'])
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    cmd = self.GYPCmd(output_dir, vals['gyp_defines'], config=gyp_config)
    cmd.extend(['-f', 'analyzer',
                '-G', 'config_path=%s' % self.args.input_path[0],
                '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
    ret, _, _ = self.Run(cmd)
    if not ret and self.args.verbose:
      outp = json.loads(self.ReadFile(self.args.output_path[0]))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return ret

  def GetIsolateCommand(self, target, vals, gn_isolate_map):
    # This needs to mirror the settings in //build/config/ui.gni:
    # use_x11 = is_linux && !use_ozone.
    # TODO(dpranke): Figure out how to keep this in sync better.
    use_x11 = (sys.platform == 'linux2' and
               not 'target_os="android"' in vals['gn_args'] and
               not 'use_ozone=true' in vals['gn_args'])

    asan = 'is_asan=true' in vals['gn_args']
    msan = 'is_msan=true' in vals['gn_args']
    tsan = 'is_tsan=true' in vals['gn_args']

    executable_suffix = '.exe' if sys.platform == 'win32' else ''

    test_type = gn_isolate_map[target]['type']
    cmdline = []
    extra_files = []

    if use_x11 and test_type == 'windowed_test_launcher':
      extra_files = [
        'xdisplaycheck',
        '../../testing/test_env.py',
        '../../testing/xvfb.py',
      ]
      cmdline = [
        '../../testing/xvfb.py',
        '.',
        './' + str(target),
        '--brave-new-test-launcher',
        '--test-launcher-bot-mode',
        '--asan=%d' % asan,
        '--msan=%d' % msan,
        '--tsan=%d' % tsan,
      ]
    elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
      extra_files = [
        '../../testing/test_env.py'
      ]
      cmdline = [
        '../../testing/test_env.py',
        './' + str(target) + executable_suffix,
        '--brave-new-test-launcher',
        '--test-launcher-bot-mode',
        '--asan=%d' % asan,
        '--msan=%d' % msan,
        '--tsan=%d' % tsan,
      ]
    elif test_type == 'gpu_browser_test':
      extra_files = [
        '../../testing/test_env.py'
      ]
      gtest_filter = gn_isolate_map[target]['gtest_filter']
      cmdline = [
        '../../testing/test_env.py',
        './browser_tests' + executable_suffix,
        '--test-launcher-bot-mode',
        '--enable-gpu',
        '--test-launcher-jobs=1',
        '--gtest_filter=%s' % gtest_filter,
      ]
    elif test_type == 'script':
      extra_files = [
        '../../testing/test_env.py'
      ]
      cmdline = [
        '../../testing/test_env.py',
      ] + ['../../' + self.ToSrcRelPath(gn_isolate_map[target]['script'])]
    elif test_type == 'raw':
      extra_files = []
      cmdline = [
        './' + str(target) + executable_suffix,
      ] + gn_isolate_map[target].get('args', [])

    else:
      self.WriteFailureAndRaise('No command line for %s found (test type %s).'
                                % (target, test_type), output_path=None)

    return cmdline, extra_files
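
  # For reference, entries in //testing/buildbot/gn_isolate_map.pyl look
  # roughly like the following (a sketch with a made-up target name, not a
  # real entry):
  #
  #   {
  #     'some_unittests': {
  #       'label': '//some/path:some_unittests',
  #       'type': 'console_test_launcher',
  #     },
  #   }
  #
  # The other fields read above ('gtest_filter', 'script', 'args') appear
  # only for the corresponding test types.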

  def ToAbsPath(self, build_path, *comps):
    return os.path.join(self.chromium_src_dir,
                        self.ToSrcRelPath(build_path),
                        *comps)

  def ToSrcRelPath(self, path):
    """Returns a relative path from the top of the repo."""
    # TODO: Support normal paths in addition to source-absolute paths.
    assert(path.startswith('//'))
    return path[2:].replace('/', os.sep)

  def ParseGYPConfigPath(self, path):
    rpath = self.ToSrcRelPath(path)
    output_dir, _, config = rpath.rpartition('/')
    self.CheckGYPConfigIsSupported(config, path)
    return output_dir, config

  def CheckGYPConfigIsSupported(self, config, path):
    if config not in ('Debug', 'Release'):
      if (sys.platform in ('win32', 'cygwin') and
          config not in ('Debug_x64', 'Release_x64')):
        raise MBErr('Unknown or unsupported config type "%s" in "%s"' %
                    (config, path))

  def GYPCmd(self, output_dir, gyp_defines, config):
    gyp_defines = gyp_defines.replace("$(goma_dir)", self.args.goma_dir)
    cmd = [
        sys.executable,
        os.path.join('build', 'gyp_chromium'),
        '-G',
        'output_dir=' + output_dir,
        '-G',
        'config=' + config,
    ]
    for d in shlex.split(gyp_defines):
      cmd += ['-D', d]
    return cmd
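
  # As a rough example (the gyp_defines here are invented for illustration),
  # GYPCmd('out', 'use_goma=1 gomadir=$(goma_dir)', 'Release') would return
  # something like:
  #
  #   [sys.executable, 'build/gyp_chromium',
  #    '-G', 'output_dir=out', '-G', 'config=Release',
  #    '-D', 'use_goma=1', '-D', 'gomadir=/home/user/goma']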

  def RunGNAnalyze(self, vals):
    # analyze runs before 'gn gen' now, so we need to run gn gen
    # in order to ensure that we have a build directory.
    ret = self.RunGNGen(vals)
    if ret:
      return ret

    inp = self.ReadInputJSON(['files', 'targets'])
    if self.args.verbose:
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    output_path = self.args.output_path[0]

    # Bail out early if a GN file was modified, since 'gn refs' won't know
    # what to do about it.
    if any(f.endswith('.gn') or f.endswith('.gni') for f in inp['files']):
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    # Bail out early if 'all' was asked for, since 'gn refs' won't recognize it.
    if 'all' in inp['targets']:
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    # This shouldn't normally happen, but could due to unusual race conditions,
    # like a try job that gets scheduled before a patch lands but runs after
    # the patch has landed.
    if not inp['files']:
      self.Print('Warning: No files modified in patch, bailing out early.')
      self.WriteJSON({'targets': [],
                      'build_targets': [],
                      'status': 'No dependency'}, output_path)
      return 0

    ret = 0
    response_file = self.TempFile()
    response_file.write('\n'.join(inp['files']) + '\n')
    response_file.close()

    matching_targets = []
    try:
      cmd = self.GNCmd('refs', self.args.path[0]) + [
          '@%s' % response_file.name, '--all', '--as=output']
      ret, out, _ = self.Run(cmd, force_verbose=False)
      if ret and not 'The input matches no targets' in out:
        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                  output_path)
      build_dir = self.ToSrcRelPath(self.args.path[0]) + os.sep
      for output in out.splitlines():
        build_output = output.replace(build_dir, '')
        if build_output in inp['targets']:
          matching_targets.append(build_output)

      cmd = self.GNCmd('refs', self.args.path[0]) + [
          '@%s' % response_file.name, '--all']
      ret, out, _ = self.Run(cmd, force_verbose=False)
      if ret and not 'The input matches no targets' in out:
        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                  output_path)
      for label in out.splitlines():
        build_target = label[2:]
        # We want to accept 'chrome/android:chrome_public_apk' and
        # just 'chrome_public_apk'. This may result in too many targets
        # getting built, but we can adjust that later if need be.
        for input_target in inp['targets']:
          if (input_target == build_target or
              build_target.endswith(':' + input_target)):
            matching_targets.append(input_target)
    finally:
      self.RemoveFile(response_file.name)

    if matching_targets:
      # TODO: it could be that a target X might depend on a target Y
      # and both would be listed in the input, but we would only need
      # to specify target X as a build_target (whereas both X and Y are
      # targets). I'm not sure if that optimization is generally worth it.
      self.WriteJSON({'targets': sorted(set(matching_targets)),
                      'build_targets': sorted(set(matching_targets)),
                      'status': 'Found dependency'}, output_path)
    else:
      self.WriteJSON({'targets': [],
                      'build_targets': [],
                      'status': 'No dependency'}, output_path)

    if self.args.verbose:
      outp = json.loads(self.ReadFile(output_path))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return 0
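
  # The analyze protocol, as the code above reads and writes it: the input
  # JSON has 'files' and 'targets' keys, and the output JSON has a 'status'
  # key, plus 'targets' and 'build_targets' when dependencies are found. A
  # sketch (file and target names invented for illustration):
  #
  #   input:  {"files": ["foo/bar.cc"], "targets": ["some_unittests"]}
  #   output: {"status": "Found dependency",
  #            "targets": ["some_unittests"],
  #            "build_targets": ["some_unittests"]}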

  def ReadInputJSON(self, required_keys):
    path = self.args.input_path[0]
    output_path = self.args.output_path[0]
    if not self.Exists(path):
      self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)

    try:
      inp = json.loads(self.ReadFile(path))
    except Exception as e:
      self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
                                (path, e), output_path)

    for k in required_keys:
      if not k in inp:
        self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
                                  output_path)

    return inp

  def WriteFailureAndRaise(self, msg, output_path):
    if output_path:
      self.WriteJSON({'error': msg}, output_path, force_verbose=True)
    raise MBErr(msg)

  def WriteJSON(self, obj, path, force_verbose=False):
    try:
      self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n',
                     force_verbose=force_verbose)
    except Exception as e:
      raise MBErr('Error %s writing to the output path "%s"' %
                  (e, path))

  def PrintCmd(self, cmd):
    if cmd[0] == sys.executable:
      cmd = ['python'] + cmd[1:]
    self.Print(*[pipes.quote(c) for c in cmd])

  def PrintJSON(self, obj):
    self.Print(json.dumps(obj, indent=2, sort_keys=True))

  def Print(self, *args, **kwargs):
    # This function largely exists so it can be overridden for testing.
    print(*args, **kwargs)

  def Run(self, cmd, env=None, force_verbose=True):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.PrintCmd(cmd)
    if self.args.dryrun:
      return 0, '', ''

    ret, out, err = self.Call(cmd, env=env)
    if self.args.verbose or force_verbose:
      if out:
        self.Print(out, end='')
      if err:
        self.Print(err, end='', file=sys.stderr)
    return ret, out, err

  def Call(self, cmd, env=None):
    p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    out, err = p.communicate()
    return p.returncode, out, err

  def ExpandUser(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.expanduser(path)

  def Exists(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.exists(path)

  def MaybeMakeDirectory(self, path):
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise

  def ReadFile(self, path):
    # This function largely exists so it can be overridden for testing.
    with open(path) as fp:
      return fp.read()

  def RemoveFile(self, path):
    # This function largely exists so it can be overridden for testing.
    os.remove(path)

  def RemoveDirectory(self, abs_path):
    if sys.platform == 'win32':
      # In other places in chromium, we often have to retry this command
      # because we're worried about other processes still holding on to
      # file handles, but when MB is invoked, it will be early enough in the
      # build that there should be no other processes to interfere. We
      # can change this if need be.
      self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
    else:
      shutil.rmtree(abs_path, ignore_errors=True)

  def TempFile(self, mode='w'):
    # This function largely exists so it can be overridden for testing.
    return tempfile.NamedTemporaryFile(mode=mode, delete=False)

  def WriteFile(self, path, contents, force_verbose=False):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
    with open(path, 'w') as fp:
      return fp.write(contents)


class MBErr(Exception):
  pass


if __name__ == '__main__':
  try:
    sys.exit(main(sys.argv[1:]))
  except MBErr as e:
    print(e)
    sys.exit(1)
  except KeyboardInterrupt:
    print("interrupted, exiting", file=sys.stderr)
    sys.exit(130)