# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""MB - the Meta-Build wrapper around GYP and GN

MB is a wrapper script for GYP and GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""
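
# Example invocations (illustrative only; the master, builder, and config
# names below are hypothetical -- see ParseArgs() for the full option set):
#
#   mb gen -m tryserver.chromium.linux -b linux_rel //out/Release
#   mb gen -c some_gn_config //out/Default
#   mb analyze -c some_gn_config //out/Default input.json output.json
#   mb validate -f tools/mb/mb_config.pyl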

from __future__ import print_function

import argparse
import ast
import errno
import json
import os
import pipes
import pprint
import shutil
import subprocess
import sys
import tempfile


def main(args):
  mbw = MetaBuildWrapper()
  mbw.ParseArgs(args)
  return mbw.args.func()


class MetaBuildWrapper(object):
  def __init__(self):
    p = os.path
    d = p.dirname
    self.chromium_src_dir = p.normpath(d(d(d(p.abspath(__file__)))))
    self.default_config = p.join(self.chromium_src_dir, 'tools', 'mb',
                                 'mb_config.pyl')
    self.executable = sys.executable
    self.platform = sys.platform
    self.sep = os.sep
    self.args = argparse.Namespace()
    self.configs = {}
    self.masters = {}
    self.mixins = {}
    self.private_configs = []
    self.common_dev_configs = []
    self.unsupported_configs = []

  def ParseArgs(self, argv):
    def AddCommonOptions(subp):
      subp.add_argument('-b', '--builder',
                        help='builder name to look up config from')
      subp.add_argument('-m', '--master',
                        help='master name to look up config from')
      subp.add_argument('-c', '--config',
                        help='configuration to analyze')
      subp.add_argument('-f', '--config-file', metavar='PATH',
                        default=self.default_config,
                        help='path to config file '
                             '(default is //tools/mb/mb_config.pyl)')
      subp.add_argument('-g', '--goma-dir', default=self.ExpandUser('~/goma'),
                        help='path to goma directory (default is %(default)s).')
      subp.add_argument('-n', '--dryrun', action='store_true',
                        help='Do a dry run (i.e., do nothing, just print '
                             'the commands that will run)')
      subp.add_argument('-v', '--verbose', action='store_true',
                        help='verbose logging')

    parser = argparse.ArgumentParser(prog='mb')
    subps = parser.add_subparsers()

    subp = subps.add_parser('analyze',
                            help='analyze whether changes to a set of files '
                                 'will cause a set of binaries to be rebuilt.')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in the file')
    subp.add_argument('path', nargs=1,
                      help='path build was generated into.')
    subp.add_argument('input_path', nargs=1,
                      help='path to a file containing the input arguments '
                           'as a JSON object')
    subp.add_argument('output_path', nargs=1,
                      help='path to a file containing the output arguments '
                           'as a JSON object')
    subp.set_defaults(func=self.CmdAnalyze)

    subp = subps.add_parser('gen',
                            help='generate a new set of build files')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in the file')
    subp.add_argument('path', nargs=1,
                      help='path to generate build into')
    subp.set_defaults(func=self.CmdGen)

    subp = subps.add_parser('lookup',
                            help='look up the command for a given config or '
                                 'builder')
    AddCommonOptions(subp)
    subp.set_defaults(func=self.CmdLookup)

    subp = subps.add_parser('validate',
                            help='validate the config file')
    subp.add_argument('-f', '--config-file', metavar='PATH',
                      default=self.default_config,
                      help='path to config file '
                           '(default is //tools/mb/mb_config.pyl)')
    subp.set_defaults(func=self.CmdValidate)

    subp = subps.add_parser('help',
                            help='Get help on a subcommand.')
    subp.add_argument(nargs='?', action='store', dest='subcommand',
                      help='The command to get help for.')
    subp.set_defaults(func=self.CmdHelp)

    self.args = parser.parse_args(argv)

  def CmdAnalyze(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      return self.RunGNAnalyze(vals)
    elif vals['type'] == 'gyp':
      return self.RunGYPAnalyze(vals)
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdGen(self):
    vals = self.GetConfig()

    self.ClobberIfNeeded(vals)

    if vals['type'] == 'gn':
      return self.RunGNGen(vals)
    if vals['type'] == 'gyp':
      return self.RunGYPGen(vals)

    raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdLookup(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      cmd = self.GNCmd('gen', '<path>', vals['gn_args'])
    elif vals['type'] == 'gyp':
      if vals['gyp_crosscompile']:
        self.Print('GYP_CROSSCOMPILE=1')
      cmd = self.GYPCmd('<path>', vals['gyp_defines'])
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

    self.PrintCmd(cmd)
    return 0

  def CmdHelp(self):
    if self.args.subcommand:
      self.ParseArgs([self.args.subcommand, '--help'])
    else:
      self.ParseArgs(['--help'])

  def CmdValidate(self):
    errs = []

    # Read the file to make sure it parses.
    self.ReadConfigFile()

    # Figure out the whole list of configs and ensure that no config is
    # listed in more than one category.
    all_configs = {}
    for config in self.common_dev_configs:
      all_configs[config] = 'common_dev_configs'
    for config in self.private_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "private_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      all_configs[config] = 'private_configs'
    for config in self.unsupported_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "unsupported_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      all_configs[config] = 'unsupported_configs'

    for master in self.masters:
      for builder in self.masters[master]:
        config = self.masters[master][builder]
        if config in all_configs and all_configs[config] not in self.masters:
          errs.append('Config "%s" used by a bot is also listed in "%s".' %
                      (config, all_configs[config]))
        all_configs[config] = master

    # Check that every referenced config actually exists.
    for config, loc in all_configs.items():
      if not config in self.configs:
        errs.append('Unknown config "%s" referenced from "%s".' %
                    (config, loc))

    # Check that every actual config is actually referenced.
    for config in self.configs:
      if not config in all_configs:
        errs.append('Unused config "%s".' % config)

    # Figure out the whole list of mixins, and check that every mixin
    # listed by a config or another mixin actually exists.
    referenced_mixins = set()
    for config, mixins in self.configs.items():
      for mixin in mixins:
        if not mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by config "%s".' %
                      (mixin, config))
        referenced_mixins.add(mixin)

    for mixin in self.mixins:
      for sub_mixin in self.mixins[mixin].get('mixins', []):
        if not sub_mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
                      (sub_mixin, mixin))
        referenced_mixins.add(sub_mixin)

    # Check that every mixin defined is actually referenced somewhere.
    for mixin in self.mixins:
      if not mixin in referenced_mixins:
        errs.append('Unreferenced mixin "%s".' % mixin)

    if errs:
      raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
                  '\n  ' + '\n  '.join(errs))

    self.Print('mb config file %s looks ok.' % self.args.config_file)
    return 0

  def GetConfig(self):
    self.ReadConfigFile()
    config = self.ConfigFromArgs()
    if not config in self.configs:
      raise MBErr('Config "%s" not found in %s' %
                  (config, self.args.config_file))

    return self.FlattenConfig(config)

  def ReadConfigFile(self):
    if not self.Exists(self.args.config_file):
      raise MBErr('config file not found at %s' % self.args.config_file)

    try:
      contents = ast.literal_eval(self.ReadFile(self.args.config_file))
    except SyntaxError as e:
      raise MBErr('Failed to parse config file "%s": %s' %
                  (self.args.config_file, e))

    self.common_dev_configs = contents['common_dev_configs']
    self.configs = contents['configs']
    self.masters = contents['masters']
    self.mixins = contents['mixins']
    self.private_configs = contents['private_configs']
    self.unsupported_configs = contents['unsupported_configs']
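
  # Shape of mb_config.pyl, as read above (the entries shown here are
  # hypothetical, purely to document the keys this script expects):
  #
  #   {
  #     'common_dev_configs': ['gn_debug'],
  #     'configs': {'gn_debug': ['gn', 'debug']},
  #     'masters': {'fake.master': {'fake_builder': 'gn_debug'}},
  #     'mixins': {'gn': {'type': 'gn'},
  #                'debug': {'gn_args': 'is_debug=true'}},
  #     'private_configs': [],
  #     'unsupported_configs': [],
  #   }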

  def ConfigFromArgs(self):
    if self.args.config:
      if self.args.master or self.args.builder:
        raise MBErr('Can not specify both -c/--config and -m/--master or '
                    '-b/--builder')

      return self.args.config

    if not self.args.master or not self.args.builder:
      raise MBErr('Must specify either -c/--config or '
                  '(-m/--master and -b/--builder)')

    if not self.args.master in self.masters:
      raise MBErr('Master name "%s" not found in "%s"' %
                  (self.args.master, self.args.config_file))

    if not self.args.builder in self.masters[self.args.master]:
      raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
                  (self.args.builder, self.args.master, self.args.config_file))

    return self.masters[self.args.master][self.args.builder]

  def FlattenConfig(self, config):
    mixins = self.configs[config]
    vals = {
      'type': None,
      'gn_args': '',
      'gyp_defines': '',
      'gyp_crosscompile': False,
    }

    visited = []
    self.FlattenMixins(mixins, vals, visited)
    return vals

  def FlattenMixins(self, mixins, vals, visited):
    for m in mixins:
      if m not in self.mixins:
        raise MBErr('Unknown mixin "%s"' % m)

      # TODO: check for cycles in mixins.
      visited.append(m)

      mixin_vals = self.mixins[m]
      if 'type' in mixin_vals:
        vals['type'] = mixin_vals['type']
      if 'gn_args' in mixin_vals:
        if vals['gn_args']:
          vals['gn_args'] += ' ' + mixin_vals['gn_args']
        else:
          vals['gn_args'] = mixin_vals['gn_args']
      if 'gyp_crosscompile' in mixin_vals:
        vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
      if 'gyp_defines' in mixin_vals:
        if vals['gyp_defines']:
          vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
        else:
          vals['gyp_defines'] = mixin_vals['gyp_defines']
      if 'mixins' in mixin_vals:
        self.FlattenMixins(mixin_vals['mixins'], vals, visited)
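
  # Illustrative example of the flattening above (the config and mixin names
  # here are hypothetical, not taken from mb_config.pyl): given
  #
  #   configs = {'gn_debug_bot': ['gn', 'debug', 'goma']}
  #   mixins  = {'gn':    {'type': 'gn'},
  #              'debug': {'gn_args': 'is_debug=true'},
  #              'goma':  {'gn_args': 'use_goma=true goma_dir="$(goma_dir)"'}}
  #
  # FlattenConfig('gn_debug_bot') returns roughly:
  #
  #   {'type': 'gn',
  #    'gn_args': 'is_debug=true use_goma=true goma_dir="$(goma_dir)"',
  #    'gyp_defines': '', 'gyp_crosscompile': False}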

  def ClobberIfNeeded(self, vals):
    path = self.args.path[0]
    build_dir = self.ToAbsPath(path)
    mb_type_path = self.PathJoin(build_dir, 'mb_type')
    needs_clobber = False
    new_mb_type = vals['type']
    if self.Exists(build_dir):
      if self.Exists(mb_type_path):
        old_mb_type = self.ReadFile(mb_type_path)
        if old_mb_type != new_mb_type:
          self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
                     (old_mb_type, new_mb_type, path))
          needs_clobber = True
      else:
        # There is no 'mb_type' file in the build directory, so this probably
        # means that the prior build(s) were not done through mb, and we
        # have no idea if this was a GYP build or a GN build. Clobber it
        # to be safe.
        self.Print("%s/mb_type missing, clobbering to be safe" % path)
        needs_clobber = True

    if needs_clobber:
      self.RemoveDirectory(build_dir)

    self.MaybeMakeDirectory(build_dir)
    self.WriteFile(mb_type_path, new_mb_type)

  def RunGNGen(self, vals):
    path = self.args.path[0]

    cmd = self.GNCmd('gen', path, vals['gn_args'])

    swarming_targets = []
    if self.args.swarming_targets_file:
      # We need GN to generate the list of runtime dependencies for
      # the compile targets listed (one per line) in the file so
      # we can run them via swarming. We use gn_isolate_map.pyl to convert
      # the compile targets to the matching GN labels.
      contents = self.ReadFile(self.args.swarming_targets_file)
      swarming_targets = contents.splitlines()
      gn_isolate_map = ast.literal_eval(self.ReadFile(self.PathJoin(
          self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
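
      # For reference, an entry in gn_isolate_map.pyl maps a compile target
      # name to its GN label and test type, roughly like this (hypothetical
      # entry, shown only to document the keys read below):
      #
      #   'base_unittests': {
      #     'label': '//base:base_unittests',
      #     'type': 'console_test_launcher',
      #   },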
      gn_labels = []
      for target in swarming_targets:
        if not target in gn_isolate_map:
          raise MBErr('test target "%s" not found in %s' %
                      (target, '//testing/buildbot/gn_isolate_map.pyl'))
        gn_labels.append(gn_isolate_map[target]['label'])

      gn_runtime_deps_path = self.ToAbsPath(path, 'runtime_deps')

      # Since GN hasn't run yet, the build directory may not even exist.
      self.MaybeMakeDirectory(self.ToAbsPath(path))

      self.WriteFile(gn_runtime_deps_path, '\n'.join(gn_labels) + '\n')
      cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)

    ret, _, _ = self.Run(cmd)
    if ret:
      # If `gn gen` failed, we should exit early rather than trying to
      # generate isolates. Run() will have already logged any error output.
      self.Print('GN gen failed: %d' % ret)
      return ret

    for target in swarming_targets:
      if gn_isolate_map[target]['type'] == 'gpu_browser_test':
        runtime_deps_target = 'browser_tests'
      elif gn_isolate_map[target]['type'] == 'script':
        # For script targets, the build target is usually a group,
        # for which gn generates the runtime_deps next to the stamp file
        # for the label, which lives under the obj/ directory.
        label = gn_isolate_map[target]['label']
        runtime_deps_target = 'obj/%s.stamp' % label.replace(':', '/')
      else:
        runtime_deps_target = target

      if self.platform == 'win32':
        deps_path = self.ToAbsPath(path,
                                   runtime_deps_target + '.exe.runtime_deps')
      else:
        deps_path = self.ToAbsPath(path,
                                   runtime_deps_target + '.runtime_deps')
      if not self.Exists(deps_path):
        raise MBErr('did not generate %s' % deps_path)

      command, extra_files = self.GetIsolateCommand(target, vals,
                                                    gn_isolate_map)

      runtime_deps = self.ReadFile(deps_path).splitlines()

      isolate_path = self.ToAbsPath(path, target + '.isolate')
      self.WriteFile(isolate_path,
                     pprint.pformat({
                       'variables': {
                         'command': command,
                         'files': sorted(runtime_deps + extra_files),
                       }
                     }) + '\n')

      self.WriteJSON(
          {
            'args': [
              '--isolated',
              self.ToSrcRelPath('%s%s%s.isolated' % (path, self.sep, target)),
              '--isolate',
              self.ToSrcRelPath('%s%s%s.isolate' % (path, self.sep, target)),
            ],
            'dir': self.chromium_src_dir,
            'version': 1,
          },
          isolate_path + 'd.gen.json',
      )

    return 0

  def GNCmd(self, subcommand, path, gn_args=''):
    if self.platform == 'linux2':
      subdir = 'linux64'
    elif self.platform == 'darwin':
      subdir = 'mac'
    else:
      subdir = 'win'
    gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, 'gn')

    cmd = [gn_path, subcommand, path]
    gn_args = gn_args.replace("$(goma_dir)", self.args.goma_dir)
    if gn_args:
      cmd.append('--args=%s' % gn_args)
    return cmd

  def RunGYPGen(self, vals):
    path = self.args.path[0]

    output_dir = self.ParseGYPConfigPath(path)
    cmd = self.GYPCmd(output_dir, vals['gyp_defines'])
    env = None
    if vals['gyp_crosscompile']:
      if self.args.verbose:
        self.Print('Setting GYP_CROSSCOMPILE=1 in the environment')
      env = os.environ.copy()
      env['GYP_CROSSCOMPILE'] = '1'
    ret, _, _ = self.Run(cmd, env=env)
    return ret

  def RunGYPAnalyze(self, vals):
    output_dir = self.ParseGYPConfigPath(self.args.path[0])
    if self.args.verbose:
      inp = self.ReadInputJSON(['files', 'targets'])
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    cmd = self.GYPCmd(output_dir, vals['gyp_defines'])
    cmd.extend(['-f', 'analyzer',
                '-G', 'config_path=%s' % self.args.input_path[0],
                '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
    ret, _, _ = self.Run(cmd)
    if not ret and self.args.verbose:
      outp = json.loads(self.ReadFile(self.args.output_path[0]))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return ret

  def GetIsolateCommand(self, target, vals, gn_isolate_map):
    # This needs to mirror the settings in //build/config/ui.gni:
    #   use_x11 = is_linux && !use_ozone.
    # TODO(dpranke): Figure out how to keep this in sync better.
    use_x11 = (self.platform == 'linux2' and
               not 'target_os="android"' in vals['gn_args'] and
               not 'use_ozone=true' in vals['gn_args'])

    asan = 'is_asan=true' in vals['gn_args']
    msan = 'is_msan=true' in vals['gn_args']
    tsan = 'is_tsan=true' in vals['gn_args']

    executable_suffix = '.exe' if self.platform == 'win32' else ''

    test_type = gn_isolate_map[target]['type']

    if use_x11 and test_type == 'windowed_test_launcher':
      extra_files = [
        '../../testing/test_env.py',
        '../../testing/xvfb.py',
      ]
      cmdline = [
        '../../testing/xvfb.py',
        '.',
        './' + str(target),
        '--brave-new-test-launcher',
        '--test-launcher-bot-mode',
        '--asan=%d' % asan,
        '--msan=%d' % msan,
        '--tsan=%d' % tsan,
      ]
    elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
      extra_files = [
        '../../testing/test_env.py'
      ]
      cmdline = [
        '../../testing/test_env.py',
        './' + str(target) + executable_suffix,
        '--brave-new-test-launcher',
        '--test-launcher-bot-mode',
        '--asan=%d' % asan,
        '--msan=%d' % msan,
        '--tsan=%d' % tsan,
      ]
    elif test_type == 'gpu_browser_test':
      extra_files = [
        '../../testing/test_env.py'
      ]
      gtest_filter = gn_isolate_map[target]['gtest_filter']
      cmdline = [
        '../../testing/test_env.py',
        './browser_tests' + executable_suffix,
        '--test-launcher-bot-mode',
        '--test-launcher-jobs=1',
        '--gtest_filter=%s' % gtest_filter,
      ]
    elif test_type == 'script':
      extra_files = [
        '../../testing/test_env.py'
      ]
      cmdline = [
        '../../testing/test_env.py',
      ] + ['../../' + self.ToSrcRelPath(gn_isolate_map[target]['script'])]
    elif test_type == 'raw':
      extra_files = []
      cmdline = [
        './' + str(target) + executable_suffix,
      ] + gn_isolate_map[target].get('args', [])
    else:
      self.WriteFailureAndRaise('No command line for %s found (test type %s).'
                                % (target, test_type), output_path=None)

    return cmdline, extra_files

  def ToAbsPath(self, build_path, *comps):
    return self.PathJoin(self.chromium_src_dir,
                         self.ToSrcRelPath(build_path),
                         *comps)

  def ToSrcRelPath(self, path):
    """Returns a relative path from the top of the repo."""
    # TODO: Support normal paths in addition to source-absolute paths.
    assert(path.startswith('//'))
    return path[2:].replace('/', self.sep)
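
  # For example (illustrative), ToSrcRelPath('//out/Release') returns
  # 'out/Release' on POSIX and 'out\\Release' on Windows, since '/' is
  # rewritten to the platform separator.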

  def ParseGYPConfigPath(self, path):
    rpath = self.ToSrcRelPath(path)
    output_dir, _, _ = rpath.rpartition(self.sep)
    return output_dir

  def GYPCmd(self, output_dir, gyp_defines):
    gyp_defines = gyp_defines.replace("$(goma_dir)", self.args.goma_dir)
    cmd = [
        self.executable,
        self.PathJoin('build', 'gyp_chromium'),
        '-G',
        'output_dir=' + output_dir,
    ]
    for d in gyp_defines.split(' '):
      cmd.append('-D%s' % d)
    return cmd
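
  # For illustration (the define below is hypothetical),
  # GYPCmd('out/Release', 'component=shared_library') returns roughly:
  #
  #   [sys.executable, 'build/gyp_chromium', '-G', 'output_dir=out/Release',
  #    '-Dcomponent=shared_library']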

  def RunGNAnalyze(self, vals):
    # analyze runs before 'gn gen' now, so we need to run gn gen
    # in order to ensure that we have a build directory.
    ret = self.RunGNGen(vals)
    if ret:
      return ret

    inp = self.ReadInputJSON(['files', 'targets'])
    if self.args.verbose:
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    output_path = self.args.output_path[0]

    # Bail out early if a GN file was modified, since 'gn refs' won't know
    # what to do about it.
    if any(f.endswith('.gn') or f.endswith('.gni') for f in inp['files']):
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    # Bail out early if 'all' was asked for, since 'gn refs' won't recognize it.
    if 'all' in inp['targets']:
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    # This shouldn't normally happen, but could due to unusual race conditions,
    # like a try job that gets scheduled before a patch lands but runs after
    # the patch has landed.
    if not inp['files']:
      self.Print('Warning: No files modified in patch, bailing out early.')
      self.WriteJSON({'targets': [],
                      'build_targets': [],
                      'status': 'No dependency'}, output_path)
      return 0

    response_file = self.TempFile()
    response_file.write('\n'.join(inp['files']) + '\n')
    response_file.close()

    matching_targets = []

    cmd = self.GNCmd('refs', self.args.path[0]) + [
        '@%s' % response_file.name, '--all', '--as=output']
    ret, out, _ = self.Run(cmd, force_verbose=False)
    if ret and not 'The input matches no targets' in out:
      self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                output_path)
    build_dir = self.ToSrcRelPath(self.args.path[0]) + self.sep
    for output in out.splitlines():
      build_output = output.replace(build_dir, '')
      if build_output in inp['targets']:
        matching_targets.append(build_output)

    cmd = self.GNCmd('refs', self.args.path[0]) + [
        '@%s' % response_file.name, '--all']
    ret, out, _ = self.Run(cmd, force_verbose=False)
    if ret and not 'The input matches no targets' in out:
      self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                output_path)
    for label in out.splitlines():
      build_target = label[2:]
      # We want to accept 'chrome/android:chrome_public_apk' and
      # just 'chrome_public_apk'. This may result in too many targets
      # getting built, but we can adjust that later if need be.
      for input_target in inp['targets']:
        if (input_target == build_target or
            build_target.endswith(':' + input_target)):
          matching_targets.append(input_target)

    self.RemoveFile(response_file.name)

    if matching_targets:
      # TODO: it could be that a target X might depend on a target Y
      # and both would be listed in the input, but we would only need
      # to specify target X as a build_target (whereas both X and Y are
      # targets). I'm not sure if that optimization is generally worth it.
      self.WriteJSON({'targets': sorted(set(matching_targets)),
                      'build_targets': sorted(set(matching_targets)),
                      'status': 'Found dependency'}, output_path)
    else:
      self.WriteJSON({'targets': [],
                      'build_targets': [],
                      'status': 'No dependency'}, output_path)

    if self.args.verbose:
      outp = json.loads(self.ReadFile(output_path))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return 0
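
  # The analyze output written above is a JSON object of the form (values are
  # illustrative):
  #
  #   {'status': 'Found dependency',
  #    'targets': ['base_unittests'],
  #    'build_targets': ['base_unittests']}
  #
  # where 'status' is one of 'Found dependency', 'Found dependency (all)',
  # or 'No dependency'.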

  def ReadInputJSON(self, required_keys):
    path = self.args.input_path[0]
    output_path = self.args.output_path[0]
    if not self.Exists(path):
      self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)

    try:
      inp = json.loads(self.ReadFile(path))
    except Exception as e:
      self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
                                (path, e), output_path)

    for k in required_keys:
      if not k in inp:
        self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
                                  output_path)

    return inp

  def WriteFailureAndRaise(self, msg, output_path):
    if output_path:
      self.WriteJSON({'error': msg}, output_path, force_verbose=True)
    raise MBErr(msg)

  def WriteJSON(self, obj, path, force_verbose=False):
    try:
      self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n',
                     force_verbose=force_verbose)
    except Exception as e:
      raise MBErr('Error %s writing to the output path "%s"' %
                  (e, path))

  def PrintCmd(self, cmd):
    if cmd[0] == self.executable:
      cmd = ['python'] + cmd[1:]
    self.Print(*[pipes.quote(c) for c in cmd])

  def PrintJSON(self, obj):
    self.Print(json.dumps(obj, indent=2, sort_keys=True))

  def Print(self, *args, **kwargs):
    # This function largely exists so it can be overridden for testing.
    print(*args, **kwargs)

  def Run(self, cmd, env=None, force_verbose=True):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.PrintCmd(cmd)
    if self.args.dryrun:
      return 0, '', ''
    ret, out, err = self.Call(cmd, env=env)
    if self.args.verbose or force_verbose:
      if out:
        self.Print(out, end='')
      if err:
        self.Print(err, end='', file=sys.stderr)
    return ret, out, err

  def Call(self, cmd, env=None):
    p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    out, err = p.communicate()
    return p.returncode, out, err

  def ExpandUser(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.expanduser(path)

  def Exists(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.exists(path)

  def MaybeMakeDirectory(self, path):
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise

  def PathJoin(self, *comps):
    # This function largely exists so it can be overridden for testing.
    return os.path.join(*comps)

  def ReadFile(self, path):
    # This function largely exists so it can be overridden for testing.
    with open(path) as fp:
      return fp.read()

  def RemoveFile(self, path):
    # This function largely exists so it can be overridden for testing.
    os.remove(path)

  def RemoveDirectory(self, abs_path):
    if self.platform == 'win32':
      # In other places in chromium, we often have to retry this command
      # because we're worried about other processes still holding on to
      # file handles, but when MB is invoked, it will be early enough in the
      # build that there should be no other processes to interfere. We
      # can change this if need be.
      self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
    else:
      shutil.rmtree(abs_path, ignore_errors=True)

  def TempFile(self, mode='w'):
    # This function largely exists so it can be overridden for testing.
    return tempfile.NamedTemporaryFile(mode=mode, delete=False)

  def WriteFile(self, path, contents, force_verbose=False):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
    with open(path, 'w') as fp:
      return fp.write(contents)


class MBErr(Exception):
  pass


if __name__ == '__main__':
  try:
    sys.exit(main(sys.argv[1:]))
  except MBErr as e:
    print(e)
    sys.exit(1)
  except KeyboardInterrupt:
    print("interrupted, exiting", file=sys.stderr)
    sys.exit(130)