#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""MB - the Meta-Build wrapper around GYP and GN

MB is a wrapper script for GYP and GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""

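# A minimal usage sketch (illustrative only; the config, master, and builder
# names below are hypothetical and must exist in //tools/mb/mb_config.pyl):
#
#   tools/mb/mb.py gen -c gn_debug_bot //out/Debug
#   tools/mb/mb.py lookup -m chromium.linux -b 'Linux Builder'
#   tools/mb/mb.py analyze -c gyp_release_bot //out/Release in.json out.json
#   tools/mb/mb.py validate
#
# Build paths are source-absolute (they start with '//'), and a configuration
# is selected either with -c/--config or with both -m/--master and
# -b/--builder.
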
from __future__ import print_function

import argparse
import ast
import errno
import json
import os
import pipes
import pprint
import shlex
import shutil
import sys
import subprocess
import tempfile


def main(args):
  mbw = MetaBuildWrapper()
  mbw.ParseArgs(args)
  return mbw.args.func()


class MetaBuildWrapper(object):
  def __init__(self):
    p = os.path
    d = os.path.dirname
    self.chromium_src_dir = p.normpath(d(d(d(p.abspath(__file__)))))
    self.default_config = p.join(self.chromium_src_dir, 'tools', 'mb',
                                 'mb_config.pyl')
    self.platform = sys.platform
    self.args = argparse.Namespace()
    self.configs = {}
    self.masters = {}
    self.mixins = {}
    self.private_configs = []
    self.common_dev_configs = []
    self.unsupported_configs = []

  def ParseArgs(self, argv):
    def AddCommonOptions(subp):
      subp.add_argument('-b', '--builder',
                        help='builder name to look up config from')
      subp.add_argument('-m', '--master',
                        help='master name to look up config from')
      subp.add_argument('-c', '--config',
                        help='configuration to analyze')
      subp.add_argument('-f', '--config-file', metavar='PATH',
                        default=self.default_config,
                        help='path to config file '
                             '(default is //tools/mb/mb_config.pyl)')
      subp.add_argument('-g', '--goma-dir', default=self.ExpandUser('~/goma'),
                        help='path to goma directory (default is %(default)s).')
      subp.add_argument('-n', '--dryrun', action='store_true',
                        help='Do a dry run (i.e., do nothing, just print '
                             'the commands that will run)')
      subp.add_argument('-q', '--quiet', action='store_true',
                        help='Do not print anything, just return an exit '
                             'code.')
      subp.add_argument('-v', '--verbose', action='count',
                        help='verbose logging (may specify multiple times).')

    parser = argparse.ArgumentParser(prog='mb')
    subps = parser.add_subparsers()

    subp = subps.add_parser('analyze',
                            help='analyze whether changes to a set of files '
                                 'will cause a set of binaries to be rebuilt.')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in file.')
    subp.add_argument('path', nargs=1,
                      help='path build was generated into.')
    subp.add_argument('input_path', nargs=1,
                      help='path to a file containing the input arguments '
                           'as a JSON object.')
    subp.add_argument('output_path', nargs=1,
                      help='path to a file containing the output arguments '
                           'as a JSON object.')
    subp.set_defaults(func=self.CmdAnalyze)

    subp = subps.add_parser('gen',
                            help='generate a new set of build files')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in file.')
    subp.add_argument('path', nargs=1,
                      help='path to generate build into')
    subp.set_defaults(func=self.CmdGen)

    subp = subps.add_parser('lookup',
                            help='look up the command for a given config or '
                                 'builder')
    AddCommonOptions(subp)
    subp.set_defaults(func=self.CmdLookup)

    subp = subps.add_parser('validate',
                            help='validate the config file')
    AddCommonOptions(subp)
    subp.set_defaults(func=self.CmdValidate)

    subp = subps.add_parser('help',
                            help='Get help on a subcommand.')
    subp.add_argument(nargs='?', action='store', dest='subcommand',
                      help='The command to get help for.')
    subp.set_defaults(func=self.CmdHelp)

    self.args = parser.parse_args(argv)

  def CmdAnalyze(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      return self.RunGNAnalyze(vals)
    elif vals['type'] == 'gyp':
      return self.RunGYPAnalyze(vals)
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdGen(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      return self.RunGNGen(vals)
    if vals['type'] == 'gyp':
      return self.RunGYPGen(vals)

    raise MBErr('Unknown meta-build type "%s"' % vals['type'])

  def CmdLookup(self):
    vals = self.GetConfig()
    if vals['type'] == 'gn':
      cmd = self.GNCmd('gen', '<path>', vals['gn_args'])
    elif vals['type'] == 'gyp':
      cmd = self.GYPCmd('<path>', vals['gyp_defines'], vals['gyp_config'])
    else:
      raise MBErr('Unknown meta-build type "%s"' % vals['type'])

    self.PrintCmd(cmd)
    return 0

  def CmdHelp(self):
    if self.args.subcommand:
      self.ParseArgs([self.args.subcommand, '--help'])
    else:
      self.ParseArgs(['--help'])

  def CmdValidate(self):
    errs = []

    # Read the file to make sure it parses.
    self.ReadConfigFile()

    # Figure out the whole list of configs and ensure that no config is
    # listed in more than one category.
    all_configs = {}
    for config in self.common_dev_configs:
      all_configs[config] = 'common_dev_configs'
    for config in self.private_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "private_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      else:
        all_configs[config] = 'private_configs'
    for config in self.unsupported_configs:
      if config in all_configs:
        errs.append('config "%s" listed in "unsupported_configs" also '
                    'listed in "%s"' % (config, all_configs[config]))
      else:
        all_configs[config] = 'unsupported_configs'

    for master in self.masters:
      for builder in self.masters[master]:
        config = self.masters[master][builder]
        if config in all_configs and all_configs[config] not in self.masters:
          errs.append('Config "%s" used by a bot is also listed in "%s".' %
                      (config, all_configs[config]))
        else:
          all_configs[config] = master

    # Check that every referenced config actually exists.
    for config, loc in all_configs.items():
      if not config in self.configs:
        errs.append('Unknown config "%s" referenced from "%s".' %
                    (config, loc))

    # Check that every actual config is actually referenced.
    for config in self.configs:
      if not config in all_configs:
        errs.append('Unused config "%s".' % config)

    # Figure out the whole list of mixins, and check that every mixin
    # listed by a config or another mixin actually exists.
    referenced_mixins = set()
    for config, mixins in self.configs.items():
      for mixin in mixins:
        if not mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by config "%s".' %
                      (mixin, config))
        referenced_mixins.add(mixin)

    for mixin in self.mixins:
      for sub_mixin in self.mixins[mixin].get('mixins', []):
        if not sub_mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
                      (sub_mixin, mixin))
        referenced_mixins.add(sub_mixin)

    # Check that every mixin defined is actually referenced somewhere.
    for mixin in self.mixins:
      if not mixin in referenced_mixins:
        errs.append('Unreferenced mixin "%s".' % mixin)

    if errs:
      raise MBErr('mb config file %s has problems:\n  %s' %
                  (self.args.config_file, '\n  '.join(errs)))

    if not self.args.quiet:
      self.Print('mb config file %s looks ok.' % self.args.config_file)
    return 0

  def GetConfig(self):
    self.ReadConfigFile()
    config = self.ConfigFromArgs()
    if not config in self.configs:
      raise MBErr('Config "%s" not found in %s' %
                  (config, self.args.config_file))

    return self.FlattenConfig(config)

  def ReadConfigFile(self):
    if not self.Exists(self.args.config_file):
      raise MBErr('config file not found at %s' % self.args.config_file)

    try:
      contents = ast.literal_eval(self.ReadFile(self.args.config_file))
    except SyntaxError as e:
      raise MBErr('Failed to parse config file "%s": %s' %
                  (self.args.config_file, e))

    self.common_dev_configs = contents['common_dev_configs']
    self.configs = contents['configs']
    self.masters = contents['masters']
    self.mixins = contents['mixins']
    self.private_configs = contents['private_configs']
    self.unsupported_configs = contents['unsupported_configs']

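  # A minimal sketch of the mb_config.pyl structure that ReadConfigFile
  # expects (the names below are hypothetical, not taken from the real file):
  #
  #   {
  #     'common_dev_configs': ['gn_debug'],
  #     'private_configs': [],
  #     'unsupported_configs': [],
  #     'masters': {'chromium.example': {'Example Builder': 'gn_debug'}},
  #     'configs': {'gn_debug': ['gn', 'debug']},
  #     'mixins': {'gn': {'type': 'gn'},
  #                'debug': {'gn_args': 'is_debug=true'}},
  #   }
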
  def ConfigFromArgs(self):
    if self.args.config:
      if self.args.master or self.args.builder:
        raise MBErr('Can not specify both -c/--config and -m/--master or '
                    '-b/--builder')

      return self.args.config

    if not self.args.master or not self.args.builder:
      raise MBErr('Must specify either -c/--config or '
                  '(-m/--master and -b/--builder)')

    if not self.args.master in self.masters:
      raise MBErr('Master name "%s" not found in "%s"' %
                  (self.args.master, self.args.config_file))

    if not self.args.builder in self.masters[self.args.master]:
      raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
                  (self.args.builder, self.args.master, self.args.config_file))

    return self.masters[self.args.master][self.args.builder]

  def FlattenConfig(self, config):
    mixins = self.configs[config]
    vals = {
      'type': None,
      'gn_args': [],
      'gyp_config': [],
      'gyp_defines': [],
    }

    visited = []
    self.FlattenMixins(mixins, vals, visited)
    return vals

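  # A sketch of how flattening behaves (config and mixin names hypothetical):
  # given
  #   'configs': {'gn_debug_goma': ['gn', 'debug', 'goma']}
  #   'mixins':  {'gn': {'type': 'gn'},
  #               'debug': {'gn_args': 'is_debug=true'},
  #               'goma': {'gn_args': 'use_goma=true goma_dir="$(goma_dir)"'}}
  # FlattenConfig('gn_debug_goma') returns
  #   {'type': 'gn',
  #    'gn_args': 'is_debug=true use_goma=true goma_dir="$(goma_dir)"', ...}
  # because scalar keys like 'type' are overwritten by later mixins while the
  # gn_args/gyp_defines strings are joined with spaces; "$(goma_dir)" is
  # substituted later, in GNCmd/GYPCmd.
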
  def FlattenMixins(self, mixins, vals, visited):
    for m in mixins:
      if m not in self.mixins:
        raise MBErr('Unknown mixin "%s"' % m)

      # TODO: check for cycles in mixins.

      visited.append(m)

      mixin_vals = self.mixins[m]
      if 'type' in mixin_vals:
        vals['type'] = mixin_vals['type']
      if 'gn_args' in mixin_vals:
        if vals['gn_args']:
          vals['gn_args'] += ' ' + mixin_vals['gn_args']
        else:
          vals['gn_args'] = mixin_vals['gn_args']
      if 'gyp_config' in mixin_vals:
        vals['gyp_config'] = mixin_vals['gyp_config']
      if 'gyp_defines' in mixin_vals:
        if vals['gyp_defines']:
          vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
        else:
          vals['gyp_defines'] = mixin_vals['gyp_defines']
      if 'mixins' in mixin_vals:
        self.FlattenMixins(mixin_vals['mixins'], vals, visited)
    return vals

  def RunGNGen(self, vals):
    path = self.args.path[0]

    cmd = self.GNCmd('gen', path, vals['gn_args'])

    swarming_targets = []
    if self.args.swarming_targets_file:
      # We need GN to generate the list of runtime dependencies for
      # the compile targets listed (one per line) in the file so
      # we can run them via swarming. We use ninja_to_gn.pyl to convert
      # the compile targets to the matching GN labels.
      contents = self.ReadFile(self.args.swarming_targets_file)
      swarming_targets = contents.splitlines()
      ninja_targets_to_labels = ast.literal_eval(self.ReadFile(os.path.join(
          self.chromium_src_dir, 'testing', 'buildbot', 'ninja_to_gn.pyl')))
      gn_labels = []
      for target in swarming_targets:
        if not target in ninja_targets_to_labels:
          raise MBErr('test target "%s" not found in %s' %
                      (target, '//testing/buildbot/ninja_to_gn.pyl'))
        gn_labels.append(ninja_targets_to_labels[target])

      gn_runtime_deps_path = self.ToAbsPath(path, 'runtime_deps')

      # Since GN hasn't run yet, the build directory may not even exist.
      self.MaybeMakeDirectory(self.ToAbsPath(path))

      self.WriteFile(gn_runtime_deps_path, '\n'.join(gn_labels) + '\n')
      cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)

    ret, _, _ = self.Run(cmd)

    for target in swarming_targets:
      if sys.platform == 'win32':
        deps_path = self.ToAbsPath(path, target + '.exe.runtime_deps')
      else:
        deps_path = self.ToAbsPath(path, target + '.runtime_deps')
      if not self.Exists(deps_path):
        raise MBErr('did not generate %s' % deps_path)

      command, extra_files = self.GetIsolateCommand(target, vals)

      runtime_deps = self.ReadFile(deps_path).splitlines()

      isolate_path = self.ToAbsPath(path, target + '.isolate')
      self.WriteFile(isolate_path,
        pprint.pformat({
          'variables': {
            'command': command,
            'files': sorted(runtime_deps + extra_files),
            'read_only': 1,
          },
        }) + '\n')

      self.WriteJSON(
        {
          'args': [
            '--isolated',
            self.ToSrcRelPath('%s%s%s.isolated' % (path, os.sep, target)),
            '--isolate',
            self.ToSrcRelPath('%s%s%s.isolate' % (path, os.sep, target)),
          ],
          'dir': self.chromium_src_dir,
          'version': 1,
        },
        isolate_path + 'd.gen.json',
      )

    return ret

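  # Each <target>.isolated.gen.json written above ends up looking roughly
  # like this (the target name and paths are illustrative):
  #
  #   {
  #     "args": ["--isolated", "out/Release/base_unittests.isolated",
  #              "--isolate", "out/Release/base_unittests.isolate"],
  #     "dir": "/path/to/chromium/src",
  #     "version": 1
  #   }
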
  def GNCmd(self, subcommand, path, gn_args=''):
    if self.platform == 'linux2':
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'linux64',
                             'gn')
    elif self.platform == 'darwin':
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'mac',
                             'gn')
    else:
      gn_path = os.path.join(self.chromium_src_dir, 'buildtools', 'win',
                             'gn.exe')

    cmd = [gn_path, subcommand, path]
    gn_args = gn_args.replace("$(goma_dir)", self.args.goma_dir)
    if gn_args:
      cmd.append('--args=%s' % gn_args)
    return cmd

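  # For example (illustrative), on Linux
  #   GNCmd('gen', '//out/Debug', 'is_debug=true')
  # returns
  #   ['<src>/buildtools/linux64/gn', 'gen', '//out/Debug',
  #    '--args=is_debug=true'].
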
  def RunGYPGen(self, vals):
    path = self.args.path[0]

    output_dir, gyp_config = self.ParseGYPConfigPath(path)
    if gyp_config != vals['gyp_config']:
      raise MBErr('The last component of the path (%s) must match the '
                  'GYP configuration specified in the config (%s), and '
                  'it does not.' % (gyp_config, vals['gyp_config']))
    cmd = self.GYPCmd(output_dir, vals['gyp_defines'], config=gyp_config)
    ret, _, _ = self.Run(cmd)
    return ret

  def RunGYPAnalyze(self, vals):
    output_dir, gyp_config = self.ParseGYPConfigPath(self.args.path[0])
    if gyp_config != vals['gyp_config']:
      raise MBErr('The last component of the path (%s) must match the '
                  'GYP configuration specified in the config (%s), and '
                  'it does not.' % (gyp_config, vals['gyp_config']))
    if self.args.verbose:
      inp = self.ReadInputJSON(['files', 'targets'])
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    cmd = self.GYPCmd(output_dir, vals['gyp_defines'], config=gyp_config)
    cmd.extend(['-G', 'config_path=%s' % self.args.input_path[0],
                '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
    ret, _, _ = self.Run(cmd)
    if not ret and self.args.verbose:
      outp = json.loads(self.ReadFile(self.args.output_path[0]))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return ret

  def GetIsolateCommand(self, target, vals):
    extra_files = []

    # TODO(dpranke): We should probably pull this from
    # the test list info in //testing/buildbot/*.json,
    # and assert that the test has can_use_on_swarming_builders: True,
    # but we hardcode it here for now.
    test_type = {}.get(target, 'gtest_test')

    # This needs to mirror the settings in //build/config/ui.gni:
    # use_x11 = is_linux && !use_ozone.
    # TODO(dpranke): Figure out how to keep this in sync better.
    use_x11 = (sys.platform == 'linux2' and
               not 'target_os="android"' in vals['gn_args'] and
               not 'use_ozone=true' in vals['gn_args'])

    asan = 'is_asan=true' in vals['gn_args']
    msan = 'is_msan=true' in vals['gn_args']
    tsan = 'is_tsan=true' in vals['gn_args']

    executable_suffix = '.exe' if sys.platform == 'win32' else ''

    if test_type == 'gtest_test':
      extra_files.append('../../testing/test_env.py')

      if use_x11:
        # TODO(dpranke): Figure out some way to figure out which
        # test steps really need xvfb.
        extra_files.append('xdisplaycheck')
        extra_files.append('../../testing/xvfb.py')

        cmdline = [
          '../../testing/xvfb.py',
          '.',
          './' + str(target),
          '--brave-new-test-launcher',
          '--test-launcher-bot-mode',
          '--asan=%d' % asan,
          '--msan=%d' % msan,
          '--tsan=%d' % tsan,
        ]
      else:
        cmdline = [
          '../../testing/test_env.py',
          '.',
          './' + str(target) + executable_suffix,
          '--brave-new-test-launcher',
          '--test-launcher-bot-mode',
          '--asan=%d' % asan,
          '--msan=%d' % msan,
          '--tsan=%d' % tsan,
        ]
    else:
      # TODO(dpranke): Handle script_tests and other types of swarmed tests.
      self.WriteFailureAndRaise('unknown test type "%s" for %s' %
                                (test_type, target), output_path=None)

    return cmdline, extra_files

  def ToAbsPath(self, build_path, *comps):
    return os.path.join(self.chromium_src_dir,
                        self.ToSrcRelPath(build_path),
                        *comps)

  def ToSrcRelPath(self, path):
    """Returns a relative path from the top of the repo."""
    # TODO: Support normal paths in addition to source-absolute paths.
    assert(path.startswith('//'))
    return path[2:].replace('/', os.sep)

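  # For example, ToSrcRelPath('//out/Debug') returns 'out/Debug' on POSIX and
  # 'out\\Debug' on Windows; paths that do not start with '//' trip the
  # assert above.
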
  def ParseGYPConfigPath(self, path):
    rpath = self.ToSrcRelPath(path)
    output_dir, _, config = rpath.rpartition('/')
    self.CheckGYPConfigIsSupported(config, path)
    return output_dir, config

  def CheckGYPConfigIsSupported(self, config, path):
    if config not in ('Debug', 'Release'):
      if (sys.platform in ('win32', 'cygwin') and
          config not in ('Debug_x64', 'Release_x64')):
        raise MBErr('Unknown or unsupported config type "%s" in "%s"' %
                    (config, path))

  def GYPCmd(self, output_dir, gyp_defines, config):
    gyp_defines = gyp_defines.replace("$(goma_dir)", self.args.goma_dir)
    cmd = [
        sys.executable,
        os.path.join('build', 'gyp_chromium'),
        '-G',
        'output_dir=' + output_dir,
        '-G',
        'config=' + config,
    ]
    for d in shlex.split(gyp_defines):
      cmd += ['-D', d]
    return cmd

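  # For example (illustrative), GYPCmd('out/Release', 'use_goma=1', 'Release')
  # returns
  #   [sys.executable, 'build/gyp_chromium', '-G', 'output_dir=out/Release',
  #    '-G', 'config=Release', '-D', 'use_goma=1'].
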
  def RunGNAnalyze(self, vals):
    # analyze runs before 'gn gen' now, so we need to run gn gen
    # in order to ensure that we have a build directory.
    ret = self.RunGNGen(vals)
    if ret:
      return ret

    inp = self.ReadInputJSON(['files', 'targets'])
    if self.args.verbose:
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    output_path = self.args.output_path[0]

    # Bail out early if a GN file was modified, since 'gn refs' won't know
    # what to do about it.
    if any(f.endswith('.gn') or f.endswith('.gni') for f in inp['files']):
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    # Bail out early if 'all' was asked for, since 'gn refs' won't recognize it.
    if 'all' in inp['targets']:
      self.WriteJSON({'status': 'Found dependency (all)'}, output_path)
      return 0

    ret = 0
    response_file = self.TempFile()
    response_file.write('\n'.join(inp['files']) + '\n')
    response_file.close()

    matching_targets = []
    try:
      cmd = self.GNCmd('refs', self.args.path[0]) + [
          '@%s' % response_file.name, '--all', '--as=output']
      ret, out, _ = self.Run(cmd)
      if ret and not 'The input matches no targets' in out:
        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                  output_path)
      build_dir = self.ToSrcRelPath(self.args.path[0]) + os.sep
      for output in out.splitlines():
        build_output = output.replace(build_dir, '')
        if build_output in inp['targets']:
          matching_targets.append(build_output)

      cmd = self.GNCmd('refs', self.args.path[0]) + [
          '@%s' % response_file.name, '--all']
      ret, out, _ = self.Run(cmd)
      if ret and not 'The input matches no targets' in out:
        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
                                  output_path)
      for label in out.splitlines():
        build_target = label[2:]
        # We want to accept 'chrome/android:chrome_shell_apk' and
        # just 'chrome_shell_apk'. This may result in too many targets
        # getting built, but we can adjust that later if need be.
        for input_target in inp['targets']:
          if (input_target == build_target or
              build_target.endswith(':' + input_target)):
            matching_targets.append(input_target)
    finally:
      self.RemoveFile(response_file.name)

    if matching_targets:
      # TODO: it could be that a target X might depend on a target Y
      # and both would be listed in the input, but we would only need
      # to specify target X as a build_target (whereas both X and Y are
      # targets). I'm not sure if that optimization is generally worth it.
      self.WriteJSON({'targets': sorted(matching_targets),
                      'build_targets': sorted(matching_targets),
                      'status': 'Found dependency'}, output_path)
    else:
      self.WriteJSON({'targets': [],
                      'build_targets': [],
                      'status': 'No dependency'}, output_path)

    if not ret and self.args.verbose:
      outp = json.loads(self.ReadFile(output_path))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return 0

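  # The analyze step reads and writes small JSON files; a sketch of the
  # protocol (file and target names hypothetical):
  #
  #   input:  {"files": ["base/logging.cc"], "targets": ["base_unittests"]}
  #   output: {"status": "Found dependency",
  #            "targets": ["base_unittests"],
  #            "build_targets": ["base_unittests"]}
  #
  # 'status' may also be 'Found dependency (all)' or 'No dependency', in
  # which case the target lists are omitted or empty.
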
  def ReadInputJSON(self, required_keys):
    path = self.args.input_path[0]
    output_path = self.args.output_path[0]
    if not self.Exists(path):
      self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)

    try:
      inp = json.loads(self.ReadFile(path))
    except Exception as e:
      self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
                                (path, e), output_path)

    for k in required_keys:
      if not k in inp:
        self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
                                  output_path)

    return inp

  def WriteFailureAndRaise(self, msg, output_path):
    if output_path:
      self.WriteJSON({'error': msg}, output_path)
    raise MBErr(msg)

  def WriteJSON(self, obj, path):
    try:
      self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n')
    except Exception as e:
      raise MBErr('Error %s writing to the output path "%s"' %
                  (e, path))

  def PrintCmd(self, cmd):
    if cmd[0] == sys.executable:
      cmd = ['python'] + cmd[1:]
    self.Print(*[pipes.quote(c) for c in cmd])

  def PrintJSON(self, obj):
    self.Print(json.dumps(obj, indent=2, sort_keys=True))

  def Print(self, *args, **kwargs):
    # This function largely exists so it can be overridden for testing.
    print(*args, **kwargs)

  def Run(self, cmd):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose:
      self.PrintCmd(cmd)
    if self.args.dryrun:
      return 0, '', ''
    ret, out, err = self.Call(cmd)
    if self.args.verbose:
      if out:
        self.Print(out, end='')
      if err:
        self.Print(err, end='', file=sys.stderr)
    return ret, out, err

  def Call(self, cmd):
    p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return p.returncode, out, err

  def ExpandUser(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.expanduser(path)

  def Exists(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.exists(path)

  def MaybeMakeDirectory(self, path):
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise

  def ReadFile(self, path):
    # This function largely exists so it can be overridden for testing.
    with open(path) as fp:
      return fp.read()

  def RemoveFile(self, path):
    # This function largely exists so it can be overridden for testing.
    os.remove(path)

  def TempFile(self, mode='w'):
    # This function largely exists so it can be overridden for testing.
    return tempfile.NamedTemporaryFile(mode=mode, delete=False)

  def WriteFile(self, path, contents):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose:
      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
    with open(path, 'w') as fp:
      return fp.write(contents)


class MBErr(Exception):
  pass


if __name__ == '__main__':
  try:
    sys.exit(main(sys.argv[1:]))
  except MBErr as e:
    print(e)
    sys.exit(1)
  except KeyboardInterrupt:
    print("interrupted, exiting", file=sys.stderr)
    sys.exit(130)