build/android/gyp/util/build_utils.py
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import ast
import contextlib
import fnmatch
import json
import os
import pipes
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import zipfile


CHROMIUM_SRC = os.path.normpath(
    os.path.join(os.path.dirname(__file__),
                 os.pardir, os.pardir, os.pardir, os.pardir))
COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
                             'third_party', 'colorama', 'src')
# aapt should ignore OWNERS files in addition to the default ignore pattern.
AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
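# (The pattern above is what the resource packaging steps are expected to hand
# to aapt, typically through its --ignore-assets flag; OWNERS and *.d.stamp
# files are excluded on top of aapt's usual defaults.)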
HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
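# The fixed timestamp above is stamped onto every entry written by DoZip and
# MergeZips below, so that archive contents do not vary with the build time.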


@contextlib.contextmanager
def TempDir():
  dirname = tempfile.mkdtemp()
  try:
    yield dirname
  finally:
    shutil.rmtree(dirname)


def MakeDirectory(dir_path):
  try:
    os.makedirs(dir_path)
  except OSError:
    pass


def DeleteDirectory(dir_path):
  if os.path.exists(dir_path):
    shutil.rmtree(dir_path)


def Touch(path, fail_if_missing=False):
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  with open(path, 'a'):
    os.utime(path, None)


def FindInDirectory(directory, filename_filter):
  files = []
  for root, _dirnames, filenames in os.walk(directory):
    matched_files = fnmatch.filter(filenames, filename_filter)
    files.extend((os.path.join(root, f) for f in matched_files))
  return files


def FindInDirectories(directories, filename_filter):
  all_files = []
  for directory in directories:
    all_files.extend(FindInDirectory(directory, filename_filter))
  return all_files


def ParseGnList(gn_string):
  return ast.literal_eval(gn_string)


def ParseGypList(gyp_string):
  # The ninja generator doesn't support $ in strings, so use ## to
  # represent $.
  # TODO(cjhopman): Remove when
  # https://code.google.com/p/gyp/issues/detail?id=327
  # is addressed.
  gyp_string = gyp_string.replace('##', '$')

  if gyp_string.startswith('['):
    return ParseGnList(gyp_string)
  return shlex.split(gyp_string)
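# Illustrative examples of the parsing above (values are made up):
#   ParseGypList('a.jar b.jar')         -> ['a.jar', 'b.jar']
#   ParseGypList('["a.jar", "b.jar"]')  -> ['a.jar', 'b.jar']   (GN-style list)
#   ParseGypList('--key=##FOO')         -> ['--key=$FOO']       (## restored to $)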


def CheckOptions(options, parser, required=None):
  if not required:
    return
  for option_name in required:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))


def WriteJson(obj, path, only_if_changed=False):
  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if not only_if_changed or old_dump != new_dump:
    with open(path, 'w') as outfile:
      outfile.write(new_dump)


def ReadJson(path):
  with open(path, 'r') as jsonfile:
    return json.load(jsonfile)


class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code."""

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell.
    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
        ' '.join(map(pipes.quote, self.args)))
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)


# This can be used in most cases like subprocess.check_output(). When the
# command fails, a build_utils.CalledProcessError is raised; its message
# includes the command's output, which makes the failure easier to diagnose.
def CheckOutput(args, cwd=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  if not cwd:
    cwd = os.getcwd()

  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
  stdout, stderr = child.communicate()

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  if fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  return stdout
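# Illustrative usage (the command and filter below are made up, not taken from
# a real build step):
#   stdout = CheckOutput(
#       ['javac', '-d', classes_dir] + java_files,
#       stderr_filter=lambda stderr: stderr.replace('Note: deprecated', ''),
#       fail_func=lambda returncode, stderr: returncode != 0 or stderr)
# If fail_func returns True, the raised CalledProcessError's message includes
# a copy-pasteable '( cd ...; ... )' form of the failing command.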


def GetModifiedTime(path):
  # For a symlink, the modified time should be the greater of the link's
  # modified time and the modified time of the target.
  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)


def IsTimeStale(output, inputs):
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  for i in inputs:
    if GetModifiedTime(i) > output_time:
      return True
  return False
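# Sketch of the intended skip-if-up-to-date pattern (the paths and the
# Rebuild() helper are hypothetical):
#   if IsTimeStale('lib.jar', ['A.java', 'B.java']):
#     Rebuild()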


def IsDeviceReady():
  device_state = CheckOutput(['adb', 'get-state'])
  return device_state.strip() == 'device'


def CheckZipPath(name):
  if os.path.normpath(name) != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)


def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      if name.endswith('/'):
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      # Extract entry by entry (rather than extractall) so that the pattern
      # filter above is actually honored.
      z.extract(name, path)


def DoZip(inputs, output, base_dir=None):
  """Creates a zip file from a list of files.

  Args:
    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
    output: Destination .zip file.
    base_dir: Prefix to strip from inputs.
  """
  input_tuples = []
  for tup in inputs:
    if isinstance(tup, basestring):
      tup = (os.path.relpath(tup, base_dir), tup)
    input_tuples.append(tup)

  # Sort by zip path to ensure stable zip ordering.
  input_tuples.sort(key=lambda tup: tup[0])
  with zipfile.ZipFile(output, 'w') as outfile:
    for zip_path, fs_path in input_tuples:
      CheckZipPath(zip_path)
      zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
      with open(fs_path, 'rb') as f:
        contents = f.read()
      outfile.writestr(zipinfo, contents)
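# Illustrative usage (paths are made up):
#   DoZip(['out/gen/R.java', 'out/gen/Foo.java'], 'out/srcjar.zip',
#         base_dir='out/gen')
# creates an archive whose entries are R.java and Foo.java, each stamped with
# HERMETIC_TIMESTAMP so repeated builds produce byte-identical output.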


def ZipDir(output, base_dir):
  """Creates a zip file from a directory."""
  inputs = []
  for root, _, files in os.walk(base_dir):
    for f in files:
      inputs.append(os.path.join(root, f))
  DoZip(inputs, output, base_dir)


def MatchesGlob(path, filters):
  """Returns whether the given path matches any of the given glob patterns."""
  return filters and any(fnmatch.fnmatch(path, f) for f in filters)


def MergeZips(output, inputs, exclude_patterns=None, path_transform=None):
  path_transform = path_transform or (lambda p, z: p)
  added_names = set()

  with zipfile.ZipFile(output, 'w') as out_zip:
    for in_file in inputs:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for name in in_zip.namelist():
          dst_name = path_transform(name, in_file)
          already_added = dst_name in added_names
          if not already_added and not MatchesGlob(dst_name, exclude_patterns):
            zipinfo = zipfile.ZipInfo(filename=dst_name,
                                      date_time=HERMETIC_TIMESTAMP)
            out_zip.writestr(zipinfo, in_zip.read(name))
            added_names.add(dst_name)


def PrintWarning(message):
  print 'WARNING: ' + message


def PrintBigWarning(message):
  print '***** ' * 8
  PrintWarning(message)
  print '***** ' * 8


def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph.

  Args:
    top: a list of the top level nodes
    deps_func: A function that takes a node and returns its direct dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node will
    appear in the list at a higher index than all of its dependencies).
  """
  def Node(dep):
    return (dep, deps_func(dep))

  # First: find all deps
  unchecked_deps = list(top)
  all_deps = set(top)
  while unchecked_deps:
    dep = unchecked_deps.pop()
    new_deps = deps_func(dep).difference(all_deps)
    unchecked_deps.extend(new_deps)
    all_deps = all_deps.union(new_deps)

  # Then: simple, slow topological sort.
  sorted_deps = []
  unsorted_deps = dict(map(Node, all_deps))
  while unsorted_deps:
    for library, dependencies in unsorted_deps.items():
      if not dependencies.intersection(unsorted_deps.keys()):
        sorted_deps.append(library)
        del unsorted_deps[library]

  return sorted_deps
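# Illustrative example on a tiny made-up graph (note that deps_func must
# return a set, since difference()/intersection() are called on its result):
#   deps = {'a': set(['b', 'c']), 'b': set(['c']), 'c': set()}
#   GetSortedTransitiveDependencies(['a'], lambda n: deps[n])
#     -> ['c', 'b', 'a']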


def GetPythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  module_paths = (m.__file__ for m in sys.modules.itervalues()
                  if m is not None and hasattr(m, '__file__'))

  abs_module_paths = map(os.path.abspath, module_paths)

  non_system_module_paths = [
      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
  def ConvertPycToPy(s):
    if s.endswith('.pyc'):
      return s[:-1]
    return s

  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
  return sorted(set(non_system_module_paths))


def AddDepfileOption(parser):
  # TODO(agrieve): Get rid of this once we've moved to argparse.
  if hasattr(parser, 'add_option'):
    func = parser.add_option
  else:
    func = parser.add_argument
  func('--depfile',
       help='Path to depfile. Must be specified as the action\'s first output.')


def WriteDepfile(path, dependencies):
  with open(path, 'w') as depfile:
    depfile.write(path)
    depfile.write(': ')
    depfile.write(' '.join(dependencies))
    depfile.write('\n')
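# Sketch of how the depfile helpers are typically combined in the scripts that
# use this module ('options' is whatever the script's own parser produced):
#   AddDepfileOption(parser)
#   ...
#   if options.depfile:
#     WriteDepfile(options.depfile, GetPythonDependencies())
# This writes a Makefile-style line, e.g.
#   gen/foo.d: util/build_utils.py util/other_dep.py
# which lets ninja re-run the action when any listed dependency changes.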


def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json
  and then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  file_jsons = dict()
  r = re.compile(r'@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    if match.end() != len(arg):
      raise Exception('Unexpected characters after FileArg: ' + arg)

    lookup_path = match.group(1).split(':')
    file_path = lookup_path[0]
    if file_path not in file_jsons:
      file_jsons[file_path] = ReadJson(file_path)

    expansion = file_jsons[file_path]
    for k in lookup_path[1:]:
      expansion = expansion[k]

    new_args[i] = arg[:match.start()] + str(expansion)

  return new_args
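# Illustrative expansion (the file name and keys are made up): if
# 'build_config.json' contains {"deps_info": {"jar_path": "out/foo.jar"}}, then
#   ExpandFileArgs(['--jar=@FileArg(build_config.json:deps_info:jar_path)'])
# returns ['--jar=out/foo.jar'].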