1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
# Path to the Chromium src/ root, four directory levels up from this file.
CHROMIUM_SRC = os.path.normpath(
    os.path.join(os.path.dirname(__file__),
                 os.pardir, os.pardir, os.pardir, os.pardir))

COLORAMA_ROOT = os.path.join(CHROMIUM_SRC, 'third_party', 'colorama', 'src')

# aapt should ignore OWNERS files in addition the default ignore pattern.
AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:'
                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')

# Fixed (year, month, day, h, m, s) stamped into zip entries so outputs are
# byte-for-byte reproducible regardless of build time.
HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
@contextlib.contextmanager
def TempDir():
  """Context manager yielding a fresh temp directory, removed on exit.

  NOTE(review): the `def` line and the yield/cleanup structure are elided in
  this chunk; only the mkdtemp and rmtree calls are visible. The name and the
  try/finally shape are reconstructed — confirm against upstream.
  """
  dirname = tempfile.mkdtemp()
  try:
    yield dirname
  finally:
    # Always remove the directory, even if the body raised.
    shutil.rmtree(dirname)
def MakeDirectory(dir_path):
  """Creates dir_path and its parents, ignoring the error if it exists.

  NOTE(review): the body of this function is elided in this chunk; the
  makedirs-and-ignore-OSError shape is reconstructed from how callers in this
  file use it (e.g. Touch calls it unconditionally) — confirm against upstream.
  """
  try:
    os.makedirs(dir_path)
  except OSError:
    # Already exists (or racing creator) — treat as success.
    pass
def DeleteDirectory(dir_path):
  """Removes dir_path and all of its contents; no-op if it does not exist."""
  if not os.path.exists(dir_path):
    return
  shutil.rmtree(dir_path)
def Touch(path, fail_if_missing=False):
  """Creates path (and its parent directories) and updates its mtime.

  Args:
    path: File to touch.
    fail_if_missing: If True, raise instead of creating a missing file.

  Raises:
    Exception: if fail_if_missing is True and path does not exist.
  """
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  # NOTE(review): the lines that actually create the file and bump its mtime
  # are elided in this chunk; the open-append + utime idiom is reconstructed —
  # confirm against upstream.
  with open(path, 'a'):
    os.utime(path, None)
def FindInDirectory(directory, filename_filter):
  """Recursively finds files under directory matching a glob.

  Args:
    directory: Root directory to walk.
    filename_filter: fnmatch-style glob applied to each file's basename.

  Returns:
    List of matching paths, each joined onto the directory it was found in.
  """
  # The accumulator initialization and the return were elided in the visible
  # code; without them `files` is undefined — restored here.
  files = []
  for root, _dirnames, filenames in os.walk(directory):
    matched_files = fnmatch.filter(filenames, filename_filter)
    files.extend((os.path.join(root, f) for f in matched_files))
  return files
def FindInDirectories(directories, filename_filter):
  """Applies FindInDirectory to each directory and concatenates the results.

  Args:
    directories: Iterable of root directories to search.
    filename_filter: fnmatch-style glob applied to basenames.

  Returns:
    List of all matching paths across all directories.
  """
  # Accumulator init and return were elided in the visible code — restored.
  all_files = []
  for directory in directories:
    all_files.extend(FindInDirectory(directory, filename_filter))
  return all_files
def ParseGnList(gn_string):
  """Evaluates gn_string as a Python literal and returns the value.

  GN list values use Python-literal syntax, so ast.literal_eval parses them
  safely (no arbitrary code execution).
  """
  parsed_value = ast.literal_eval(gn_string)
  return parsed_value
def ParseGypList(gyp_string):
  """Parses a GYP list value into a Python list of strings.

  The ninja generator doesn't support $ in strings, so ## is used to stand in
  for $ and is translated back here.
  TODO(cjhopman): Remove when
  https://code.google.com/p/gyp/issues/detail?id=327 is addressed.
  """
  unescaped = gyp_string.replace('##', '$')

  # Bracketed values are Python-literal lists; anything else is shell-style.
  if unescaped.startswith('['):
    return ParseGnList(unescaped)
  return shlex.split(unescaped)
def CheckOptions(options, parser, required=None):
  """Errors out through parser if any required option is unset.

  Args:
    options: Parsed options object (optparse/argparse result).
    parser: Parser whose .error() is called to report a missing option.
    required: Iterable of attribute names that must not be None.
  """
  # `required` defaults to None; iterating it directly would raise TypeError.
  if not required:
    return
  for option_name in required:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))
def WriteJson(obj, path, only_if_changed=False):
  """Serializes obj as stable (sorted-key) json and writes it to path.

  Args:
    obj: Json-serializable object.
    path: Output file path.
    only_if_changed: If True, skip the write when the file's current contents
        already equal the new dump (preserves the file's mtime).
  """
  # The visible code only assigned old_dump when the file existed, so the
  # comparison below raised NameError on a fresh output path — initialize it.
  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if not only_if_changed or old_dump != new_dump:
    with open(path, 'w') as outfile:
      outfile.write(new_dump)
def ReadJson(path):
  """Reads path and returns its contents parsed as json.

  NOTE(review): the `def` line is elided in this chunk; the name ReadJson is
  grounded by the call in ExpandFileArgs below — confirm against upstream.
  """
  with open(path, 'r') as jsonfile:
    return json.load(jsonfile)
class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code."""

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    # NOTE(review): these attribute assignments are elided in this chunk but
    # are required — __str__ below reads self.cwd/self.args/self.output.
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell (hence the quoting and the cd prefix).
    copyable_command = '( cd {}; {} )'.format(
        os.path.abspath(self.cwd), ' '.join(map(pipes.quote, self.args)))
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
def CheckOutput(args, cwd=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None, stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  """Runs args, returning its stdout; raises CalledProcessError on failure.

  NOTE(review): the stdout_filter/stderr_filter parameters and the cwd
  defaulting are reconstructed — their signature lines are elided in this
  chunk, but the body plainly references both names. Confirm against upstream.

  Args:
    args: Command argv list.
    cwd: Working directory for the child (defaults to the current directory).
    print_stdout/print_stderr: Whether to echo the child's streams.
    stdout_filter/stderr_filter: Optional callables applied to each stream.
    fail_func: (returncode, stderr) -> bool deciding whether the run failed.
  """
  if not cwd:
    cwd = os.getcwd()

  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
  stdout, stderr = child.communicate()

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  # The failure decision sees the filtered stderr, matching the original order.
  if fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  return stdout
def GetModifiedTime(path):
  """Returns the effective modification time of path.

  For a symlink, the modified time should be the greater of the link's
  modified time and the modified time of the target; for a regular file the
  two stats agree.
  """
  link_mtime = os.lstat(path).st_mtime
  target_mtime = os.stat(path).st_mtime
  return link_mtime if link_mtime > target_mtime else target_mtime
def IsTimeStale(output, inputs):
  """Returns True when output is missing or older than any input.

  NOTE(review): the early-return lines and the loop header are elided in this
  chunk; the standard stale-check shape is reconstructed around the visible
  comparisons — confirm against upstream.
  """
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  for i in inputs:
    if GetModifiedTime(i) > output_time:
      return True
  return False
def IsDeviceReady():
  """Returns whether `adb get-state` reports an attached, ready device.

  NOTE(review): the `def` line is elided in this chunk; the name IsDeviceReady
  is reconstructed from the adb get-state body — confirm against upstream.
  """
  device_state = CheckOutput(['adb', 'get-state'])
  return device_state.strip() == 'device'
def CheckZipPath(name):
  """Raises unless name is a canonical, relative zip entry path."""
  normalized = os.path.normpath(name)
  if normalized != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)
def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
  """Extracts zip_path into path, optionally refusing to overwrite files.

  NOTE(review): several original lines are elided in this chunk. The `if`
  branch paired with the visible `elif`, the MakeDirectory body, and the
  `continue` statements are reconstructed — confirm against upstream. Note
  also that the final extractall extracts every entry regardless of
  `pattern`; the loop above it only performs the clobber check.

  Args:
    zip_path: Zip file to extract.
    path: Destination directory (defaults to the current directory).
    no_clobber: If True, raise when an entry already exists on disk.
    pattern: fnmatch glob limiting which entries are clobber-checked.
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      # Directory entries end with '/' and need no clobber check.
      if name.endswith('/'):
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))

    z.extractall(path=path)
def DoZip(inputs, output, base_dir=None):
  """Creates a zip file from a list of files.

  Args:
    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
    output: Destination .zip file.
    base_dir: Prefix to strip from inputs.
  """
  # NOTE(review): the accumulator init and the file-read line are elided in
  # this chunk and reconstructed — confirm against upstream.
  input_tuples = []
  for tup in inputs:
    # Plain path strings become (archive_name, fs_path) pairs; `basestring`
    # is kept because this file is Python 2 throughout.
    if isinstance(tup, basestring):
      tup = (os.path.relpath(tup, base_dir), tup)
    input_tuples.append(tup)

  # Sort by zip path to ensure stable zip ordering.
  input_tuples.sort(key=lambda tup: tup[0])
  with zipfile.ZipFile(output, 'w') as outfile:
    for zip_path, fs_path in input_tuples:
      CheckZipPath(zip_path)
      # Fixed date_time keeps the output zip hermetic.
      zipinfo = zipfile.ZipInfo(filename=zip_path,
                                date_time=HERMETIC_TIMESTAMP)
      # open() replaces the Python-2-only file() builtin; identical behavior
      # on Python 2 and also valid on Python 3.
      with open(fs_path) as f:
        contents = f.read()
      outfile.writestr(zipinfo, contents)
def ZipDir(output, base_dir):
  """Creates a zip file from a directory."""
  # The accumulator init and inner file loop were elided in the visible
  # code — restored so `inputs` and `f` are defined.
  inputs = []
  for root, _, files in os.walk(base_dir):
    for f in files:
      inputs.append(os.path.join(root, f))
  DoZip(inputs, output, base_dir)
def MatchesGlob(path, filters):
  """Returns whether the given path matches any of the given glob patterns."""
  # Preserve the original short-circuit: a falsy `filters` (None or empty)
  # is returned as-is rather than coerced to False.
  if not filters:
    return filters
  return any(fnmatch.fnmatch(path, pattern) for pattern in filters)
def MergeZips(output, inputs, exclude_patterns=None, path_transform=None):
  """Merges the entries of several zips into a single output zip.

  Args:
    output: Destination .zip path.
    inputs: Paths of zips to merge; on duplicate entry names the earliest
        zip in the list wins.
    exclude_patterns: Glob patterns of entry names to skip.
    path_transform: Optional (entry_name, input_zip_path) -> new entry name.
  """
  path_transform = path_transform or (lambda p, z: p)

  # Tracks names already written so later duplicates are dropped. This init
  # was missing from the visible code (added_names was used but undefined).
  added_names = set()
  with zipfile.ZipFile(output, 'w') as out_zip:
    for in_file in inputs:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for name in in_zip.namelist():
          dst_name = path_transform(name, in_file)
          already_added = dst_name in added_names
          if not already_added and not MatchesGlob(dst_name, exclude_patterns):
            # Fixed date_time keeps the merged zip hermetic.
            zipinfo = zipfile.ZipInfo(filename=dst_name,
                                      date_time=HERMETIC_TIMESTAMP)
            out_zip.writestr(zipinfo, in_zip.read(name))
            added_names.add(dst_name)
def PrintWarning(message):
  """Prints message to stdout prefixed with 'WARNING: '."""
  # Parenthesized single-argument print behaves identically on Python 2 and
  # Python 3; the original bare print statement is a Python 3 syntax error.
  print('WARNING: ' + message)
def PrintBigWarning(message):
  """Prints message as a prominent warning.

  NOTE(review): original lines 283 and 285 are elided in this chunk and
  presumably printed banner framing around the message; only the core call
  is visible — confirm against upstream.
  """
  PrintWarning(message)
def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph.

  Args:
    top: a list of the top level nodes
    deps_func: A function that takes a node and returns a set of its direct
        dependencies (the body calls .difference/.intersection on the result).
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # NOTE(review): the `def Node` header, the initializations of all_deps and
  # sorted_deps, the outer `while`, and the return are elided in this chunk
  # and reconstructed — confirm against upstream.
  def Node(dep):
    return (dep, deps_func(dep))

  # First: find all deps
  unchecked_deps = list(top)
  all_deps = set(top)
  while unchecked_deps:
    dep = unchecked_deps.pop()
    new_deps = deps_func(dep).difference(all_deps)
    unchecked_deps.extend(new_deps)
    all_deps = all_deps.union(new_deps)

  # Then: simple, slow topological sort.
  sorted_deps = []
  unsorted_deps = dict(map(Node, all_deps))
  while unsorted_deps:
    # list() snapshots the items so entries can be deleted during the scan —
    # required on Python 3 (RuntimeError otherwise), harmless on Python 2.
    for library, dependencies in list(unsorted_deps.items()):
      if not dependencies.intersection(unsorted_deps.keys()):
        sorted_deps.append(library)
        del unsorted_deps[library]

  return sorted_deps
def GetPythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  # .values() (rather than the Python-2-only itervalues()) behaves the same
  # on Python 2 and is also valid on Python 3.
  module_paths = (m.__file__ for m in sys.modules.values()
                  if m is not None and hasattr(m, '__file__'))

  abs_module_paths = map(os.path.abspath, module_paths)

  non_system_module_paths = [
      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]

  def ConvertPycToPy(s):
    # Map a compiled .pyc path back to its .py source by dropping the 'c'.
    # (The return lines here are elided in this chunk and reconstructed.)
    if s.endswith('.pyc'):
      return s[:-1]
    return s

  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
  return sorted(set(non_system_module_paths))
def AddDepfileOption(parser):
  """Adds a --depfile option to an optparse or argparse parser."""
  # TODO(agrieve): Get rid of this once we've moved to argparse.
  if hasattr(parser, 'add_option'):
    func = parser.add_option
  else:
    func = parser.add_argument
  # NOTE(review): the line naming the option is elided in this chunk;
  # '--depfile' is reconstructed from the function name — confirm upstream.
  func('--depfile',
       help='Path to depfile. Must be specified as the action\'s first output.')
def WriteDepfile(path, dependencies):
  """Writes a Make-style depfile: '<path>: dep1 dep2 ...\\n'.

  NOTE(review): only the open() and the joined-dependency write are visible
  in this chunk; the target prefix, ': ' separator, and trailing newline are
  reconstructed from the Make depfile format — confirm against upstream.

  Args:
    path: Depfile to write; also used as the rule's target.
    dependencies: Iterable of dependency path strings.
  """
  with open(path, 'w') as depfile:
    depfile.write(path)
    depfile.write(': ')
    depfile.write(' '.join(dependencies))
    depfile.write('\n')
def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json.
  And then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  # Cache parsed json so a file referenced by several args is read once.
  # (This init, the no-match guard, and the return were missing from the
  # visible code — file_jsons and the None `match` would have crashed.)
  file_jsons = dict()
  # Raw string avoids the invalid-escape-sequence warning for \( on Python 3.
  r = re.compile(r'@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    if match.end() != len(arg):
      raise Exception('Unexpected characters after FileArg: ' + arg)

    lookup_path = match.group(1).split(':')
    file_path = lookup_path[0]
    if not file_path in file_jsons:
      file_jsons[file_path] = ReadJson(file_path)

    # Walk the remaining keys into the parsed json structure.
    expansion = file_jsons[file_path]
    for k in lookup_path[1:]:
      expansion = expansion[k]

    new_args[i] = arg[:match.start()] + str(expansion)

  return new_args