1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.

import ast
import contextlib
import fnmatch
import json
import os
import pipes
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import zipfile


CHROMIUM_SRC = os.path.normpath(
    os.path.join(os.path.dirname(__file__),
                 os.pardir, os.pardir, os.pardir, os.pardir))
COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
                             'third_party', 'colorama', 'src')
# aapt should ignore OWNERS files in addition to the default ignore pattern.
AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
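
# Illustrative use (an assumption, not shown in this file): build steps that
# invoke aapt typically forward this pattern, e.g.
#   ['aapt', 'package', '--ignore-assets', AAPT_IGNORE_PATTERN, ...]
# so OWNERS files and VCS metadata are never packaged as assets.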


@contextlib.contextmanager
def TempDir():
  dirname = tempfile.mkdtemp()
  try:
    yield dirname
  finally:
    shutil.rmtree(dirname)


def MakeDirectory(dir_path):
  try:
    os.makedirs(dir_path)
  except OSError:
    pass


def DeleteDirectory(dir_path):
  if os.path.exists(dir_path):
    shutil.rmtree(dir_path)


def Touch(path, fail_if_missing=False):
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  with open(path, 'a'):
    os.utime(path, None)


def FindInDirectory(directory, filename_filter):
  files = []
  for root, _dirnames, filenames in os.walk(directory):
    matched_files = fnmatch.filter(filenames, filename_filter)
    files.extend((os.path.join(root, f) for f in matched_files))
  return files


def FindInDirectories(directories, filename_filter):
  all_files = []
  for directory in directories:
    all_files.extend(FindInDirectory(directory, filename_filter))
  return all_files


def ParseGnList(gn_string):
  return ast.literal_eval(gn_string)


def ParseGypList(gyp_string):
  # The ninja generator doesn't support $ in strings, so use ## to
  # represent $.
  # TODO(cjhopman): Remove when
  # https://code.google.com/p/gyp/issues/detail?id=327
  # is addressed.
  gyp_string = gyp_string.replace('##', '$')

  if gyp_string.startswith('['):
    return ParseGnList(gyp_string)
  return shlex.split(gyp_string)
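
# Illustrative examples (the inputs are assumptions, not from this file):
#   ParseGypList('["a", "b"]')      -> ['a', 'b']
#   ParseGypList('foo.jar bar.jar') -> ['foo.jar', 'bar.jar']
#   ParseGypList('##FOO')           -> ['$FOO']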


def CheckOptions(options, parser, required=None):
  if not required:
    return
  for option_name in required:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))


def WriteJson(obj, path, only_if_changed=False):
  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if not only_if_changed or old_dump != new_dump:
    with open(path, 'w') as outfile:
      outfile.write(new_dump)


def ReadJson(path):
  with open(path, 'r') as jsonfile:
    return json.load(jsonfile)


class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code."""

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell.
    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
        ' '.join(map(pipes.quote, self.args)))
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)


# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
def CheckOutput(args, cwd=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  if not cwd:
    cwd = os.getcwd()

  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
  stdout, stderr = child.communicate()

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  if fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  return stdout


def GetModifiedTime(path):
  # For a symlink, the modified time should be the greater of the link's
  # modified time and the modified time of the target.
  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)


def IsTimeStale(output, inputs):
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  for i in inputs:
    if GetModifiedTime(i) > output_time:
      return True
  return False


def IsDeviceReady():
  device_state = CheckOutput(['adb', 'get-state'])
  return device_state.strip() == 'device'


def CheckZipPath(name):
  if os.path.normpath(name) != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)


def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      if name.endswith('/'):
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))

    z.extractall(path=path)


def DoZip(inputs, output, base_dir):
  with zipfile.ZipFile(output, 'w') as outfile:
    for f in inputs:
      CheckZipPath(os.path.relpath(f, base_dir))
      outfile.write(f, os.path.relpath(f, base_dir))


def ZipDir(output, base_dir):
  with zipfile.ZipFile(output, 'w') as outfile:
    for root, _, files in os.walk(base_dir):
      for f in files:
        path = os.path.join(root, f)
        archive_path = os.path.relpath(path, base_dir)
        CheckZipPath(archive_path)
        outfile.write(path, archive_path)


def MergeZips(output, inputs, exclude_patterns=None):
  added_names = set()
  def Allow(name):
    if exclude_patterns is not None:
      for p in exclude_patterns:
        if fnmatch.fnmatch(name, p):
          return False
    return True

  with zipfile.ZipFile(output, 'w') as out_zip:
    for in_file in inputs:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for name in in_zip.namelist():
          if name not in added_names and Allow(name):
            out_zip.writestr(name, in_zip.read(name))
            added_names.add(name)


def PrintWarning(message):
  print 'WARNING: ' + message


def PrintBigWarning(message):
  print '*****     ' * 8
  PrintWarning(message)
  print '*****     ' * 8


def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph.

  Args:
    top: a list of the top level nodes
    deps_func: A function that takes a node and returns its direct dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node will
    appear in the list at a higher index than all of its dependencies).
  """
  def Node(dep):
    return (dep, deps_func(dep))

  # First: find all deps
  unchecked_deps = list(top)
  all_deps = set(top)
  while unchecked_deps:
    dep = unchecked_deps.pop()
    new_deps = deps_func(dep).difference(all_deps)
    unchecked_deps.extend(new_deps)
    all_deps = all_deps.union(new_deps)

  # Then: simple, slow topological sort.
  sorted_deps = []
  unsorted_deps = dict(map(Node, all_deps))
  while unsorted_deps:
    for library, dependencies in unsorted_deps.items():
      if not dependencies.intersection(unsorted_deps.keys()):
        sorted_deps.append(library)
        del unsorted_deps[library]

  return sorted_deps
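
# Illustrative example (the graph below is an assumption, not from this file):
#   graph = {'a': set(['b', 'c']), 'b': set(['c']), 'c': set()}
#   GetSortedTransitiveDependencies(['a'], lambda n: graph[n])
# returns ['c', 'b', 'a']; each node appears after all of its dependencies.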


def GetPythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  module_paths = (m.__file__ for m in sys.modules.itervalues()
                  if m is not None and hasattr(m, '__file__'))

  abs_module_paths = map(os.path.abspath, module_paths)

  non_system_module_paths = [
      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
  def ConvertPycToPy(s):
    if s.endswith('.pyc'):
      return s[:-1]
    return s

  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
  return sorted(set(non_system_module_paths))
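
# Typical usage (illustrative; the option name is an assumption): record a
# script's own python imports in its depfile so the build reruns the action
# when they change, e.g.
#   WriteDepfile(options.depfile, GetPythonDependencies())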


def AddDepfileOption(parser):
  parser.add_option('--depfile',
                    help='Path to depfile. This must be specified as the '
                    'action\'s first output.')


def WriteDepfile(path, dependencies):
  with open(path, 'w') as depfile:
    depfile.write(path)
    depfile.write(': ')
    depfile.write(' '.join(dependencies))
    depfile.write('\n')
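
# Illustrative result (paths are assumptions): WriteDepfile('gen/foo.d', ['a.py', 'b.py'])
# writes a single Makefile-style rule to gen/foo.d:
#   gen/foo.d: a.py b.py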


def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json
  and then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  file_jsons = dict()
  r = re.compile('@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    if match.end() != len(arg):
      raise Exception('Unexpected characters after FileArg: ' + arg)

    lookup_path = match.group(1).split(':')
    file_path = lookup_path[0]
    if not file_path in file_jsons:
      file_jsons[file_path] = ReadJson(file_path)

    expansion = file_jsons[file_path]
    for k in lookup_path[1:]:
      expansion = expansion[k]

    new_args[i] = arg[:match.start()] + str(expansion)

  return new_args
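
# Illustrative example (file name and keys are assumptions, not from this file):
# if deps.json contains {"java": {"jar": "out/foo.jar"}}, then
#   ExpandFileArgs(['--jar=@FileArg(deps.json:java:jar)'])
# returns ['--jar=out/foo.jar'].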