#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Adaptor script called through build/isolate.gypi.

Creates a wrapping .isolate which 'includes' the original one, so that it can
be consumed by tools/swarming_client/isolate.py. Path variables are determined
based on the current working directory. The relative_cwd in the .isolated file
is determined based on the .isolate file that declares the 'command' variable
to be used, so the wrapping .isolate doesn't affect this value.

This script loads build.ninja and processes it to determine all the
executables referenced by the isolated target. It adds them to the wrapping
.isolate file.

WARNING: The target to use for build.ninja analysis is the base name of the
.isolate file plus '_run'. For example, 'foo_test.isolate' would have the
target 'foo_test_run' analyzed.
"""
import glob
import json
import logging
import os
import posixpath
import StringIO
import subprocess
import sys
import time

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
SRC_DIR = os.path.dirname(TOOLS_DIR)

sys.path.insert(0, SWARMING_CLIENT_DIR)

import isolate_format
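# isolate_format is provided by tools/swarming_client/, which is not on
# sys.path by default; hence the sys.path.insert() above.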


def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build referenced in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant), which is currently
  in the <200ms range for a complete chromium tree. As such the code is laid
  out for performance instead of readability.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed
          # will be processed with raw_build_to_deps(). This saves a good 70ms
          # of processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an awful
      # lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total


def load_ninja(build_dir):
  """Loads the tree of .ninja files in build_dir."""
  build_steps = {}
  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
  return build_steps


def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
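  # Illustrative behavior: items like 'obj/base/foo.o' or 'gen/bar.h' are
  # filtered out by extension, while 'lib/libbase.so', 'base.dll.lib' or an
  # extension-less executable such as 'base_unittests' are kept for analysis.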
  # *.json is ignored below; *.isolated.gen.json is an exception, as it is
  # produced by isolate_driver.py in 'test_isolation_mode==prepare'.
  if item.endswith('.isolated.gen.json'):
    return True
  IGNORED = (
    '.a', '.cc', '.css', '.dat', '.def', '.frag', '.h', '.html', '.isolate',
    '.js', '.json', '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.py',
    '.strings', '.test', '.txt', '.vert',
  )
  # ninja files use native path format.
  ext = os.path.splitext(item)[1]
  if ext in IGNORED:
    return False
  # Special case Windows: keep .dll.lib but discard .lib.
  if item.endswith('.dll.lib'):
    return True
  if ext == '.lib':
    return False
  return item not in ('', '|', '||')


def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
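  # Illustrative example: for the raw value
  #   'link obj/foo/foo_test.main.o lib/libbase.so | obj/foo.stamp'
  # the rule name 'link' is dropped, and the blacklist removes the .o file and
  # the '|' separator, leaving ['lib/libbase.so', 'obj/foo.stamp'].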
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
  return filter(using_blacklist, item.split(' ')[1:])


def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively adds all the interesting dependencies for |target|
  into |dependencies_added|.
  """
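  # |rules_seen| is shared across the whole recursion (it is created on the
  # first call, when None is passed in), so each target is expanded at most
  # once; any repeat visit is logged as a circular dependency and skipped.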
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)
  try:
    dependencies = raw_build_to_deps(build_steps[target])
  except KeyError:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  logging.debug('collect_deps(%s) -> %s', target, dependencies)
  for dependency in dependencies:
    dependencies_added.add(dependency)
    collect_deps(dependency, build_steps, dependencies_added, rules_seen)


def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def filter_item(i):
    if i.endswith('.so.TOC'):
      # Remove only the suffix .TOC, not the .so!
      return i[:-4]
    if i.endswith('.dylib.TOC'):
      # Remove only the suffix .TOC, not the .dylib!
      return i[:-4]
    if i.endswith('.dll.lib'):
      # Remove only the suffix .lib, not the .dll!
      return i[:-4]
    return i
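  # Illustrative mappings for filter_item(): 'lib/libbase.so.TOC' ->
  # 'lib/libbase.so' and 'base.dll.lib' -> 'base.dll'; anything else passes
  # through unchanged.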

  def is_exe(i):
    # This script is only for adding new binaries that are created as part of
    # the component build.
    ext = os.path.splitext(i)[1]
    # On POSIX, executables have no extension.
    if ext not in ('', '.dll', '.dylib', '.exe', '.nexe', '.so'):
      return False
    if os.path.isabs(i):
      # In some rare cases, a dependency is set explicitly on files outside
      # the checkout.
      return False

    # Check for execute access and strip directories. This gets rid of all the
    # phony rules.
    p = os.path.join(build_dir, i)
    return os.access(p, os.X_OK) and not os.path.isdir(p)

  return filter(is_exe, map(filter_item, dependencies))


def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.

  The original .isolate is not modified.
  """
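  # The generated wrapper file ends up looking roughly like this (paths are
  # illustrative only):
  #   # Warning: this file was AUTOGENERATED.
  #   # DO NOT EDIT.
  #   {
  #     'includes': ['../../../../chrome/unit_tests.isolate'],
  #     'variables': {
  #       'files': ['<(PRODUCT_DIR)/lib/libbase.so'],
  #     },
  #   }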
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified path-less in cwd.
  # Fix if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. It is based against cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is based against
  # SRC_DIR. It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate file.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # primary target plus '_run'. Fix accordingly if this doesn't hold true,
  # e.g. complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps))

  # Now write the actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      'files': sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  if not os.path.isdir(temp_isolate_dir):
    os.makedirs(temp_isolate_dir)
  comment = (
      '# Warning: this file was AUTOGENERATED.\n'
      '# DO NOT EDIT.\n')
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  args[isolate_index] = temp_isolate


def prepare_isolate_call(args, output):
  """Gathers all information required to run isolate.py later.

  Dumps it as JSON to |output| file.
  """
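  # The dumped file is consumed by a later isolate.py run; its contents look
  # roughly like this (values are illustrative only):
  #   {
  #     "args": ["--isolate", "foo_test.isolate", ...],
  #     "dir": "/path/to/src/out/Debug",
  #     "version": 1
  #   }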
  with open(output, 'wb') as f:
    json.dump({
      'args': args,
      'dir': os.getcwd(),
      'version': 1,
    }, f, indent=2, sort_keys=True)


def main():
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  mode = args[0] if args else None
  isolate = None
  isolated = None
  for i, arg in enumerate(args):
    if arg == '--isolate':
      isolate = i + 1
    if arg == '--isolated':
      isolated = i + 1
  if isolate is None or isolated is None or not mode:
    print >> sys.stderr, 'Internal failure'
    return 1
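
  # create_wrapper() rewrites args[isolate] in place, so both the 'prepare'
  # path below and the forwarded isolate.py call use the generated wrapping
  # .isolate instead of the original one.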
  create_wrapper(args, isolate, isolated)

  # In 'prepare' mode, just collect all the information required for a
  # postponed isolate.py invocation and store it in a *.isolated.gen.json
  # file.
  if mode == 'prepare':
    prepare_isolate_call(args[1:], args[isolated] + '.gen.json')
    return 0

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  result = subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
  return result


if __name__ == '__main__':
  sys.exit(main())