# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
6 """Adaptor script called through build/isolate.gypi.
8 Creates a wrapping .isolate which 'includes' the original one, that can be
9 consumed by tools/swarming_client/isolate.py. Path variables are determined
10 based on the current working directory. The relative_cwd in the .isolated file
11 is determined based on the .isolate file that declare the 'command' variable to
12 be used so the wrapping .isolate doesn't affect this value.
14 This script loads build.ninja and processes it to determine all the executables
15 referenced by the isolated target. It adds them in the wrapping .isolate file.
17 WARNING: The target to use for build.ninja analysis is the base name of the
18 .isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
19 'foo_test_run' analysed.
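
# For orientation, build/isolate.gypi invokes this script with a mode followed
# by the flags that main() scans for below. A hypothetical invocation (paths
# and file names are made up) would look roughly like:
#   python tools/isolate_driver.py prepare \
#       --isolate unit_tests.isolate \
#       --isolated ../out/Release/unit_tests.isolated \
#       component=shared_library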

import json
import logging
import os
import posixpath
import StringIO
import subprocess
import sys

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
SRC_DIR = os.path.dirname(TOOLS_DIR)

sys.path.insert(0, SWARMING_CLIENT_DIR)

import isolate_format
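

# For reference, the only two kinds of .ninja statements the parser below
# cares about look roughly like this (targets are hypothetical):
#   build foo_test: link obj/foo/foo_test.main.o | lib/libbase.so.TOC
#   subninja obj/third_party/skia/skia.ninja
# Overlong statements are continued on the following line with a trailing '$',
# which is why consecutive physical lines may have to be merged before parsing.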
def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build statements referenced in
  ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant), which is currently in
  the <200ms range for a complete chromium tree. As such the code is laid out
  for performance instead of readability.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed
          # will be processed with raw_build_to_deps(). This saves a good 70ms
          # of processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an awful
      # lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total


def load_ninja(build_dir):
  """Loads the tree of .ninja files in build_dir."""
  build_steps = {}
  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
  return build_steps


def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  IGNORED = (
    '.a', '.cc', '.css', '.def', '.frag', '.h', '.html', '.js', '.json',
    '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.test',
  )
  # ninja files use native path format.
  ext = os.path.splitext(item)[1]
  if ext in IGNORED:
    return False
  # Special case Windows, keep .dll.lib but discard .lib.
  if item.endswith('.dll.lib'):
    return True
  if ext == '.lib':
    return False
  return item not in ('', '|', '||')


def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
  return filter(using_blacklist, item.split(' ')[1:])
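

# As an illustration (with made-up paths), a stored raw statement such as
#   'link obj/foo/foo.main.o lib/libbar.so || obj/base/base.stamp'
# is converted by raw_build_to_deps() into ['lib/libbar.so',
# 'obj/base/base.stamp']: the leading rule name is dropped and
# using_blacklist() discards the .o file and the '||' separator.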


def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively adds all the interesting dependencies for |target|
  into |dependencies_added|.
  """
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)
  try:
    dependencies = raw_build_to_deps(build_steps[target])
  except KeyError:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  logging.debug('collect_deps(%s) -> %s', target, dependencies)
  for dependency in dependencies:
    dependencies_added.add(dependency)
    collect_deps(dependency, build_steps, dependencies_added, rules_seen)
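

# For example, starting from a hypothetical 'unit_tests_run' target,
# collect_deps() walks the phony and link steps reachable from it and
# accumulates every interesting output, e.g. 'lib/libbase.so.TOC', into
# |dependencies_added|.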


def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def filter_item(i):
    if i.endswith('.so.TOC'):
      # Remove only the suffix .TOC, not the .so!
      return i[:-len('.TOC')]
    if i.endswith('.dylib.TOC'):
      # Remove only the suffix .TOC, not the .dylib!
      return i[:-len('.TOC')]
    if i.endswith('.dll.lib'):
      # Remove only the suffix .lib, not the .dll!
      return i[:-len('.lib')]
    return i

  def f(i):
    # This script is only for adding new binaries that are created as part of
    # the component build.
    ext = os.path.splitext(i)[1]
    if ext not in ['.dll', '.nexe', '.so', '.dylib']:
      return False
    # Check for execute access and strip directories. This gets rid of all the
    # phony rules.
    p = os.path.join(build_dir, i)
    return os.access(p, os.X_OK) and not os.path.isdir(p)

  return filter(f, map(filter_item, dependencies))
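

# Sketch of the OS specific post-processing above, with made-up file names:
#   'lib/libbase.so.TOC'  -> 'lib/libbase.so'  (kept if executable)
#   'base_unittests.pdb'  -> dropped           (not a shared library)
#   'obj/base/base.stamp' -> dropped           (not a shared library)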


def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.

  The original .isolate is not modified.
  """
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified path-less in cwd.
  # Fix if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. This is based against cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is based against SRC_DIR.
  # It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate file.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
  # complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))

  # Now do the actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      'files': sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  if not os.path.isdir(temp_isolate_dir):
    os.makedirs(temp_isolate_dir)
  comment = '# Warning: this file was AUTOGENERATED.\n'
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  args[isolate_index] = temp_isolate
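

# The wrapper written to |temp_isolate| ends up looking roughly like this
# (everything here is illustrative; the paths and the 'files' variable name
# follow the assumptions made above):
#   # Warning: this file was AUTOGENERATED.
#   {
#     'includes': [
#       '../../../chrome/unit_tests.isolate',
#     ],
#     'variables': {
#       'files': [
#         '<(PRODUCT_DIR)/lib/libuser_prefs.so',
#       ],
#     },
#   }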


def prepare_isolate_call(args, output):
  """Gathers all information required to run isolate.py later.

  Dumps it as JSON to |output| file.
  """
  with open(output, 'wb') as f:
    json.dump({
      'args': args,
      'dir': os.getcwd(),
      'version': 1,
    }, f, indent=2, sort_keys=True)
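

# The resulting *.isolated.gen.json is consumed later to replay the isolate.py
# call with the same arguments and working directory; a hypothetical dump:
#   {
#     "args": ["--isolate", "unit_tests.isolate",
#              "--isolated", "unit_tests.isolated"],
#     "dir": "/path/to/src/out/Release",
#     "version": 1
#   }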


def main():
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  mode = args[0] if args else None
  isolate = None
  isolated = None
  is_component = False
  for i, arg in enumerate(args):
    if arg == '--isolate':
      isolate = i + 1
    if arg == '--isolated':
      isolated = i + 1
    if arg == 'component=shared_library':
      is_component = True
  if isolate is None or isolated is None or not mode:
    print >> sys.stderr, 'Internal failure'
    return 1

  if is_component:
    create_wrapper(args, isolate, isolated)

  # In 'prepare' mode, just collect all the information required for a
  # postponed isolate.py invocation and store it in a *.isolated.gen.json file.
  if mode == 'prepare':
    prepare_isolate_call(args[1:], args[isolated] + '.gen.json')
    return 0

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  result = subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
  return result


if __name__ == '__main__':
  sys.exit(main())