tools/bisect-builds.py
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""

# The root URL for storage.
BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# Changelogs URL.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?' \
                'url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/' \
                         'changelog?old_version=%s&new_version=%s'

# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# Blink Changelogs URL.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                      'perf/dashboard/ui/changelog_blink.html?' \
                      'url=/trunk&range=%d%%3A%d'

DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'

###############################################################################

import math
import optparse
import os
import pipes
import re
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""
  def __init__(self, platform, good_revision, bad_revision, is_official,
               is_aura):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \
        marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      return "%s/%s%s/%s" % (
          BASE_URL, self._listing_platform_dir, revision, self.archive_name)
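  # For reference, the two branches above produce URLs shaped like the
  # following (the revision values are illustrative placeholders):
  #   official: http://master.chrome.corp.google.com/official_builds/
  #             30.0.1599.0/win/chrome-win32.zip
  #   snapshot: http://commondatastorage.googleapis.com/
  #             chromium-browser-snapshots/Linux_x64/200000/chrome-linux.zip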

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def IsAuraBuild(self, build):
    """Checks whether the given build is an Aura build."""
    return build.split('.')[3] == '1'

  def IsASANBuild(self, build):
    """Checks whether the given build is an ASAN build."""
    return build.split('.')[3] == '2'
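  # Illustrative example: official build numbers have four dot-separated
  # components, and the two helpers above key off the last one. With a
  # placeholder version such as '30.0.1599.1', split('.')[3] is '1', so it is
  # treated as an Aura build; '30.0.1599.2' would be treated as an ASAN build.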

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
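      # The listing parsed below is a namespaced XML document. A heavily
      # trimmed, illustrative response (not captured from the real server;
      # only the element names read by this code are meaningful) looks like:
      #   <ListBucketResult xmlns="...">
      #     <Prefix>Linux_x64/</Prefix>
      #     <IsTruncated>true</IsTruncated>
      #     <NextMarker>Linux_x64/200123/</NextMarker>
      #     <CommonPrefixes><Prefix>Linux_x64/200000/</Prefix></CommonPrefixes>
      #     <CommonPrefixes><Prefix>Linux_x64/200100/</Prefix></CommonPrefixes>
      #   </ListBucketResult>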
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text

      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          revnum = int(revnum)
          revisions.append(revnum)
        except ValueError:
          pass
      return (revisions, next_marker)

    # Fetch the first list of revisions.
    (revisions, next_marker) = _FetchAndParse(self.GetListingURL())

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
    return revisions

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist = map(int, self.ParseDirectoryIndex())
    revlist = [x for x in revlist if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    i = 0
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
             self._listing_platform_dir + self.archive_name
      i = i + 1
      try:
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with ".1".
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we cannot include builds which end with '.1' or '.2' since
          # they have a different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except urllib.HTTPError, e:
        pass
    return final_list


def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| to directory |dir|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(dir):
    os.mkdir(dir)
  os.chdir(dir)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      dir = os.path.dirname(name)
      if not os.path.isdir(dir):
        os.makedirs(dir)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.
  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError, e:
    pass


def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print "Trying revision %s..." % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if context.is_official and context.platform.startswith('linux'):
    testargs.append('--no-sandbox')

  runcommand = []
  for token in command.split():
    if token == "%a":
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', context.GetLaunchPath())
          .replace('%s', ' '.join(testargs)))
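  # For example (illustrative values): with the default command '%p %a',
  # profile 'profile' and extra args ['--no-first-run'], runcommand ends up as
  #   [<launch path>, '--user-data-dir=profile', '--no-first-run'],
  # while '%s' would substitute the same extra arguments as a single
  # space-joined string.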

  for i in range(0, num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception, e:
    pass

  return (subproc.returncode, stdout, stderr)


def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""
  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zipfile,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print "Downloading revision %s..." % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()


def Bisect(platform,
           official_builds,
           is_aura,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           command="%p %a",
           try_args=(),
           profile=None,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param is_aura Whether to bisect the Windows Aura official builds.
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param command The command to run for each build; '%p' and '%a' expand to the
                 launch path and the extra arguments respectively.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

    - If rev 50 is good, the download of rev 25 is cancelled, and the next test
      is run on rev 75.

    - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
      is run on rev 25.
  """
  if not profile:
    profile = 'profile'

  context = PathContext(platform, good_rev, bad_rev, official_builds, is_aura)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zipfile and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = "bad", "good"
    else:
      min_str, max_str = "good", "bad"
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    # - down_pivot is the next revision to check if the current revision turns
    #   out to be bad.
    # - up_pivot is the next revision to check if the current revision turns
    #   out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zipfile,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      answer = evaluate(rev, official_builds, status, stdout, stderr)
      if answer == 'g' and good_rev < bad_rev or \
          answer == 'b' and bad_rev < good_rev:
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif answer == 'b' and good_rev < bad_rev or \
          answer == 'g' and bad_rev < good_rev:
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])


def GetBlinkRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in chromium's DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision.:\D*(\d+)')
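  # The regex above is meant to match a DEPS entry shaped roughly like the
  # following (illustrative, not an exact excerpt from a real DEPS file):
  #   "webkit_revision": "151234",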
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)


def GetChromiumRevision(url):
  """Returns the chromium revision read from given URL."""
  try:
    # Location of the latest build revision number
    return int(urllib.urlopen(url).read())
  except Exception, e:
    print('Could not determine latest revision. This could be bad...')
    return 999999999


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect; they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-o', action="store_true", dest='official_builds',
                    help = 'Bisect across official ' +
                           'Chrome builds (internal only) instead of ' +
                           'Chromium archives.')
  parser.add_option('-b', '--bad', type = 'str',
                    help = 'A bad revision to start bisection. ' +
                           'May be earlier or later than the good revision. ' +
                           'Default is HEAD.')
  parser.add_option('-g', '--good', type = 'str',
                    help = 'A good revision to start bisection. ' +
                           'May be earlier or later than the bad revision. ' +
                           'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  parser.add_option('-t', '--times', type = 'int',
                    help = 'Number of times to run each build before asking ' +
                           'if it\'s good or bad. Temporary profiles are reused.',
                    default = 1)
  parser.add_option('-c', '--command', type = 'str',
                    help = 'Command to execute. %p and %a refer to Chrome ' +
                           'executable and specified extra arguments ' +
                           'respectively. ' +
                           'Use %s to specify all extra arguments as one string. ' +
                           'Defaults to "%p %a". Note that any extra paths ' +
                           'specified should be absolute.',
                    default = '%p %a')
  parser.add_option('--aura',
                    dest='aura',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect aura builds')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.aura:
    if opts.archive != 'win' or not opts.official_builds:
      print 'Error: Aura is supported only on the Windows platform ' \
            'with official builds.'
      return 1
  # Create the context. The revisions are initialized to 0 here; they are set
  # below.
  context = PathContext(opts.archive, 0, 0, opts.official_builds, opts.aura)
  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = '999.0.0.0'
    if not opts.official_builds:
      bad_rev = GetChromiumRevision(context.GetLastChangeURL())

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = '0.0.0.0' if opts.official_builds else 0

  if opts.official_builds:
    good_rev = LooseVersion(good_rev)
    bad_rev = LooseVersion(bad_rev)
  else:
    good_rev = int(good_rev)
    bad_rev = int(bad_rev)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  (min_chromium_rev, max_chromium_rev) = Bisect(
      opts.archive, opts.official_builds, opts.aura, good_rev, bad_rev,
      opts.times, opts.command, args, opts.profile)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(max_chromium_rev)
  except Exception, e:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  # We're done. Let the user know the results in an official manner.
  if good_rev > bad_rev:
    print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev), str(max_chromium_rev))
  else:
    print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev), str(max_chromium_rev))

  if min_blink_rev != max_blink_rev:
    print 'BLINK CHANGELOG URL:'
    print '  ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)
  print 'CHANGELOG URL:'
  if opts.official_builds:
    print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
  else:
    print '  ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)


if __name__ == '__main__':
  sys.exit(main())