#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""

# The root URL for storage.
CHROMIUM_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
WEBKIT_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-webkit-snapshots'

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# Changelogs URL.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?' \
                'url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
                         'changelog?old_version=%s&new_version=%s'

# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'
# Blink Changelogs URL.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                      'perf/dashboard/ui/changelog_blink.html?' \
                      'url=/trunk&range=%d%%3A%d'

DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
GITHASH_TO_SVN_URL = { 'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
                       'blink': BLINK_GITHASH_TO_SVN_URL }
# Search pattern to be matched in the json output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
# Search pattern to be matched in the json output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
SEARCH_PATTERN = { 'chromium': CHROMIUM_SEARCH_PATTERN,
                   'blink': BLINK_SEARCH_PATTERN }
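
# The patterns above match the "git-svn-id" trailer that git-svn appends to
# commit messages, for example (illustrative revision, placeholder UUID):
#   git-svn-id: svn://svn.chromium.org/chrome/trunk/src@282385 <repo-uuid>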

###############################################################################

import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""
  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_aura, use_local_repo, flash_path = None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura
    self.flash_path = flash_path
    # Dictionary which stores an svn revision number as the key and its
    # corresponding git hash as the value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # If the script is run from a local Chromium checkout, the
    # "--use-local-repo" option can be used to make the script run faster.
    # It uses the "git svn find-rev <SHA1>" command to convert a git hash to
    # an svn revision number.
    self.use_local_repo = use_local_repo

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return self.base_url + '/?delimiter=/&prefix=' + \
        self._listing_platform_dir + marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
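    # For snapshot Mac builds, for example, this evaluates to
    # 'chrome-mac/Chromium.app/Contents/MacOS/Chromium'.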
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def IsAuraBuild(self, build):
    """Checks whether the given build is an Aura build."""
    return build.split('.')[3] == '1'

  def IsASANBuild(self, build):
    """Checks whether the given build is an ASAN build."""
    return build.split('.')[3] == '2'
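
  # Official build numbers carry the build variant in their fourth dotted
  # component: for example (hypothetical numbers), "30.0.1599.1" would be
  # treated as an Aura build and "30.0.1599.2" as an ASAN build by the
  # checks above.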

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
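      # The listing is standard Google Storage bucket-listing XML, roughly of
      # this shape (illustrative):
      #   <ListBucketResult xmlns="...">
      #     <Prefix>Linux_x64/</Prefix>
      #     <IsTruncated>true</IsTruncated>
      #     <NextMarker>Linux_x64/123456/</NextMarker>
      #     <CommonPrefixes><Prefix>Linux_x64/123450/</Prefix></CommonPrefixes>
      #   </ListBucketResult>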
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      githash_svn_dict = {}
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          if not revnum.isdigit():
            git_hash = revnum
            revnum = self.GetSVNRevisionFromGitHash(git_hash)
            githash_svn_dict[revnum] = git_hash
          if revnum is not None:
            revnum = int(revnum)
            revisions.append(revnum)
        except ValueError:
          pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    (revisions, next_marker, self.githash_svn_dict) =\
        _FetchAndParse(self.GetListingURL())
    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    try:
      response = urllib.urlopen(json_url)
    except IOError:
      # urllib.urlopen raises IOError on fetch failures; there is no
      # urllib.HTTPError in Python 2 (that class lives in urllib2).
      print 'Failed to fetch %s for %s' % (json_url, git_sha1)
      return None
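    # Gitiles prepends an anti-XSSI prefix (")]}'") to its JSON responses;
    # skipping the first four bytes strips it before parsing.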
    data = json.loads(response.read()[4:])
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    return None

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      if path:
        original_path = os.getcwd()
        os.chdir(path)
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (output, _) = proc.communicate()

      if path:
        os.chdir(original_path)
      return (output, proc.returncode)

    path = None
    if depot == 'blink':
      path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
    if os.path.basename(os.getcwd()) == 'src':
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        return git_output.strip('\n')
      return None
    else:
      print ('Script should be run from the src folder. ' +
             'E.g.: python tools/bisect-builds.py -g 280588 -b 280590 ' +
             '--archive linux64 --use-local-repo')
      sys.exit(1)

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.use_local_repo:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist_all = map(int, self.ParseDirectoryIndex())

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    i = 0
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
             self._listing_platform_dir + self.archive_name
      i = i + 1
      try:
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with ".1".
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we can not include builds which end with '.1' or '.2' since
          # they have a different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except IOError:
        # The archive for this build number does not exist for the requested
        # platform; skip it. (urllib.urlopen raises IOError on failures.)
        pass
    return final_list

def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.
  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError, e:
    pass


def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print "Trying revision %s..." % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version; it does not currently need
    # to be the real one. Instead of requiring the user of the script to figure
    # out and pass the correct version, we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  runcommand = []
  for token in shlex.split(command):
    if token == "%a":
      runcommand.extend(testargs)
    else:
      runcommand.append( \
          token.replace('%p', os.path.abspath(context.GetLaunchPath())) \
          .replace('%s', ' '.join(testargs)))
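
  # For example, a --command value such as "%p %a http://example.com/"
  # (illustrative) expands here to the downloaded Chrome binary, the test
  # arguments assembled above, and the extra URL argument.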

  results = []
  for i in range(0, num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception, e:
    pass

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]


def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is ' \
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""
  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zipfile,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print "Downloading revision %s..." % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()


def Bisect(base_url,
           platform,
           official_builds,
           is_aura,
           use_local_repo,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           command="%p %a",
           try_args=(),
           profile=None,
           flash_path=None,
           interactive=True,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param interactive If it is false, use the command exit code to judge whether
                     the build is good or bad.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

  - If rev 50 is good, the download of rev 25 is cancelled, and the next test
    is run on rev 75.

  - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
    is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  context = PathContext(base_url, platform, good_rev, bad_rev,
                        official_builds, is_aura, use_local_repo, flash_path)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zipfile and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = "bad", "good"
    else:
      min_str, max_str = "good", "bad"
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str, \
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision turns
    #     out to be bad.
    #   - up_pivot is the next revision to check if the current revision turns
    #     out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zipfile,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if not interactive:
        if status:
          answer = 'b'
          print 'Bad revision: %s' % rev
        else:
          answer = 'g'
          print 'Good revision: %s' % rev
      else:
        answer = evaluate(rev, official_builds, status, stdout, stderr)
      if answer == 'g' and good_rev < bad_rev or \
          answer == 'b' and bad_rev < good_rev:
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
          fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif answer == 'b' and good_rev < bad_rev or \
          answer == 'g' and bad_rev < good_rev:
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
          fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])


def GetBlinkDEPSRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in the DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision\D*(\d+)')
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get Blink revision for Chromium rev %d'
                    % rev)


def GetBlinkRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|."""
  def _IsRevisionNumber(revision):
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  if str(rev) in self.githash_svn_dict:
    rev = self.githash_svn_dict[str(rev)]
  file_url = "%s/%s%s/REVISIONS" % (self.base_url,
                                    self._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  data = json.loads(url.read())
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      blink_rev = self.GetSVNRevisionFromGitHash(blink_rev, 'blink')
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)

def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for a blink bisect; the DEPS and REVISIONS files might not match since
  blink snapshots point to tip-of-tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)

  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx-1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev

def GetChromiumRevision(context, url):
  """Returns the chromium revision read from given URL."""
  try:
    # Location of the latest build revision number.
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception, e:
    print('Could not determine latest revision. This could be bad...')
    return 999999999


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect; they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-o', action="store_true", dest='official_builds',
                    help = 'Bisect across official ' +
                           'Chrome builds (internal only) instead of ' +
                           'Chromium archives.')
  parser.add_option('-b', '--bad', type = 'str',
                    help = 'A bad revision to start bisection. ' +
                           'May be earlier or later than the good revision. ' +
                           'Default is HEAD.')
  parser.add_option('-f', '--flash_path', type = 'str',
                    help = 'Absolute path to a recent Adobe Pepper Flash ' +
                           'binary to be used in this bisection (e.g. ' +
                           'on Windows C:\...\pepflashplayer.dll and on Linux ' +
                           '/opt/google/chrome/PepperFlash/libpepflashplayer.so).')
  parser.add_option('-g', '--good', type = 'str',
                    help = 'A good revision to start bisection. ' +
                           'May be earlier or later than the bad revision. ' +
                           'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  parser.add_option('-t', '--times', type = 'int',
                    help = 'Number of times to run each build before asking ' +
                           'if it\'s good or bad. Temporary profiles are reused.',
                    default = 1)
  parser.add_option('-c', '--command', type = 'str',
                    help = 'Command to execute. %p and %a refer to Chrome ' +
                           'executable and specified extra arguments respectively. ' +
                           'Use %s to specify all extra arguments as one string. ' +
                           'Defaults to "%p %a". Note that any extra paths ' +
                           'specified should be absolute.',
                    default = '%p %a')
  parser.add_option('-l', '--blink', action='store_true',
                    help = 'Use Blink bisect instead of Chromium.')
  parser.add_option('', '--not-interactive', action='store_true',
                    help = 'Use command exit code to tell good/bad revision.',
                    default=False)
  parser.add_option('--aura',
                    dest='aura',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect aura builds')
  parser.add_option('--use-local-repo',
                    dest='use_local_repo',
                    action='store_true',
                    default=False,
                    help='Allow the script to convert git SHA1 to SVN ' +
                         'revision using "git svn find-rev <SHA1>" ' +
                         'command from a Chromium checkout.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.aura:
    if opts.archive != 'win' or not opts.official_builds:
      print 'Error: Aura is supported only on Windows platform '\
            'and official builds.'
      return 1

  if opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, 0, 0,
                        opts.official_builds, opts.aura, opts.use_local_repo,
                        None)
  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = '999.0.0.0'
    if not opts.official_builds:
      bad_rev = GetChromiumRevision(context, context.GetLastChangeURL())

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    flash_path = opts.flash_path
    msg = 'Could not find Flash binary at %s' % flash_path
    assert os.path.exists(flash_path), msg

  if opts.official_builds:
    good_rev = LooseVersion(good_rev)
    bad_rev = LooseVersion(bad_rev)
  else:
    good_rev = int(good_rev)
    bad_rev = int(bad_rev)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  (min_chromium_rev, max_chromium_rev) = Bisect(
      base_url, opts.archive, opts.official_builds, opts.aura,
      opts.use_local_repo, good_rev, bad_rev, opts.times, opts.command,
      args, opts.profile, opts.flash_path, not opts.not_interactive)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception, e:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ("NOTE: There is a Blink roll in the range, "
             "you might also want to do a Blink bisect.")

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)

if __name__ == '__main__':
  sys.exit(main())