#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""

# The root URL for storage.
CHROMIUM_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
WEBKIT_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-webkit-snapshots'

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# Changelogs URL.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?' \
                'url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/' \
                         'changelog?old_version=%s&new_version=%s'

# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'
# Blink Changelogs URL.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                      'perf/dashboard/ui/changelog_blink.html?' \
                      'url=/trunk&range=%d%%3A%d'

DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
GITHASH_TO_SVN_URL = { 'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
                       'blink': BLINK_GITHASH_TO_SVN_URL }
# Search pattern to be matched in the json output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
# Search pattern to be matched in the json output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
SEARCH_PATTERN = { 'chromium': CHROMIUM_SEARCH_PATTERN,
                   'blink': BLINK_SEARCH_PATTERN }
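
# For reference, the git-svn-id footer that these patterns are matched against
# looks roughly like this (the revision number and trailing repository UUID
# are made-up placeholders):
#   git-svn-id: svn://svn.chromium.org/chrome/trunk/src@123456 <repo-uuid>
# The captured group is the trailing SVN revision number (123456 above).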

###############################################################################

import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_aura, use_local_repo, flash_path=None,
               pdf_path=None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura
    self.flash_path = flash_path
    # Dictionary which stores an svn revision number as key and its
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # If the script is run from a local Chromium checkout, the
    # "--use-local-repo" option can be used to make the script run faster.
    # It uses the "git svn find-rev <SHA1>" command to convert a git hash
    # to an svn revision number.
    self.use_local_repo = use_local_repo

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return self.base_url + '/?delimiter=/&prefix=' + \
        self._listing_platform_dir + marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)
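
  # For illustration (the revision number below is a placeholder), a snapshot
  # download URL built by GetDownloadURL() for the 'linux64' archive looks
  # like:
  #   <CHROMIUM_BASE_URL>/Linux_x64/123456/chrome-linux.zip
  # while official-build URLs are rooted at OFFICIAL_BASE_URL instead.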

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction
    location) that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def IsAuraBuild(self, build):
    """Checks whether the given build is an Aura build."""
    return build.split('.')[3] == '1'

  def IsASANBuild(self, build):
    """Checks whether the given build is an ASAN build."""
    return build.split('.')[3] == '2'
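
  # Official build numbers are dotted version strings such as '35.0.1916.1'
  # (a made-up example); IsAuraBuild() and IsASANBuild() only look at the
  # fourth component, so '.1' marks an Aura build and '.2' an ASAN build.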

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and
      another fetch should be performed with next-marker being the marker= GET
      parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to
      # extract the namespace. ElementTree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      githash_svn_dict = {}
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          if not revnum.isdigit():
            git_hash = revnum
            revnum = self.GetSVNRevisionFromGitHash(git_hash)
            githash_svn_dict[revnum] = git_hash
          if revnum is not None:
            revnum = int(revnum)
            revisions.append(revnum)
        except ValueError:
          pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    (revisions, next_marker, self.githash_svn_dict) = \
        _FetchAndParse(self.GetListingURL())
    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
    return revisions
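
  # Assuming the usual storage-bucket listing format, the XML parsed by
  # _FetchAndParse() above looks roughly like the following (values are
  # placeholders):
  #   <ListBucketResult>
  #     <Prefix>Linux_x64/</Prefix>
  #     <IsTruncated>true</IsTruncated>
  #     <NextMarker>Linux_x64/123456/</NextMarker>
  #     <CommonPrefixes><Prefix>Linux_x64/123456/</Prefix></CommonPrefixes>
  #   </ListBucketResult>
  # Only the trailing revision component of each <Prefix> entry is kept.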

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    try:
      response = urllib.urlopen(json_url)
    except urllib.HTTPError as error:
      msg = 'HTTP Error %d for %s' % (error.getcode(), git_sha1)
      return None
    data = json.loads(response.read()[4:])
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message) - 1])
      if result:
        return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      if path:
        original_path = os.getcwd()
        os.chdir(path)
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (output, _) = proc.communicate()

      if path:
        os.chdir(original_path)
      return (output, proc.returncode)

    path = None
    if depot == 'blink':
      path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
    if os.path.basename(os.getcwd()) == 'src':
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        return git_output.strip('\n')
      raise ValueError
    else:
      print ('Script should be run from the src folder. '
             'E.g.: python tools/bisect-builds.py -g 280588 -b 280590 '
             '--archive linux64 --use-local-repo')
      sys.exit(1)

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.use_local_repo:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist_all = map(int, self.ParseDirectoryIndex())

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

      # Fix chromium rev so that the deps blink revision matches REVISIONS file.
      if self.base_url == WEBKIT_BASE_URL:
        revlist_all.sort()
        self.good_revision = FixChromiumRevForBlink(revlist,
                                                    revlist_all,
                                                    self,
                                                    self.good_revision)
        self.bad_revision = FixChromiumRevForBlink(revlist,
                                                   revlist_all,
                                                   self,
                                                   self.bad_revision)
    return revlist
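
  # Note: not every revision in the requested range has an archived snapshot,
  # so the good/bad bounds above are snapped to the first and last revisions
  # that actually appear in the directory listing.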

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    i = 0
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
          self._listing_platform_dir + self.archive_name
      i = i + 1
      try:
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with ".1".
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we cannot include builds which end with '.1' or '.2' since
          # they have a different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except urllib.HTTPError, e:
        pass
    return final_list
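
# The official-builds page scraped by GetOfficialBuildsList() is assumed to be
# a plain directory index whose links look like <a href="34.0.1801.0/"> (the
# version number is illustrative); that is what the re.findall() pattern above
# extracts.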


def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master
                        thread to indicate that the progress of the download
                        should be displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError, e:
    pass


def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print "Trying revision %s..." % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently needs not
    # be correct. Instead of requiring the user of the script to figure out and
    # pass the correct version we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path, os.path.dirname(context.GetLaunchPath()))
    testargs.append('--enable-print-preview')

  runcommand = []
  for token in shlex.split(command):
    if token == "%a":
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath()))
               .replace('%s', ' '.join(testargs)))
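
  # As an example (this command line is hypothetical, not part of the script),
  # invoking the script with:
  #   -c "xvfb-run %p %a http://example.com"
  # expands %p to the absolute path of the unzipped browser binary and %a to
  # the extra test arguments assembled above before the command is run below.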

  results = []
  for i in range(0, num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception, e:
    pass

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]


def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zipfile,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print "Downloading revision %s..." % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()


def Bisect(base_url,
           platform,
           official_builds,
           is_aura,
           use_local_repo,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           command="%p %a",
           try_args=(),
           profile=None,
           flash_path=None,
           pdf_path=None,
           interactive=True,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param interactive If it is false, use the command exit code to judge
                     whether the build is good or bad.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

    - If rev 50 is good, the download of rev 25 is cancelled, and the next test
      is run on rev 75.

    - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
      is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  context = PathContext(base_url, platform, good_rev, bad_rev,
                        official_builds, is_aura, use_local_repo, flash_path,
                        pdf_path)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zipfile and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = "bad", "good"
    else:
      min_str, max_str = "good", "bad"
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision
    #     turns out to be bad.
    #   - up_pivot is the next revision to check if the current revision
    #     turns out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()
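
    # As a concrete instance of the arithmetic above: with minrev=0,
    # maxrev=100 and pivot=50, down_pivot is 25 and up_pivot is 75, matching
    # the prefetching example in the Bisect() docstring.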

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zipfile,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if not interactive:
        if status:
          answer = 'b'
          print 'Bad revision: %s' % rev
        else:
          answer = 'g'
          print 'Good revision: %s' % rev
      else:
        answer = evaluate(rev, official_builds, status, stdout, stderr)
      if answer == 'g' and good_rev < bad_rev or \
          answer == 'b' and bad_rev < good_rev:
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
          fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif answer == 'b' and good_rev < bad_rev or \
          answer == 'g' and bad_rev < good_rev:
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
          fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])


def GetBlinkDEPSRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in the DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision\D*(\d+)')
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get Blink revision for Chromium rev %d'
                    % rev)


def GetBlinkRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in the REVISIONS file at
  chromium revision |rev|."""
  def _IsRevisionNumber(revision):
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  if str(rev) in self.githash_svn_dict:
    rev = self.githash_svn_dict[str(rev)]
  file_url = "%s/%s%s/REVISIONS" % (self.base_url,
                                    self._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  data = json.loads(url.read())
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      blink_rev = self.GetSVNRevisionFromGitHash(blink_rev, 'blink')
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)


def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for a blink bisect; the DEPS and REVISIONS files might not match since
  blink snapshots point to tip-of-tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)

  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx - 1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev


def GetChromiumRevision(context, url):
  """Returns the chromium revision read from the given URL."""
  try:
    # Location of the latest build revision number.
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception, e:
    print('Could not determine latest revision. This could be bad...')
    return 999999999


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect, they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                         '|'.join(choices))
  parser.add_option('-o', action="store_true", dest='official_builds',
                    help='Bisect across official ' +
                         'Chrome builds (internal only) instead of ' +
                         'Chromium archives.')
  parser.add_option('-b', '--bad', type='str',
                    help='A bad revision to start bisection. ' +
                         'May be earlier or later than the good revision. ' +
                         'Default is HEAD.')
  parser.add_option('-f', '--flash_path', type='str',
                    help='Absolute path to a recent Adobe Pepper Flash ' +
                         'binary to be used in this bisection (e.g. ' +
                         'on Windows C:\...\pepflashplayer.dll and on Linux ' +
                         '/opt/google/chrome/PepperFlash/libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path', type='str',
                    help='Absolute path to a recent PDF plugin ' +
                         'binary to be used in this bisection (e.g. ' +
                         'on Windows C:\...\pdf.dll and on Linux ' +
                         '/opt/google/chrome/libpdf.so). This option also ' +
                         'enables print preview.')
  parser.add_option('-g', '--good', type='str',
                    help='A good revision to start bisection. ' +
                         'May be earlier or later than the bad revision. ' +
                         'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir', type='str',
                    help='Profile to use; this will not reset every run. ' +
                         'Defaults to a clean profile.', default='profile')
  parser.add_option('-t', '--times', type='int',
                    help='Number of times to run each build before asking ' +
                         'if it\'s good or bad. Temporary profiles are reused.',
                    default=1)
  parser.add_option('-c', '--command', type='str',
                    help='Command to execute. %p and %a refer to Chrome ' +
                         'executable and specified extra arguments ' +
                         'respectively. Use %s to specify all extra arguments ' +
                         'as one string. Defaults to "%p %a". Note that any ' +
                         'extra paths specified should be absolute.',
                    default='%p %a')
  parser.add_option('-l', '--blink', action='store_true',
                    help='Use Blink bisect instead of Chromium.')
  parser.add_option('', '--not-interactive', action='store_true',
                    help='Use command exit code to tell good/bad revision.',
                    default=False)
  parser.add_option('--aura',
                    dest='aura',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect aura builds')
  parser.add_option('--use-local-repo',
                    dest='use_local_repo',
                    action='store_true',
                    default=False,
                    help='Allow the script to convert git SHA1 to SVN ' +
                         'revision using the "git svn find-rev <SHA1>" ' +
                         'command from a Chromium checkout.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.aura:
    if opts.archive != 'win' or not opts.official_builds:
      print ('Error: Aura is supported only on the Windows platform '
             'and only with official builds.')
      return 1

  if opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, 0, 0,
                        opts.official_builds, opts.aura, opts.use_local_repo,
                        None)
  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = '999.0.0.0'
    if not opts.official_builds:
      bad_rev = GetChromiumRevision(context, context.GetLastChangeURL())

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    flash_path = opts.flash_path
    msg = 'Could not find Flash binary at %s' % flash_path
    assert os.path.exists(flash_path), msg

  if opts.pdf_path:
    pdf_path = opts.pdf_path
    msg = 'Could not find PDF binary at %s' % pdf_path
    assert os.path.exists(pdf_path), msg

  if opts.official_builds:
    good_rev = LooseVersion(good_rev)
    bad_rev = LooseVersion(bad_rev)
  else:
    good_rev = int(good_rev)
    bad_rev = int(bad_rev)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  (min_chromium_rev, max_chromium_rev) = Bisect(
      base_url, opts.archive, opts.official_builds, opts.aura,
      opts.use_local_repo, good_rev, bad_rev, opts.times, opts.command,
      args, opts.profile, opts.flash_path, opts.pdf_path,
      not opts.not_interactive)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception, e:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print '  ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      print '  ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)


if __name__ == '__main__':
  sys.exit(main())