#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""

# The base URL for stored build archives.
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
                     '/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
                   '/chromium-webkit-snapshots')
ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
                 '/chromium-browser-asan')

# The base URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# URL template for viewing changelogs between revisions.
CHANGELOG_URL = ('http://build.chromium.org'
                 '/f/chromium/perf/dashboard/ui/changelog.html'
                 '?url=/trunk/src&range=%d%%3A%d')

# URL template for viewing changelogs between official versions.
OFFICIAL_CHANGELOG_URL = ('http://omahaproxy.appspot.com/changelog'
                          '?old_version=%s&new_version=%s')

# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# Blink changelogs URL.
BLINK_CHANGELOG_URL = ('http://build.chromium.org'
                       '/f/chromium/perf/dashboard/ui/changelog_blink.html'
                       '?url=/trunk&range=%d%%3A%d')

DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
                         'known good), but no later than %s (first known bad).')
DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
                         'known bad), but no later than %s (first known good).')

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')

GITHASH_TO_SVN_URL = {
    'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
    'blink': BLINK_GITHASH_TO_SVN_URL,
}

# Search pattern to be matched in the JSON output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')

# Search pattern to be matched in the json output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')

SEARCH_PATTERN = {
    'chromium': CHROMIUM_SEARCH_PATTERN,
    'blink': BLINK_SEARCH_PATTERN,
}

###############################################################################

import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""
  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_aura, is_asan, use_local_repo, flash_path = None,
               pdf_path = None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura
    self.is_asan = is_asan
    self.build_type = 'release'
    self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and its
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # If the script is run from a local Chromium checkout,
    # "--use-local-repo" option can be used to make the script run faster.
    # It uses "git svn find-rev <SHA1>" command to convert git hash to svn
    # revision number.
    self.use_local_repo = use_local_repo

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetASANPlatformDir(self):
    """ASAN builds are in directories like "linux-release", or have filenames
    like "asan-win32-release-277079.zip". This aligns to our platform names
    except in the case of Windows where they use "win32" instead of "win"."""
    if self.platform == 'win':
      return 'win32'
    else:
      return self.platform

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    if self.is_asan:
      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
    else:
      return (self.base_url + '/?delimiter=/&prefix=' +
              self._listing_platform_dir + marker_param)

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_asan:
      return '%s/%s-%s/%s-%d.zip' % (
          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
          self.GetASANBaseName(), revision)
    if self.is_official:
      return '%s/%s/%s%s' % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
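      # Newer snapshot directories are named by git hash rather than SVN
      # revision; map the revision back to the hash recorded by
      # ParseDirectoryIndex so the URL matches the server-side layout.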
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetASANBaseName(self):
    """Returns the base name of the ASAN zip file."""
    if 'linux' in self.platform:
      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
                                        self.build_type)
    else:
      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)

  def GetLaunchPath(self, revision):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    if self.is_asan:
      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
    else:
      extract_dir = self._archive_extract_dir
    return os.path.join(extract_dir, self._binary_name)

  @staticmethod
  def IsAuraBuild(build):
    """Checks whether the given build is an Aura build."""
    return build.split('.')[3] == '1'

  @staticmethod
  def IsOfficialASANBuild(build):
    """Checks whether the given build is an ASAN build."""
    return build.split('.')[3] == '2'

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 3-tuple of ([revisions], next-marker,
      {svn revision: git hash}). If next-marker is not None, then the listing
      is a partial listing and another fetch should be performed with
      next-marker being the marker= GET parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception('Could not locate end namespace for directory index')
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      revisions = []
      githash_svn_dict = {}
      if self.is_asan:
        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non ASAN builds are in a <revision> directory. The ASAN builds are
        # flat.
        all_prefixes = document.findall(namespace + 'Contents/' +
                                        namespace + 'Key')
        for prefix in all_prefixes:
          m = asan_regex.match(prefix.text)
          if m:
            try:
              revisions.append(int(m.group(1)))
            except ValueError:
              pass
      else:
        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                        namespace + 'Prefix')
        # The <Prefix> nodes have content of the form of
        # |_listing_platform_dir/revision/|. Strip off the platform dir and the
        # trailing slash to just have a number.
        for prefix in all_prefixes:
          revnum = prefix.text[prefix_len:-1]
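          # Newer archives name the directory by git hash instead of SVN
          # revision; convert it and remember the mapping for GetDownloadURL.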
          try:
            if not revnum.isdigit():
              git_hash = revnum
              revnum = self.GetSVNRevisionFromGitHash(git_hash)
              githash_svn_dict[revnum] = git_hash
            if revnum is not None:
              revnum = int(revnum)
              revisions.append(revnum)
          except ValueError:
            pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    (revisions, next_marker, self.githash_svn_dict) = _FetchAndParse(
        self.GetListingURL())
    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    try:
      response = urllib.urlopen(json_url)
    except urllib.HTTPError as error:
      msg = 'HTTP Error %d for %s' % (error.getcode(), git_sha1)
      return None
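    # Gitiles prefixes its JSON responses with ")]}'" to guard against XSSI;
    # skip those four characters before parsing.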
    data = json.loads(response.read()[4:])
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      if path:
        original_path = os.getcwd()
        os.chdir(path)
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (output, _) = proc.communicate()

      if path:
        os.chdir(original_path)
      return (output, proc.returncode)

    path = None
    if depot == 'blink':
      path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
    if os.path.basename(os.getcwd()) == 'src':
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        return git_output.strip('\n')
      raise ValueError
    else:
      print ('Script should be run from src folder. ' +
             'Eg: python tools/bisect-builds.py -g 280588 -b 280590 ' +
             '--archive linux64 --use-local-repo')
      sys.exit(1)

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.use_local_repo:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist_all = map(int, self.ParseDirectoryIndex())

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    i = 0
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = (OFFICIAL_BASE_URL + '/' + str(build_number) + '/' +
              self._listing_platform_dir + self.archive_name)
      i = i + 1
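      # Probe the archive URL to check that this build actually exists for the
      # requested platform before considering it.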
      try:
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with ".1".
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we cannot include builds which end with '.1' or '.2' since
          # they have a different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsOfficialASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except urllib.HTTPError:
        pass
    return final_list

def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.
  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError('Aborting download of revision %s' % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = 'Received %d bytes' % size
      else:
        size = min(totalsize, size)
        progress = 'Received %d of %d bytes, %.2f%%' % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write('\r' + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError:
    pass


def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print 'Trying revision %s...' % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zip_file, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently need not
    # be correct. Instead of requiring the user of the script to figure out
    # and pass the correct version we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path,
                os.path.dirname(context.GetLaunchPath(revision)))
    testargs.append('--enable-print-preview')
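
  # Build the command to run: %p expands to the browser binary path, %a to the
  # extra test arguments, and %s to those arguments joined as one string (see
  # the --command help text in main()).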
  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
          replace('%s', ' '.join(testargs)))

  results = []
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]


# The arguments official_builds, status, stdout and stderr are unused.
# They are present here because this function is passed to Bisect which then
# calls it with 5 arguments.
# pylint: disable=W0613
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Asks the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
  """Determine if an ASAN build |rev| is good or bad.

  Will examine stderr looking for the error message emitted by ASAN. If not
  found, falls back to asking the user."""
  if stderr:
    bad_count = 0
    for line in stderr.splitlines():
      print line
      if line.find('ERROR: AddressSanitizer:') != -1:
        bad_count += 1
    if bad_count > 0:
      print 'Revision %d determined to be bad.' % rev
      return 'b'
  return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)

class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zip_file):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zip_file = zip_file
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()
    self.thread = None

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zip_file,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    assert self.thread, 'DownloadJob must be started before Stop is called.'
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zip_file)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
    print 'Downloading revision %s...' % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()


def Bisect(base_url,
           platform,
           official_builds,
           is_aura,
           is_asan,
           use_local_repo,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           command='%p %a',
           try_args=(),
           profile=None,
           flash_path=None,
           pdf_path=None,
           interactive=True,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param interactive If it is false, use command exit code for good or bad
                     judgment of the argument build.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

  - If rev 50 is good, the download of rev 25 is cancelled, and the next test
    is run on rev 75.

  - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
    is run on rev 25.
  """
  if not profile:
    profile = 'profile'

  context = PathContext(base_url, platform, good_rev, bad_rev,
                        official_builds, is_aura, is_asan, use_local_repo,
                        flash_path, pdf_path)
  cwd = os.getcwd()

  print 'Downloading list of known revisions...',
  if not use_local_repo:
    print '(use --use-local-repo for speed if you have a local checkout)'
  else:
    print
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zip_file = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zip_file and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = 'bad', 'good'
    else:
      min_str, max_str = 'good', 'bad'
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision turns
    #     out to be bad.
    #   - up_pivot is the next revision to check if the current revision turns
    #     out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zip_file,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if not interactive:
        if status:
          answer = 'b'
          print 'Bad revision: %s' % rev
        else:
          answer = 'g'
          print 'Good revision: %s' % rev
      else:
        answer = evaluate(rev, official_builds, status, stdout, stderr)
      if ((answer == 'g' and good_rev < bad_rev)
          or (answer == 'b' and bad_rev < good_rev)):
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif ((answer == 'b' and good_rev < bad_rev)
            or (answer == 'g' and bad_rev < good_rev)):
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zip_file = fetch.zip_file

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, 'Unexpected return value from evaluate(): ' + answer
    except SystemExit:
      print 'Cleaning up...'
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])


def GetBlinkDEPSRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in the DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision\D*(\d+)')
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get Blink revision for Chromium rev %d' % rev)


def GetBlinkRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|."""
  def _IsRevisionNumber(revision):
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  if str(rev) in self.githash_svn_dict:
    rev = self.githash_svn_dict[str(rev)]
  file_url = '%s/%s%s/REVISIONS' % (self.base_url,
                                    self._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  data = json.loads(url.read())
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      blink_rev = self.GetSVNRevisionFromGitHash(blink_rev, 'blink')
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)


def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for blink bisect; DEPS and REVISIONS file might not match since
  blink snapshots point to tip of tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)
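
  # Walk backwards through the snapshot list until the Blink revision bundled
  # with the snapshot is no newer than the Blink revision named in DEPS.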
  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx-1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev


def GetChromiumRevision(context, url):
  """Returns the chromium revision read from given URL."""
  try:
    # Location of the latest build revision number
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception:
    print 'Could not determine latest revision. This could be bad...'
    return 999999999


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect; they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                         '|'.join(choices))
  parser.add_option('-o',
                    action='store_true',
                    dest='official_builds',
                    help='Bisect across official Chrome builds (internal '
                         'only) instead of Chromium archives.')
  parser.add_option('-b', '--bad',
                    type='str',
                    help='A bad revision to start bisection. '
                         'May be earlier or later than the good revision. '
                         'Default is HEAD.')
  parser.add_option('-f', '--flash_path',
                    type='str',
                    help='Absolute path to a recent Adobe Pepper Flash '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pepflashplayer.dll and on Linux '
                         '/opt/google/chrome/PepperFlash/'
                         'libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path',
                    type='str',
                    help='Absolute path to a recent PDF plugin '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pdf.dll and on Linux '
                         '/opt/google/chrome/libpdf.so). Option also enables '
                         'print preview.')
  parser.add_option('-g', '--good',
                    type='str',
                    help='A good revision to start bisection. ' +
                         'May be earlier or later than the bad revision. ' +
                         'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir',
                    type='str',
                    default='profile',
                    help='Profile to use; this will not reset every run. '
                         'Defaults to a clean profile.')
  parser.add_option('-t', '--times',
                    type='int',
                    default=1,
                    help='Number of times to run each build before asking '
                         'if it\'s good or bad. Temporary profiles are reused.')
  parser.add_option('-c', '--command',
                    type='str',
                    default='%p %a',
                    help='Command to execute. %p and %a refer to Chrome '
                         'executable and specified extra arguments '
                         'respectively. Use %s to specify all extra arguments '
                         'as one string. Defaults to "%p %a". Note that any '
                         'extra paths specified should be absolute.')
  parser.add_option('-l', '--blink',
                    action='store_true',
                    help='Use Blink bisect instead of Chromium. ')
  parser.add_option('', '--not-interactive',
                    action='store_true',
                    default=False,
                    help='Use command exit code to tell good/bad revision.')
  parser.add_option('--aura',
                    dest='aura',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect aura builds')
  parser.add_option('--asan',
                    dest='asan',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect ASAN builds')
  parser.add_option('--use-local-repo',
                    dest='use_local_repo',
                    action='store_true',
                    default=False,
                    help='Allow the script to convert git SHA1 to SVN '
                         'revision using "git svn find-rev <SHA1>" '
                         'command from a Chromium checkout.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.aura:
    if opts.archive != 'win' or not opts.official_builds:
      print ('Error: Aura is supported only on Windows platform '
             'and official builds.')
      return 1

  if opts.asan:
    supported_platforms = ['linux', 'mac', 'win']
    if opts.archive not in supported_platforms:
      print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
          '|'.join(supported_platforms))
      return 1
    if opts.official_builds:
      print 'Error: Do not yet support bisecting official ASAN builds.'
      return 1

  if opts.asan:
    base_url = ASAN_BASE_URL
  elif opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, 0, 0,
                        opts.official_builds, opts.aura, opts.asan,
                        opts.use_local_repo, None)
  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = '999.0.0.0'
    if not opts.official_builds:
      bad_rev = GetChromiumRevision(context, context.GetLastChangeURL())

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    flash_path = opts.flash_path
    msg = 'Could not find Flash binary at %s' % flash_path
    assert os.path.exists(flash_path), msg

  if opts.pdf_path:
    pdf_path = opts.pdf_path
    msg = 'Could not find PDF binary at %s' % pdf_path
    assert os.path.exists(pdf_path), msg

  if opts.official_builds:
    good_rev = LooseVersion(good_rev)
    bad_rev = LooseVersion(bad_rev)
  else:
    good_rev = int(good_rev)
    bad_rev = int(bad_rev)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  if opts.asan:
    evaluator = IsGoodASANBuild
  else:
    evaluator = AskIsGoodBuild

  (min_chromium_rev, max_chromium_rev) = Bisect(
      base_url, opts.archive, opts.official_builds, opts.aura, opts.asan,
      opts.use_local_repo, good_rev, bad_rev, opts.times, opts.command,
      args, opts.profile, opts.flash_path, opts.pdf_path,
      not opts.not_interactive, evaluator)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print '  ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      print '  ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)


if __name__ == '__main__':
  sys.exit(main())