#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""

# The base URL for stored build archives.
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
                     '/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
                   '/chromium-webkit-snapshots')
ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
                 '/chromium-browser-asan')

# GS bucket name.
GS_BUCKET_NAME = 'chrome-unsigned/desktop-W15K3Y'

# Base URL for downloading official builds.
GOOGLE_APIS_URL = 'commondatastorage.googleapis.com'

# The base URL for official builds.
OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, GS_BUCKET_NAME)

# URL template for viewing changelogs between revisions.
CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')

# URL to convert SVN revision to git hash.
CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')

# URL template for viewing changelogs between official versions.
OFFICIAL_CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/'
                          'src/+log/%s..%s?pretty=full')

# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# Blink changelogs URL.
BLINK_CHANGELOG_URL = ('http://build.chromium.org'
                       '/f/chromium/perf/dashboard/ui/changelog_blink.html'
                       '?url=/trunk&range=%d%%3A%d')

DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
                         'known good), but no later than %s (first known bad).')
DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
                         'known bad), but no later than %s (first known good).')

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')

BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')

GITHASH_TO_SVN_URL = {
    'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
    'blink': BLINK_GITHASH_TO_SVN_URL,
}

# Search pattern to be matched in the JSON output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN_OLD = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
CHROMIUM_SEARCH_PATTERN = (
    r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')

# Search pattern to be matched in the JSON output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')

SEARCH_PATTERN = {
    'chromium': CHROMIUM_SEARCH_PATTERN,
    'blink': BLINK_SEARCH_PATTERN,
}

CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
                            'no configured credentials')

###############################################################################

import httplib
import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_asan, use_local_repo, flash_path=None,
               pdf_path=None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_asan = is_asan
    self.build_type = 'release'
    self.flash_path = flash_path
    # Dictionary which stores an svn revision number as key and its
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # If the script is run from a local Chromium checkout, the
    # "--use-local-repo" option can be used to make the script run faster.
    # It uses the "git svn find-rev <SHA1>" command to convert a git hash to
    # an svn revision number.
    self.use_local_repo = use_local_repo

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform in ('mac', 'mac64'):
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform in ('win', 'win64'):
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32/'
        self.archive_name = 'chrome-precise32.zip'
        self._archive_extract_dir = 'chrome-precise32'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64/'
        self.archive_name = 'chrome-precise64.zip'
        self._archive_extract_dir = 'chrome-precise64'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'mac64':
        self._listing_platform_dir = 'mac64/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        self._listing_platform_dir = 'win/'
        self.archive_name = 'chrome-win.zip'
        self._archive_extract_dir = 'chrome-win'
      elif self.platform == 'win64':
        self._listing_platform_dir = 'win64/'
        self.archive_name = 'chrome-win64.zip'
        self._archive_extract_dir = 'chrome-win64'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetASANPlatformDir(self):
    """ASAN builds are in directories like "linux-release", or have filenames
    like "asan-win32-release-277079.zip". This aligns to our platform names
    except in the case of Windows where they use "win32" instead of "win"."""
    if self.platform == 'win':
      return 'win32'
    else:
      return self.platform

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    if self.is_asan:
      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
    else:
      return (self.base_url + '/?delimiter=/&prefix=' +
              self._listing_platform_dir + marker_param)

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_asan:
      return '%s/%s-%s/%s-%d.zip' % (
          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
          self.GetASANBaseName(), revision)
    if self.is_official:
      return '%s/%s/%s%s' % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)
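
  # For illustration only (nothing is fetched here): assuming the default
  # CHROMIUM_BASE_URL and the non-official 'linux64' settings above, a
  # snapshot at revision 280590 resolves to
  #   http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/280590/chrome-linux.zip
  # Official builds instead use OFFICIAL_BASE_URL/<version>/<platform dir>/<archive>.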

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetASANBaseName(self):
    """Returns the base name of the ASAN zip file."""
    if 'linux' in self.platform:
      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
                                        self.build_type)
    else:
      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)

  def GetLaunchPath(self, revision):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    if self.is_asan:
      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
    else:
      extract_dir = self._archive_extract_dir
    return os.path.join(extract_dir, self._binary_name)

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 3-tuple of ([revisions], next_marker,
      githash_svn_dict). If next_marker is not None, then the listing is a
      partial listing and another fetch should be performed with next_marker
      being the marker= GET parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to
      # extract the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception('Could not locate end namespace for directory index')
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      revisions = []
      githash_svn_dict = {}
      if self.is_asan:
        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non-ASAN builds are in a <revision> directory; the ASAN builds are
        # flat.
        all_prefixes = document.findall(namespace + 'Contents/' +
                                        namespace + 'Key')
        for prefix in all_prefixes:
          m = asan_regex.match(prefix.text)
          if m:
            try:
              revisions.append(int(m.group(1)))
            except ValueError:
              pass
      else:
        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                        namespace + 'Prefix')
        # The <Prefix> nodes have content of the form
        # |_listing_platform_dir/revision/|. Strip off the platform dir and
        # the trailing slash to leave just the revision number.
        for prefix in all_prefixes:
          revnum = prefix.text[prefix_len:-1]
          try:
            if not revnum.isdigit():
              git_hash = revnum
              revnum = self.GetSVNRevisionFromGitHash(git_hash)
              githash_svn_dict[revnum] = git_hash
            if revnum is not None:
              revnum = int(revnum)
              revisions.append(revnum)
          except ValueError:
            pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    (revisions, next_marker, self.githash_svn_dict) = _FetchAndParse(
        self.GetListingURL())
    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    response = urllib.urlopen(json_url)
    if response.getcode() == 200:
      try:
        data = json.loads(response.read()[4:])
      except ValueError:
        print 'ValueError for JSON URL: %s' % json_url
        raise ValueError
    else:
      raise ValueError
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
      else:
        if depot == 'chromium':
          result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
                             message[len(message)-1])
          if result:
            return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      if path:
        original_path = os.getcwd()
        os.chdir(path)
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (output, _) = proc.communicate()

      if path:
        os.chdir(original_path)
      return (output, proc.returncode)

    path = None
    if depot == 'blink':
      path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
    if os.path.basename(os.getcwd()) == 'src':
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        return git_output.strip('\n')
      raise ValueError
    else:
      print ('Script should be run from the src folder, e.g.: '
             'python tools/bisect-builds.py -g 280588 -b 280590 '
             '--archive linux64 --use-local-repo')
      sys.exit(1)

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.use_local_repo:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist_all = map(int, self.ParseDirectoryIndex())

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

      # Fix chromium rev so that the deps blink revision matches REVISIONS file.
      if self.base_url == WEBKIT_BASE_URL:
        revlist_all.sort()
        self.good_revision = FixChromiumRevForBlink(revlist,
                                                    revlist_all,
                                                    self,
                                                    self.good_revision)
        self.bad_revision = FixChromiumRevForBlink(revlist,
                                                   revlist_all,
                                                   self,
                                                   self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""

    def CheckDepotToolsInPath():
      delimiter = ';' if sys.platform.startswith('win') else ':'
      path_list = os.environ['PATH'].split(delimiter)
      for path in path_list:
        if path.find('depot_tools') != -1:
          return path
      return None

    def RunGsutilCommand(args):
      gsutil_path = CheckDepotToolsInPath()
      if gsutil_path is None:
        print ('Follow the instructions in this document '
               'http://dev.chromium.org/developers/how-tos/install-depot-tools'
               ' to install depot_tools and then try again.')
        sys.exit(1)
      gsutil_path = os.path.join(gsutil_path, 'third_party', 'gsutil', 'gsutil')
      gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                env=None)
      stdout, stderr = gsutil.communicate()
      if gsutil.returncode:
        if (re.findall(r'status[ |=]40[1|3]', stderr) or
            stderr.startswith(CREDENTIAL_ERROR_MESSAGE)):
          print ('Follow these steps to configure your credentials and try '
                 'running bisect-builds.py again:\n'
                 '  1. Run "python %s config" and follow its instructions.\n'
                 '  2. If you have a @google.com account, use that account.\n'
                 '  3. For the project-id, just enter 0.' % gsutil_path)
          sys.exit(1)
        else:
          raise Exception('Error running the gsutil command: %s' % stderr)
      return stdout

    def GsutilList(bucket):
      query = 'gs://%s/' % bucket
      stdout = RunGsutilCommand(['ls', query])
      return [url[len(query):].strip('/') for url in stdout.splitlines()]

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    build_numbers = GsutilList(GS_BUCKET_NAME)
    revision_re = re.compile(r'(\d\d\.\d\.\d{4}\.\d+)')
    build_numbers = filter(lambda b: revision_re.search(b), build_numbers)
    final_list = []
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    connection = httplib.HTTPConnection(GOOGLE_APIS_URL)
    for build_number in sorted(parsed_build_numbers):
      if build_number > maxrev:
        break
      if build_number < minrev:
        continue
      path = ('/' + GS_BUCKET_NAME + '/' + str(build_number) + '/' +
              self._listing_platform_dir + self.archive_name)
      connection.request('HEAD', path)
      response = connection.getresponse()
      if response.status == 200:
        final_list.append(str(build_number))
      response.read()
    connection.close()
    return final_list


def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError('Aborting download of revision %s' % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = 'Received %d bytes' % size
      else:
        size = min(totalsize, size)
        progress = 'Received %d of %d bytes, %.2f%%' % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write('\r' + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError:
    pass


def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print 'Trying revision %s...' % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zip_file, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently need not
    # be correct. Instead of requiring the user of the script to figure out
    # and pass the correct version, we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path,
                os.path.dirname(context.GetLaunchPath(revision)))
    testargs.append('--enable-print-preview')

  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
          replace('%s', ' '.join(testargs)))

  results = []
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]


# The arguments official_builds, status, stdout and stderr are unused.
# They are present here because this function is passed to Bisect which then
# calls it with 5 arguments.
# pylint: disable=W0613
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Asks the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
  """Determines whether an ASAN build |rev| is good or bad.

  Examines stderr looking for the error message emitted by ASAN. If none is
  found, falls back to asking the user."""
  if stderr:
    bad_count = 0
    for line in stderr.splitlines():
      print line
      if line.find('ERROR: AddressSanitizer:') != -1:
        bad_count += 1
    if bad_count > 0:
      print 'Revision %d determined to be bad.' % rev
      return 'b'
  return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)
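
# For reference, the marker matched above is the start of a typical
# AddressSanitizer report line in stderr, e.g. (illustrative only):
#   ==12345==ERROR: AddressSanitizer: heap-use-after-free on address 0x...
# Any such line causes the revision to be reported as bad without prompting.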


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zip_file):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zip_file = zip_file
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()
    self.thread = None

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zip_file,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    assert self.thread, 'DownloadJob must be started before Stop is called.'
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zip_file)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
    print 'Downloading revision %s...' % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()


def Bisect(context,
           num_runs=1,
           command='%p %a',
           try_args=(),
           profile=None,
           interactive=True,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param context PathContext object initialized with user-provided parameters.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param interactive If false, use the command exit code to judge whether the
                     argument build is good or bad.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

    - If rev 50 is good, the download of rev 25 is cancelled, and the next test
      is run on rev 75.

    - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
      is run on rev 25.
  """
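  # Worked example of the pivot arithmetic used below (indices into revlist,
  # mirroring the docstring example): with minrev=0, maxrev=100, pivot=50,
  #   down_pivot = (50 - 0) / 2 + 0    = 25  (next pivot if rev 50 is bad)
  #   up_pivot   = (100 - 50) / 2 + 50 = 75  (next pivot if rev 50 is good)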
  if not profile:
    profile = 'profile'

  good_rev = context.good_revision
  bad_rev = context.bad_revision
  cwd = os.getcwd()

  print 'Downloading list of known revisions...',
  if not context.use_local_repo and not context.is_official:
    print '(use --use-local-repo for speed if you have a local checkout)'
  else:
    print
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if context.is_official:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zip_file = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zip_file and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = 'bad', 'good'
    else:
      min_str, max_str = 'good', 'bad'
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
                                                   revlist[maxrev], max_str)

    # Pre-fetch the next two possible pivots:
    #   - down_pivot is the next revision to check if the current revision
    #     turns out to be bad.
    #   - up_pivot is the next revision to check if the current revision turns
    #     out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zip_file,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if not interactive:
        if status:
          answer = 'b'
          print 'Bad revision: %s' % rev
        else:
          answer = 'g'
          print 'Good revision: %s' % rev
      else:
        answer = evaluate(rev, context.is_official, status, stdout, stderr)
      if ((answer == 'g' and good_rev < bad_rev)
          or (answer == 'b' and bad_rev < good_rev)):
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif ((answer == 'b' and good_rev < bad_rev)
            or (answer == 'g' and bad_rev < good_rev)):
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zip_file = fetch.zip_file

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, 'Unexpected return value from evaluate(): ' + answer
    except SystemExit:
      print 'Cleaning up...'
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev], context)


def GetBlinkDEPSRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in the DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision\D*(\d+)')
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get Blink revision for Chromium rev %d' % rev)


def GetBlinkRevisionForChromiumRevision(context, rev):
  """Returns the blink revision that was in the REVISIONS file at
  chromium revision |rev|."""
  def _IsRevisionNumber(revision):
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  if str(rev) in context.githash_svn_dict:
    rev = context.githash_svn_dict[str(rev)]
  file_url = '%s/%s%s/REVISIONS' % (context.base_url,
                                    context._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  if url.getcode() == 200:
    try:
      data = json.loads(url.read())
    except ValueError:
      print 'ValueError for JSON URL: %s' % file_url
      raise ValueError
  else:
    raise ValueError
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)


def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for a blink bisect; the DEPS and REVISIONS files might not match, since
  blink snapshots point to the tip of tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)

  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx-1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev


def GetChromiumRevision(context, url):
  """Returns the chromium revision read from the given URL."""
  try:
    # Location of the latest build revision number.
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception:
    print 'Could not determine latest revision. This could be bad...'
    return 999999999


def PrintChangeLog(min_chromium_rev, max_chromium_rev):
  """Prints the changelog URL."""

  def _GetGitHashFromSVNRevision(svn_revision):
    crrev_url = CRREV_URL + str(svn_revision)
    url = urllib.urlopen(crrev_url)
    if url.getcode() == 200:
      data = json.loads(url.read())
      if 'git_sha' in data:
        return data['git_sha']

  print ('  ' + CHANGELOG_URL % (_GetGitHashFromSVNRevision(min_chromium_rev),
                                 _GetGitHashFromSVNRevision(max_chromium_rev)))


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect; they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                         '|'.join(choices))
  parser.add_option('-o',
                    action='store_true',
                    dest='official_builds',
                    help='Bisect across official Chrome builds (internal '
                         'only) instead of Chromium archives.')
  parser.add_option('-b', '--bad',
                    type='str',
                    help='A bad revision to start bisection. '
                         'May be earlier or later than the good revision. '
                         'Default is HEAD.')
  parser.add_option('-f', '--flash_path',
                    type='str',
                    help='Absolute path to a recent Adobe Pepper Flash '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pepflashplayer.dll and on Linux '
                         '/opt/google/chrome/PepperFlash/'
                         'libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path',
                    type='str',
                    help='Absolute path to a recent PDF plugin '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pdf.dll and on Linux '
                         '/opt/google/chrome/libpdf.so). This option also '
                         'enables print preview.')
  parser.add_option('-g', '--good',
                    type='str',
                    help='A good revision to start bisection. '
                         'May be earlier or later than the bad revision. '
                         'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir',
                    type='str',
                    default='profile',
                    help='Profile to use; this will not reset every run. '
                         'Defaults to a clean profile.')
  parser.add_option('-t', '--times',
                    type='int',
                    default=1,
                    help='Number of times to run each build before asking '
                         'if it\'s good or bad. Temporary profiles are '
                         'reused.')
  parser.add_option('-c', '--command',
                    type='str',
                    default='%p %a',
                    help='Command to execute. %p and %a refer to the Chrome '
                         'executable and the specified extra arguments, '
                         'respectively. Use %s to specify all extra arguments '
                         'as one string. Defaults to "%p %a". Note that any '
                         'extra paths specified should be absolute.')
  parser.add_option('-l', '--blink',
                    action='store_true',
                    help='Use Blink bisect instead of Chromium.')
  parser.add_option('', '--not-interactive',
                    action='store_true',
                    default=False,
                    help='Use the command exit code to tell good/bad '
                         'revisions.')
  parser.add_option('--asan',
                    dest='asan',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect ASAN builds.')
  parser.add_option('--use-local-repo',
                    dest='use_local_repo',
                    action='store_true',
                    default=False,
                    help='Allow the script to convert a git SHA1 to an SVN '
                         'revision using the "git svn find-rev <SHA1>" '
                         'command from a Chromium checkout.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.asan:
    supported_platforms = ['linux', 'mac', 'win']
    if opts.archive not in supported_platforms:
      print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
          '|'.join(supported_platforms))
      return 1
    if opts.official_builds:
      print 'Error: Bisecting official ASAN builds is not yet supported.'
      return 1

  if opts.asan:
    base_url = ASAN_BASE_URL
  elif opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, opts.good, opts.bad,
                        opts.official_builds, opts.asan, opts.use_local_repo,
                        opts.flash_path, opts.pdf_path)
  # Pick a starting point, try to get HEAD for this.
  if not opts.bad:
    context.bad_revision = '999.0.0.0'
    context.bad_revision = GetChromiumRevision(
        context, context.GetLastChangeURL())

  # Find out when we were good.
  if not opts.good:
    context.good_revision = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    msg = 'Could not find Flash binary at %s' % opts.flash_path
    assert os.path.exists(opts.flash_path), msg

  if opts.pdf_path:
    msg = 'Could not find PDF binary at %s' % opts.pdf_path
    assert os.path.exists(opts.pdf_path), msg

  if opts.official_builds:
    context.good_revision = LooseVersion(context.good_revision)
    context.bad_revision = LooseVersion(context.bad_revision)
  else:
    context.good_revision = int(context.good_revision)
    context.bad_revision = int(context.bad_revision)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  if opts.asan:
    evaluator = IsGoodASANBuild
  else:
    evaluator = AskIsGoodBuild

  # Save these revision numbers to compare when showing the changelog URL
  # after the bisect.
  good_rev = context.good_revision
  bad_rev = context.bad_revision

  (min_chromium_rev, max_chromium_rev, context) = Bisect(
      context, opts.times, opts.command, args, opts.profile,
      not opts.not_interactive, evaluator)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print '  ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      PrintChangeLog(min_chromium_rev, max_chromium_rev)


if __name__ == '__main__':
  sys.exit(main())