# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
6 """Snapshot Build Bisect Tool
8 This script bisects a snapshot archive using binary search. It starts at
9 a bad revision (it will try to guess HEAD) and asks for a last known-good
10 revision. It will then binary search across this revision range by downloading,
11 unzipping, and opening Chromium for you. After testing the specific revision,
12 it will ask you whether it is good or bad before continuing the search.
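
# An illustrative invocation (the revision numbers below are hypothetical
# examples, not recommendations): bisect 64-bit Linux snapshot builds between
# a known-good and a known-bad revision, skipping the first run prompts of
# every build that gets launched:
#
#   python tools/bisect-builds.py -a linux64 -g 280588 -b 280590 \
#       -- --no-first-run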

# The root URL for storage.
CHROMIUM_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
WEBKIT_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-webkit-snapshots'

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?' \
                'url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/' \
                         'changelog?old_version=%s&new_version=%s'

DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# Blink Changelogs URL.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                      'perf/dashboard/ui/changelog_blink.html?' \
                      'url=/trunk&range=%d%%3A%d'

DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
GITHASH_TO_SVN_URL = {'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
                      'blink': BLINK_GITHASH_TO_SVN_URL}

# Search pattern to be matched in the json output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')

# Search pattern to be matched in the json output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')

SEARCH_PATTERN = {'chromium': CHROMIUM_SEARCH_PATTERN,
                  'blink': BLINK_SEARCH_PATTERN}
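
# For reference, these patterns are expected to match a git-svn commit message
# footer of roughly this shape (the revision number and the trailing UUID are
# placeholders, not real values):
#   git-svn-id: svn://svn.chromium.org/chrome/trunk/src@284321 <repository-uuid>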

###############################################################################

import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_aura, use_local_repo, flash_path=None,
               pdf_path=None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura
    self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and its
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # If the script is run from a local Chromium checkout, the
    # "--use-local-repo" option can be used to make the script run faster.
    # It uses the "git svn find-rev <SHA1>" command to convert a git hash to
    # an svn revision number.
    self.use_local_repo = use_local_repo

    # Set some internal members:
    # _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    # _archive_extract_dir = Uncompressed directory in the archive_name file.
    # _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return self.base_url + '/?delimiter=/&prefix=' + \
        self._listing_platform_dir + marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)
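
  # For illustration, a snapshot URL built by GetDownloadURL() above has the
  # shape <base_url>/<platform_dir><revision>/<archive_name>, e.g. (the
  # revision number is a made-up example):
  #   http://commondatastorage.googleapis.com/chromium-browser-snapshots/
  #       Linux_x64/280590/chrome-linux.zip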

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def IsAuraBuild(self, build):
    """Checks whether the given build is an Aura build."""
    return build.split('.')[3] == '1'

  def IsASANBuild(self, build):
    """Checks whether the given build is an ASAN build."""
    return build.split('.')[3] == '2'
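
  # Note: IsAuraBuild()/IsASANBuild() key off the fourth dotted component of an
  # official build number, so a hypothetical '31.0.1650.1' would be treated as
  # an Aura build and '31.0.1650.2' as an ASAN build.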

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      githash_svn_dict = {}
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        if not revnum.isdigit():
          git_hash = revnum
          revnum = self.GetSVNRevisionFromGitHash(git_hash)
          githash_svn_dict[revnum] = git_hash
        if revnum is not None:
          revisions.append(revnum)
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    (revisions, next_marker, self.githash_svn_dict) = \
        _FetchAndParse(self.GetListingURL())
    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
    return revisions
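
  # For reference, _FetchAndParse() above expects the storage listing to be
  # namespaced XML containing roughly these elements (the values shown are
  # illustrative only):
  #   <Prefix>Linux_x64/</Prefix>
  #   <IsTruncated>true</IsTruncated>
  #   <NextMarker>Linux_x64/280590/</NextMarker>
  #   <CommonPrefixes><Prefix>Linux_x64/280588/</Prefix></CommonPrefixes>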

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    try:
      response = urllib.urlopen(json_url)
    except urllib.HTTPError as error:
      msg = 'HTTP Error %d for %s' % (error.getcode(), git_sha1)
      return None
    # Strip the leading junk prefix from the JSON response before parsing.
    data = json.loads(response.read()[4:])
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    return None

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      original_path = os.getcwd()
      if path:
        os.chdir(path)
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (output, _) = proc.communicate()
      if path:
        os.chdir(original_path)
      return (output, proc.returncode)

    path = None
    if depot == 'blink':
      path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
    if os.path.basename(os.getcwd()) == 'src':
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        return git_output.strip('\n')
      return None
    else:
      print ('Script should be run from src folder. ' +
             'Eg: python tools/bisect-builds.py -g 280588 -b 280590 ' +
             '--archive linux64 --use-local-repo')

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.use_local_repo:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    revlist_all = map(int, self.ParseDirectoryIndex())

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
    revlist.sort()

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
             self._listing_platform_dir + self.archive_name
      try:
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with '.1'.
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we can not include builds which end with '.1' or '.2' since
          # they have a different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except urllib.HTTPError, e:
        pass
    return final_list


def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make the base directory and extract into it.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads and unzips revision |rev|.
  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError, e:
    pass


def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print "Trying revision %s..." % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently need not
    # be correct. Instead of requiring the user of the script to figure out and
    # pass the correct version we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path, os.path.dirname(context.GetLaunchPath()))
    testargs.append('--enable-print-preview')

  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath()))
               .replace('%s', ' '.join(testargs)))

  results = []
  for i in range(0, num_runs):
    subproc = subprocess.Popen(runcommand,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))

  os.chdir(cwd)
  shutil.rmtree(tempdir, True)

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]
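
# For illustration of the %p/%a/%s substitution in RunRevision(): with a
# hypothetical '-c "%p --foo %a"', a run against an extracted linux64 snapshot
# expands to roughly
#   [<tempdir>/chrome-linux/chrome, '--foo', '--user-data-dir=profile', ...]
# where the trailing arguments are the testargs built above.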


def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
                         str(rev))
    if response and response in ('g', 'b', 'r', 'u'):
      return response
    if response and response == 'q':
      raise SystemExit()


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zipfile,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print "Downloading revision %s..." % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()
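
# Typical lifecycle of a DownloadJob during the bisect below: Start() kicks off
# the fetch on a background thread, WaitFor() blocks (printing progress) when
# that revision becomes the next pivot, and Stop() aborts and deletes the
# download when the search moves away from it.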


def Bisect(base_url,
           platform,
           official_builds,
           is_aura,
           use_local_repo,
           good_rev,
           bad_rev,
           num_runs,
           command,
           try_args,
           profile,
           flash_path,
           pdf_path,
           interactive,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param interactive If it is false, use command exit code for good or bad
                     judgment of the argument build.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

  - If rev 50 is good, the download of rev 25 is cancelled, and the next test
    is run on rev 75.

  - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
    is run on rev 25.
  """
  context = PathContext(base_url, platform, good_rev, bad_rev,
                        official_builds, is_aura, use_local_repo, flash_path,
                        pdf_path)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zipfile and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = "bad", "good"
    else:
      min_str, max_str = "good", "bad"
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    # - down_pivot is the next revision to check if the current revision turns
    #   out to be bad.
    # - up_pivot is the next revision to check if the current revision turns
    #   out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zipfile,
                                             profile,
                                             num_runs,
                                             command,
                                             try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if not interactive:
        if status:
          answer = 'b'
          print 'Bad revision: %s' % rev
        else:
          answer = 'g'
          print 'Good revision: %s' % rev
      else:
        answer = evaluate(rev, official_builds, status, stdout, stderr)
      if answer == 'g' and good_rev < bad_rev or \
          answer == 'b' and bad_rev < good_rev:
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
          fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif answer == 'b' and good_rev < bad_rev or \
          answer == 'g' and bad_rev < good_rev:
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
          fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
        pass  # Retry requires no changes.
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        fetch.Stop()
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])


def GetBlinkDEPSRevisionForChromiumRevision(rev):
  """Returns the blink revision that was in the chromium DEPS file at
  chromium revision |rev|."""
  # . doesn't match newlines without re.DOTALL, so this is safe.
  blink_re = re.compile(r'webkit_revision\D*(\d+)')
  url = urllib.urlopen(DEPS_FILE % rev)
  m = blink_re.search(url.read())
  url.close()
  if m:
    return int(m.group(1))
  else:
    raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
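
# For reference, the regular expression above is meant to match a DEPS entry of
# roughly this form (the number is a placeholder):
#   'webkit_revision': '151234',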


def GetBlinkRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|."""
  def _IsRevisionNumber(revision):
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  if str(rev) in self.githash_svn_dict:
    rev = self.githash_svn_dict[str(rev)]
  file_url = "%s/%s%s/REVISIONS" % (self.base_url,
                                    self._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  data = json.loads(url.read())
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      blink_rev = self.GetSVNRevisionFromGitHash(blink_rev, 'blink')
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)


def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for blink bisect; DEPS and REVISIONS file might not match since
  blink snapshots point to tip of tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)

  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx-1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev


def GetChromiumRevision(context, url):
  """Returns the chromium revision read from given URL."""
  try:
    # Location of the latest build revision number
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception:
    print('Could not determine latest revision. This could be bad...')
    return 999999999


def main():
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect, they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-o', action="store_true", dest='official_builds',
                    help = 'Bisect across official ' +
                           'Chrome builds (internal only) instead of ' +
                           'Chromium archives.')
  parser.add_option('-b', '--bad', type = 'str',
                    help = 'A bad revision to start bisection. ' +
                           'May be earlier or later than the good revision. ' +
                           'Default is HEAD.')
  parser.add_option('-f', '--flash_path', type = 'str',
                    help = 'Absolute path to a recent Adobe Pepper Flash ' +
                           'binary to be used in this bisection (e.g. ' +
                           'on Windows C:\...\pepflashplayer.dll and on Linux ' +
                           '/opt/google/chrome/PepperFlash/libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path', type = 'str',
                    help = 'Absolute path to a recent PDF plugin ' +
                           'binary to be used in this bisection (e.g. ' +
                           'on Windows C:\...\pdf.dll and on Linux ' +
                           '/opt/google/chrome/libpdf.so). Option also enables ' +
                           'print preview.')
  parser.add_option('-g', '--good', type = 'str',
                    help = 'A good revision to start bisection. ' +
                           'May be earlier or later than the bad revision. ' +
                           'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  parser.add_option('-t', '--times', type = 'int',
                    help = 'Number of times to run each build before asking ' +
                           'if it\'s good or bad. Temporary profiles are reused.',
                    default = 1)
  parser.add_option('-c', '--command', type = 'str',
                    help = 'Command to execute. %p and %a refer to Chrome ' +
                           'executable and specified extra arguments respectively. ' +
                           'Use %s to specify all extra arguments as one string. ' +
                           'Defaults to "%p %a". Note that any extra paths ' +
                           'specified should be absolute.',
                    default = '%p %a')
  parser.add_option('-l', '--blink', action='store_true',
                    help = 'Use Blink bisect instead of Chromium.')
  parser.add_option('', '--not-interactive', action='store_true',
                    help = 'Use command exit code to tell good/bad revision.',
                    default = False)
  parser.add_option('--aura',
                    dest='aura',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect aura builds')
  parser.add_option('--use-local-repo',
                    dest='use_local_repo',
                    action='store_true',
                    default=False,
                    help='Allow the script to convert git SHA1 to SVN ' +
                         'revision using "git svn find-rev <SHA1>" ' +
                         'command from a Chromium checkout.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.aura:
    if opts.archive != 'win' or not opts.official_builds:
      print 'Error: Aura is supported only on Windows platform '\
            'and official builds.'
      return 1

  if opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, 0, 0,
                        opts.official_builds, opts.aura, opts.use_local_repo,
                        opts.flash_path, opts.pdf_path)
  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = '999.0.0.0'
    if not opts.official_builds:
      bad_rev = GetChromiumRevision(context, context.GetLastChangeURL())

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    flash_path = opts.flash_path
    msg = 'Could not find Flash binary at %s' % flash_path
    assert os.path.exists(flash_path), msg

  if opts.pdf_path:
    pdf_path = opts.pdf_path
    msg = 'Could not find PDF binary at %s' % pdf_path
    assert os.path.exists(pdf_path), msg

  if opts.official_builds:
    good_rev = LooseVersion(good_rev)
    bad_rev = LooseVersion(bad_rev)
  else:
    good_rev = int(good_rev)
    bad_rev = int(bad_rev)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  (min_chromium_rev, max_chromium_rev) = Bisect(
      base_url, opts.archive, opts.official_builds, opts.aura,
      opts.use_local_repo, good_rev, bad_rev, opts.times, opts.command,
      args, opts.profile, opts.flash_path, opts.pdf_path,
      not opts.not_interactive)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)
994 if good_rev
> bad_rev
:
995 print DONE_MESSAGE_GOOD_MAX
% (str(min_chromium_rev
),
996 str(max_chromium_rev
))
998 print DONE_MESSAGE_GOOD_MIN
% (str(min_chromium_rev
),
999 str(max_chromium_rev
))
1000 if min_blink_rev
!= max_blink_rev
:
1001 print ("NOTE: There is a Blink roll in the range, "
1002 "you might also want to do a Blink bisect.")
1004 print 'CHANGELOG URL:'
1005 if opts
.official_builds
:
1006 print OFFICIAL_CHANGELOG_URL
% (min_chromium_rev
, max_chromium_rev
)
1008 print ' ' + CHANGELOG_URL
% (min_chromium_rev
, max_chromium_rev
)
1010 if __name__
== '__main__':