1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
from sdk_update_common import Error
import sdk_update_common
# Make modules that live next to (and one level above) this script importable,
# e.g. sdk_update_common.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(PARENT_DIR)
# Try to find this in the Chromium repo.
# NOTE(review): in the upstream file this block is the `except ImportError:`
# fallback used to locate the cygtar build helper inside a Chromium checkout
# (four levels up from this script) -- confirm against the full file.
CHROME_SRC_DIR = os.path.abspath(
    os.path.join(PARENT_DIR, '..', '..', '..', '..'))
sys.path.append(os.path.join(CHROME_SRC_DIR, 'native_client', 'build'))
# Pseudo-bundle name a user can request; expanded to all bundles whose
# manifest entry has recommended == 'yes' (see _GetRecommendedBundleNames).
RECOMMENDED = 'recommended'
# The bundle containing the updater itself; Update() refuses manual requests
# for it because it is updated automatically.
SDK_TOOLS = 'sdk_tools'
HTTP_CONTENT_LENGTH = 'Content-Length'  # HTTP Header field for content length
DEFAULT_CACHE_SIZE = 512 * 1024 * 1024  # 1/2 Gb cache by default
class UpdateDelegate(object):
  """Interface for the filesystem/network operations the update commands need.

  RealUpdateDelegate implements this against the real SDK install directory.
  """

  def BundleDirectoryExists(self, bundle_name):
    """Return True if a directory for |bundle_name| already exists."""
    raise NotImplementedError()

  def DownloadToFile(self, url, dest_filename):
    """Download |url| into the archive cache as |dest_filename|."""
    raise NotImplementedError()

  # Fix: the signature was truncated in the source; rename_to_dir restored to
  # match RealUpdateDelegate.ExtractArchives, which uses all four parameters.
  def ExtractArchives(self, archives, extract_dir, rename_from_dir,
                      rename_to_dir):
    """Extract cached |archives| to |extract_dir|, then rename into place."""
    raise NotImplementedError()
50 class RealUpdateDelegate(UpdateDelegate
):
51 def __init__(self
, user_data_dir
, install_dir
, cfg
):
52 UpdateDelegate
.__init
__(self
)
53 self
.archive_cache
= os
.path
.join(user_data_dir
, 'archives')
54 self
.install_dir
= install_dir
55 self
.cache_max
= getattr(cfg
, 'cache_max', DEFAULT_CACHE_SIZE
)
57 def BundleDirectoryExists(self
, bundle_name
):
58 bundle_path
= os
.path
.join(self
.install_dir
, bundle_name
)
59 return os
.path
.isdir(bundle_path
)
61 def VerifyDownload(self
, filename
, archive
):
62 """Verify that a local filename in the cache matches the given
65 Returns True if both size and sha1 match, False otherwise.
67 filename
= os
.path
.join(self
.archive_cache
, filename
)
68 if not os
.path
.exists(filename
):
69 logging
.info('File does not exist: %s.' % filename
)
71 size
= os
.path
.getsize(filename
)
72 if size
!= archive
.size
:
73 logging
.info('File size does not match (%d vs %d): %s.' % (size
,
74 archive
.size
, filename
))
76 sha1_hash
= hashlib
.sha1()
77 with
open(filename
) as f
:
78 sha1_hash
.update(f
.read())
79 if sha1_hash
.hexdigest() != archive
.GetChecksum():
80 logging
.info('File hash does not match: %s.' % filename
)
84 def BytesUsedInCache(self
):
85 """Determine number of bytes currently be in local archive cache."""
87 for root
, _
, files
in os
.walk(self
.archive_cache
):
88 for filename
in files
:
89 total
+= os
.path
.getsize(os
.path
.join(root
, filename
))
92 def CleanupCache(self
):
93 """Remove archives from the local filesystem cache until the
94 total size is below cache_max.
96 This is done my deleting the oldest archive files until the
97 condition is satisfied. If cache_max is zero then the entire
98 cache will be removed.
100 used
= self
.BytesUsedInCache()
101 logging
.info('Cache usage: %d / %d' % (used
, self
.cache_max
))
102 if used
<= self
.cache_max
:
104 clean_bytes
= used
- self
.cache_max
106 logging
.info('Clearing %d bytes in archive cache' % clean_bytes
)
108 for root
, _
, files
in os
.walk(self
.archive_cache
):
109 for filename
in files
:
110 fullname
= os
.path
.join(root
, filename
)
111 file_timestamps
.append((os
.path
.getmtime(fullname
), fullname
))
113 file_timestamps
.sort()
114 while clean_bytes
> 0:
115 assert(file_timestamps
)
116 filename_to_remove
= file_timestamps
[0][1]
117 clean_bytes
-= os
.path
.getsize(filename_to_remove
)
118 logging
.info('Removing from cache: %s' % filename_to_remove
)
119 os
.remove(filename_to_remove
)
120 # Also remove resulting empty parent directory structure
122 filename_to_remove
= os
.path
.dirname(filename_to_remove
)
123 if not os
.listdir(filename_to_remove
):
124 os
.rmdir(filename_to_remove
)
127 file_timestamps
= file_timestamps
[1:]
129 def DownloadToFile(self
, url
, dest_filename
):
130 dest_path
= os
.path
.join(self
.archive_cache
, dest_filename
)
131 sdk_update_common
.MakeDirs(os
.path
.dirname(dest_path
))
136 out_stream
= open(dest_path
, 'wb')
137 url_stream
= download
.UrlOpen(url
)
138 content_length
= int(url_stream
.info()[HTTP_CONTENT_LENGTH
])
139 progress
= download
.MakeProgressFunction(content_length
)
140 sha1
, size
= download
.DownloadAndComputeHash(url_stream
, out_stream
,
143 except urllib2
.URLError
as e
:
144 raise Error('Unable to read from URL "%s".\n %s' % (url
, e
))
146 raise Error('Unable to write to file "%s".\n %s' % (dest_filename
, e
))
153 def ExtractArchives(self
, archives
, extract_dir
, rename_from_dir
,
157 extract_path
= os
.path
.join(self
.install_dir
, extract_dir
)
158 rename_from_path
= os
.path
.join(self
.install_dir
, rename_from_dir
)
159 rename_to_path
= os
.path
.join(self
.install_dir
, rename_to_dir
)
161 # Extract to extract_dir, usually "<bundle name>_update".
162 # This way if the extraction fails, we haven't blown away the old bundle
164 sdk_update_common
.RemoveDir(extract_path
)
165 sdk_update_common
.MakeDirs(extract_path
)
166 curpath
= os
.getcwd()
171 logging
.info('Changing the directory to %s' % (extract_path
,))
172 os
.chdir(extract_path
)
173 except Exception as e
:
174 raise Error('Unable to chdir into "%s".\n %s' % (extract_path
, e
))
176 for i
, archive
in enumerate(archives
):
177 archive_path
= os
.path
.join(self
.archive_cache
, archive
)
179 if len(archives
) > 1:
180 print '(file %d/%d - "%s")' % (
181 i
+ 1, len(archives
), os
.path
.basename(archive_path
))
182 logging
.info('Extracting to %s' % (extract_path
,))
184 if sys
.platform
== 'win32':
186 logging
.info('Opening file %s (%d/%d).' % (archive_path
, i
+ 1,
189 tar_file
= cygtar
.CygTar(archive_path
, 'r', verbose
=True)
190 except Exception as e
:
191 raise Error("Can't open archive '%s'.\n %s" % (archive_path
, e
))
199 subprocess
.check_call(['tar', 'xf', archive_path
])
200 except subprocess
.CalledProcessError
:
201 raise Error('Error extracting archive: %s' % archive_path
)
203 logging
.info('Changing the directory to %s' % (curpath
,))
206 logging
.info('Renaming %s->%s' % (rename_from_path
, rename_to_path
))
207 sdk_update_common
.RenameDir(rename_from_path
, rename_to_path
)
209 # Change the directory back so we can remove the update directory.
212 # Clean up the ..._update directory.
214 sdk_update_common
.RemoveDir(extract_path
)
215 except Exception as e
:
216 logging
.error('Failed to remove directory \"%s\". %s' % (
def Update(delegate, remote_manifest, local_manifest, bundle_names, force):
  """Update the requested bundles to their latest remote versions.

  Args:
    delegate: UpdateDelegate used for downloads/extraction.
    remote_manifest: manifest describing the available bundles.
    local_manifest: manifest describing what is currently installed.
    bundle_names: bundle names requested by the user (may include the
        'recommended' pseudo-bundle).
    force: if True, overwrite bundles that already exist on disk.
  """
  valid_bundles = set([bundle.name for bundle in remote_manifest.GetBundles()])
  requested_bundles = _GetRequestedBundleNamesFromArgs(remote_manifest,
                                                       bundle_names)
  # Drop (and warn about) names the remote manifest doesn't know.
  invalid_bundles = requested_bundles - valid_bundles
  if invalid_bundles:
    logging.warn('Ignoring unknown bundle(s): %s' % (
        ', '.join(invalid_bundles)))
    requested_bundles -= invalid_bundles

  if SDK_TOOLS in requested_bundles:
    logging.warn('Updating sdk_tools happens automatically. '
                 'Ignoring manual update request.')
    requested_bundles.discard(SDK_TOOLS)

  if requested_bundles:
    for bundle_name in requested_bundles:
      logging.info('Trying to update %s' % (bundle_name,))
      UpdateBundleIfNeeded(delegate, remote_manifest, local_manifest,
                           bundle_name, force)
  else:
    logging.warn('No bundles to update.')
def Reinstall(delegate, local_manifest, bundle_names):
  """Re-download and re-extract already-installed bundles.

  Args:
    delegate: UpdateDelegate used for downloads/extraction.
    local_manifest: manifest describing what is currently installed.
    bundle_names: names of installed bundles to reinstall.
  """
  valid_bundles, invalid_bundles = \
      command_common.GetValidBundles(local_manifest, bundle_names)
  if invalid_bundles:
    logging.warn('Unknown bundle(s): %s\n' % (', '.join(invalid_bundles)))

  if not valid_bundles:
    logging.warn('No bundles to reinstall.')
    return

  for bundle_name in valid_bundles:
    # Deep-copy so mutating the archive list below can't corrupt the manifest
    # if the reinstall fails partway through.
    bundle = copy.deepcopy(local_manifest.GetBundle(bundle_name))

    # HACK(binji): There was a bug where we'd merge the bundles from the old
    # archive and the new archive when updating. As a result, some users may
    # have a cache manifest that contains duplicate archives. Remove all
    # archives with the same basename except for the most recent.
    # Because the archives are added to a list, we know the most recent is at
    # the end.
    archives = {}
    for archive in bundle.GetArchives():
      url = archive.url
      path = urlparse.urlparse(url)[2]
      basename = os.path.basename(path)
      archives[basename] = archive

    # Update the bundle with these new archives.
    bundle.RemoveAllArchives()
    for _, archive in archives.iteritems():
      bundle.AddArchive(archive)

    _UpdateBundle(delegate, bundle, local_manifest)
278 def UpdateBundleIfNeeded(delegate
, remote_manifest
, local_manifest
,
280 bundle
= remote_manifest
.GetBundle(bundle_name
)
282 if _BundleNeedsUpdate(delegate
, local_manifest
, bundle
):
283 # TODO(binji): It would be nicer to detect whether the user has any
284 # modifications to the bundle. If not, we could update with impunity.
285 if not force
and delegate
.BundleDirectoryExists(bundle_name
):
286 print ('%s already exists, but has an update available.\n'
287 'Run update with the --force option to overwrite the '
288 'existing directory.\nWarning: This will overwrite any '
289 'modifications you have made within this directory.'
293 _UpdateBundle(delegate
, bundle
, local_manifest
)
295 print '%s is already up-to-date.' % (bundle
.name
,)
297 logging
.error('Bundle %s does not exist.' % (bundle_name
,))
def _GetRequestedBundleNamesFromArgs(remote_manifest, requested_bundles):
  """Normalize the user's bundle names into a set, expanding 'recommended'.

  The RECOMMENDED pseudo-bundle is replaced with every bundle the remote
  manifest marks as recommended.
  """
  names = set(requested_bundles)
  if RECOMMENDED in names:
    names.discard(RECOMMENDED)
    names |= set(_GetRecommendedBundleNames(remote_manifest))

  return names
def _GetRecommendedBundleNames(remote_manifest):
  """Return names of bundles marked recommended == 'yes', excluding sdk_tools.

  sdk_tools is excluded because it is updated automatically.
  """
  result = []
  for bundle in remote_manifest.GetBundles():
    if bundle.recommended == 'yes' and bundle.name != SDK_TOOLS:
      result.append(bundle.name)
  return result
317 def _BundleNeedsUpdate(delegate
, local_manifest
, bundle
):
318 # Always update the bundle if the directory doesn't exist;
319 # the user may have deleted it.
320 if not delegate
.BundleDirectoryExists(bundle
.name
):
323 return local_manifest
.BundleNeedsUpdate(bundle
)
326 def _UpdateBundle(delegate
, bundle
, local_manifest
):
327 archives
= bundle
.GetHostOSArchives()
329 logging
.warn('Bundle %s does not exist for this platform.' % (bundle
.name
,))
332 archive_filenames
= []
335 for i
, archive
in enumerate(archives
):
336 archive_filename
= _GetFilenameFromURL(archive
.url
)
337 archive_filename
= os
.path
.join(bundle
.name
, archive_filename
)
339 if not delegate
.VerifyDownload(archive_filename
, archive
):
342 print 'Downloading bundle %s' % (bundle
.name
,)
343 if len(archives
) > 1:
344 print '(file %d/%d - "%s")' % (
345 i
+ 1, len(archives
), os
.path
.basename(archive
.url
))
346 sha1
, size
= delegate
.DownloadToFile(archive
.url
, archive_filename
)
347 _ValidateArchive(archive
, sha1
, size
)
349 archive_filenames
.append(archive_filename
)
351 print 'Updating bundle %s to version %s, revision %s' % (
352 bundle
.name
, bundle
.version
, bundle
.revision
)
353 extract_dir
= bundle
.name
+ '_update'
355 repath_dir
= bundle
.get('repath', None)
357 # If repath is specified:
358 # The files are extracted to nacl_sdk/<bundle.name>_update/<repath>/...
359 # The destination directory is nacl_sdk/<bundle.name>/...
360 rename_from_dir
= os
.path
.join(extract_dir
, repath_dir
)
362 # If no repath is specified:
363 # The files are extracted to nacl_sdk/<bundle.name>_update/...
364 # The destination directory is nacl_sdk/<bundle.name>/...
365 rename_from_dir
= extract_dir
367 rename_to_dir
= bundle
.name
369 delegate
.ExtractArchives(archive_filenames
, extract_dir
, rename_from_dir
,
372 logging
.info('Updating local manifest to include bundle %s' % (bundle
.name
))
373 local_manifest
.RemoveBundle(bundle
.name
)
374 local_manifest
.SetBundle(bundle
)
375 delegate
.CleanupCache()
def _GetFilenameFromURL(url):
  """Return the last path component of |url| (the archive's filename)."""
  url_path = urlparse.urlparse(url)[2]
  filename = os.path.basename(url_path)
  return filename
383 def _ValidateArchive(archive
, actual_sha1
, actual_size
):
384 if actual_size
!= archive
.size
:
385 raise Error('Size mismatch on "%s". Expected %s but got %s bytes' % (
386 archive
.url
, archive
.size
, actual_size
))
387 if actual_sha1
!= archive
.GetChecksum():
388 raise Error('SHA1 checksum mismatch on "%s". Expected %s but got %s' % (
389 archive
.url
, archive
.GetChecksum(), actual_sha1
))