# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gsutil API for interacting with cloud storage providers."""

from __future__ import absolute_import


class CloudApi(object):
  """Abstract base class for interacting with cloud storage providers.

  Implementations of the gsutil Cloud API are not guaranteed to be thread-safe.
  Behavior when calling a gsutil Cloud API instance simultaneously across
  threads is undefined and doing so will likely cause errors. Therefore,
  a separate instance of the gsutil Cloud API should be instantiated per-thread.
  """

  def __init__(self, bucket_storage_uri_class, logger, provider=None, debug=0):
    """Performs necessary setup for interacting with the cloud storage provider.

    Args:
      bucket_storage_uri_class: boto storage_uri class, used by APIs that
                                provide boto translation or mocking.
      logger: logging.logger for outputting log messages.
      provider: Default provider prefix describing cloud storage provider to
                connect to.
      debug: Debug level for the API implementation (0..3).
    """
    self.bucket_storage_uri_class = bucket_storage_uri_class
    self.logger = logger
    self.provider = provider
    self.debug = debug

  def GetBucket(self, bucket_name, provider=None, fields=None):
    """Gets Bucket metadata.

    Args:
      bucket_name: Name of the bucket.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Bucket metadata fields, for
              example, ['logging', 'defaultObjectAcl'].

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Bucket object.
    """
    raise NotImplementedError('GetBucket must be overloaded')

  def ListBuckets(self, project_id=None, provider=None, fields=None):
    """Lists bucket metadata for the given project.

    Args:
      project_id: Project owning the buckets, default from config if None.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these metadata fields for the listing,
              for example:
              ['items/logging', 'items/defaultObjectAcl'].
              Note that the WildcardIterator class should be used to list
              buckets instead of calling this function directly. It amends
              the fields definition from get-like syntax such as
              ['logging', 'defaultObjectAcl'] so that the caller does not
              need to prepend 'items/' or specify fields necessary for listing
              (like nextPageToken).

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Iterator over Bucket objects.
    """
    raise NotImplementedError('ListBuckets must be overloaded')

  def PatchBucket(self, bucket_name, metadata, canned_acl=None,
                  canned_def_acl=None, preconditions=None, provider=None,
                  fields=None):
    """Updates bucket metadata for the bucket with patch semantics.

    Args:
      bucket_name: Name of bucket to update.
      metadata: Bucket object defining metadata to be updated.
      canned_acl: Canned ACL to apply to the bucket.
      canned_def_acl: Canned default object ACL to apply to the bucket.
      preconditions: Preconditions for the request.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Bucket metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Bucket object describing new bucket metadata.
    """
    raise NotImplementedError('PatchBucket must be overloaded')

  def CreateBucket(self, bucket_name, project_id=None, metadata=None,
                   provider=None, fields=None):
    """Creates a new bucket with the specified metadata.

    Args:
      bucket_name: Name of the new bucket.
      project_id: Project owner of the new bucket, default from config if None.
      metadata: Bucket object defining new bucket metadata.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Bucket metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Bucket object describing new bucket metadata.
    """
    raise NotImplementedError('CreateBucket must be overloaded')

  def DeleteBucket(self, bucket_name, preconditions=None, provider=None):
    """Deletes a bucket.

    Args:
      bucket_name: Name of the bucket to delete.
      preconditions: Preconditions for the request.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      None.
    """
    raise NotImplementedError('DeleteBucket must be overloaded')

  class CsObjectOrPrefixType(object):
    """Enum class for describing CsObjectOrPrefix types."""
    OBJECT = 'object'  # Cloud object
    PREFIX = 'prefix'  # Cloud bucket subdirectory

  class CsObjectOrPrefix(object):
    """Container class for ListObjects results."""

    def __init__(self, data, datatype):
      """Stores a ListObjects result.

      Args:
        data: Root object, either an apitools Object or a string Prefix.
        datatype: CsObjectOrPrefixType of data.
      """
      self.data = data
      self.datatype = datatype

  def ListObjects(self, bucket_name, prefix=None, delimiter=None,
                  all_versions=None, provider=None, fields=None):
    """Lists objects (with metadata) and prefixes in a bucket.

    Args:
      bucket_name: Bucket containing the objects.
      prefix: Prefix for directory-like behavior.
      delimiter: Delimiter for directory-like behavior.
      all_versions: If true, list all object versions.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these metadata fields for the listing,
              for example:
              ['items/acl', 'items/updated', 'prefixes'].
              Note that the WildcardIterator class should be used to list
              objects instead of calling this function directly. It amends
              the fields definition from get-like syntax such as
              ['acl', 'updated'] so that the caller does not need to
              prepend 'items/' or specify any fields necessary for listing
              (such as prefixes or nextPageToken).

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Iterator over CsObjectOrPrefix wrapper class.
    """
    raise NotImplementedError('ListObjects must be overloaded')
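
  # Illustrative sketch: a caller consuming this listing typically branches on
  # the wrapper's datatype, for example:
  #   for obj_or_prefix in api.ListObjects(bucket_name, delimiter='/'):
  #     if obj_or_prefix.datatype == CloudApi.CsObjectOrPrefixType.OBJECT:
  #       process_object(obj_or_prefix.data)   # apitools Object
  #     else:
  #       process_prefix(obj_or_prefix.data)   # prefix string
  # Here api, process_object, and process_prefix are hypothetical names; in
  # gsutil itself listings normally go through WildcardIterator, as the
  # docstring above notes.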

  def GetObjectMetadata(self, bucket_name, object_name, generation=None,
                        provider=None, fields=None):
    """Gets object metadata.

    Args:
      bucket_name: Bucket containing the object.
      object_name: Object name.
      generation: Generation of the object to retrieve.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields, for
              example, ['acl', 'updated'].

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Object object.
    """
    raise NotImplementedError('GetObjectMetadata must be overloaded')

  def PatchObjectMetadata(self, bucket_name, object_name, metadata,
                          canned_acl=None, generation=None, preconditions=None,
                          provider=None, fields=None):
    """Updates object metadata with patch semantics.

    Args:
      bucket_name: Bucket containing the object.
      object_name: Object name for object.
      metadata: Object object defining metadata to be updated.
      canned_acl: Canned ACL to be set on the object.
      generation: Generation (or version) of the object to update.
      preconditions: Preconditions for the request.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Updated object metadata.
    """
    raise NotImplementedError('PatchObjectMetadata must be overloaded')

  class DownloadStrategy(object):
    """Enum class for specifying download strategy."""
    ONE_SHOT = 'oneshot'
    RESUMABLE = 'resumable'

  def GetObjectMedia(self, bucket_name, object_name, download_stream,
                     provider=None, generation=None, object_size=None,
                     download_strategy=DownloadStrategy.ONE_SHOT, start_byte=0,
                     end_byte=None, progress_callback=None,
                     serialization_data=None, digesters=None):
    """Gets object data.

    Args:
      bucket_name: Bucket containing the object.
      object_name: Object name.
      download_stream: Stream to send the object data to.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      generation: Generation of the object to retrieve.
      object_size: Total size of the object being downloaded.
      download_strategy: Cloud API download strategy to use for download.
      start_byte: Starting point for download (for resumable downloads and
                  range requests). Can be set to negative to request a range
                  of bytes (python equivalent of [:-3])
      end_byte: Ending point for download (for range requests).
      progress_callback: Optional callback function for progress notifications.
                         Receives calls with arguments
                         (bytes_transferred, total_size).
      serialization_data: Implementation-specific dict containing serialization
                          information for the download.
      digesters: Dict of {string : digester}, where string is a name of a hash
                 algorithm, and digester is a validation digester that supports
                 update(bytes) and digest() using that algorithm.
                 Implementation can set the digester value to None to indicate
                 bytes were not successfully digested on-the-fly.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Content-encoding string if it was detected that the server sent an encoded
      object during transfer, None otherwise.
    """
    raise NotImplementedError('GetObjectMedia must be overloaded')

  def UploadObject(self, upload_stream, object_metadata, canned_acl=None,
                   size=None, preconditions=None, progress_callback=None,
                   provider=None, fields=None):
    """Uploads object data and metadata.

    Args:
      upload_stream: Seekable stream of object data.
      object_metadata: Object metadata for new object. Must include bucket
                       and object name.
      canned_acl: Optional canned ACL to apply to object. Overrides ACL set
                  in object_metadata.
      size: Optional object size.
      preconditions: Preconditions for the request.
      progress_callback: Optional callback function for progress notifications.
                         Receives calls with arguments
                         (bytes_transferred, total_size).
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Object object for newly created destination object.
    """
    raise NotImplementedError('UploadObject must be overloaded')

  def UploadObjectStreaming(self, upload_stream, object_metadata,
                            canned_acl=None, preconditions=None,
                            progress_callback=None, provider=None,
                            fields=None):
    """Uploads object data and metadata.

    Args:
      upload_stream: Stream of object data. May not be seekable.
      object_metadata: Object metadata for new object. Must include bucket
                       and object name.
      canned_acl: Optional canned ACL to apply to object. Overrides ACL set
                  in object_metadata.
      preconditions: Preconditions for the request.
      progress_callback: Optional callback function for progress notifications.
                         Receives calls with arguments
                         (bytes_transferred, total_size), but fills in only
                         bytes_transferred.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Object object for newly created destination object.
    """
    raise NotImplementedError('UploadObjectStreaming must be overloaded')

  def UploadObjectResumable(
      self, upload_stream, object_metadata, canned_acl=None,
      size=None, preconditions=None, serialization_data=None,
      tracker_callback=None, progress_callback=None, provider=None,
      fields=None):
    """Uploads object data and metadata using a resumable upload strategy.

    Args:
      upload_stream: Seekable stream of object data.
      object_metadata: Object metadata for new object. Must include bucket
                       and object name.
      canned_acl: Optional canned ACL to apply to object. Overrides ACL set
                  in object_metadata.
      size: Total size of the object.
      preconditions: Preconditions for the request.
      serialization_data: Dict of {'url' : UploadURL} allowing for uploads to
                          be resumed.
      tracker_callback: Callback function taking an upload URL string.
                        Guaranteed to be called when the implementation gets an
                        upload URL, allowing the caller to resume the upload
                        across process breaks by saving the upload URL in
                        a tracker file.
      progress_callback: Optional callback function for progress notifications.
                         Receives calls with arguments
                         (bytes_transferred, total_size).
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields when the
              upload is complete.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Object object for newly created destination object.
    """
    raise NotImplementedError('UploadObjectResumable must be overloaded')
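
  # Illustrative note: the URL handed to tracker_callback can be persisted
  # (e.g. in a tracker file) and supplied on a later run through
  # serialization_data, e.g. serialization_data={'url': saved_upload_url}, so
  # the upload resumes instead of starting over; saved_upload_url is a
  # hypothetical name for whatever value the caller stored.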

  def CopyObject(self, src_obj_metadata, dst_obj_metadata, src_generation=None,
                 canned_acl=None, preconditions=None, progress_callback=None,
                 max_bytes_per_call=None, provider=None, fields=None):
    """Copies an object in the cloud.

    Args:
      src_obj_metadata: Object metadata for source object. Must include
                        bucket name, object name, and etag.
      dst_obj_metadata: Object metadata for new object. Must include bucket
                        and object name.
      src_generation: Generation of the source object to copy.
      canned_acl: Optional canned ACL to apply to destination object. Overrides
                  ACL set in dst_obj_metadata.
      preconditions: Destination object preconditions for the request.
      progress_callback: Optional callback function for progress notifications.
                         Receives calls with arguments
                         (bytes_transferred, total_size).
      max_bytes_per_call: Integer describing maximum number of bytes
                          to rewrite per service call.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Object object for newly created destination object.
    """
    raise NotImplementedError('CopyObject must be overloaded')

  def ComposeObject(self, src_objs_metadata, dst_obj_metadata,
                    preconditions=None, provider=None, fields=None):
    """Composes an object in the cloud.

    Args:
      src_objs_metadata: List of ComposeRequest.SourceObjectsValueListEntries
                         specifying the objects to compose.
      dst_obj_metadata: Metadata for the destination object including bucket
                        and object name.
      preconditions: Destination object preconditions for the request.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Object metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Composed object metadata.
    """
    raise NotImplementedError('ComposeObject must be overloaded')

  def DeleteObject(self, bucket_name, object_name, preconditions=None,
                   generation=None, provider=None):
    """Deletes an object.

    Args:
      bucket_name: Name of the containing bucket.
      object_name: Name of the object to delete.
      preconditions: Preconditions for the request.
      generation: Generation (or version) of the object to delete; if None,
                  deletes the live object.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      None.
    """
    raise NotImplementedError('DeleteObject must be overloaded')

  def WatchBucket(self, bucket_name, address, channel_id, token=None,
                  provider=None, fields=None):
    """Creates a notification subscription for changes to objects in a bucket.

    Args:
      bucket_name: Bucket containing the objects.
      address: Address to which to send notifications.
      channel_id: Unique ID string for the channel.
      token: If present, token string is delivered with each notification.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.
      fields: If present, return only these Channel metadata fields.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      Channel object describing the notification subscription.
    """
    raise NotImplementedError('WatchBucket must be overloaded')

  def StopChannel(self, channel_id, resource_id, provider=None):
    """Stops a notification channel.

    Args:
      channel_id: Unique ID string for the channel.
      resource_id: Version-agnostic ID string for the channel.
      provider: Cloud storage provider to connect to. If not present,
                class-wide default is used.

    Raises:
      ArgumentException for errors during input validation.
      ServiceException for errors interacting with cloud storage providers.

    Returns:
      None.
    """
    raise NotImplementedError('StopChannel must be overloaded')
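

# Illustrative sketch of driving a CloudApi implementation. The helper below is
# hypothetical (its name, the 'md5' key, and the use of hashlib are assumptions
# for the example); it shows how the digesters argument documented in
# GetObjectMedia can be used to hash object bytes as they are downloaded. Any
# object supporting update(bytes) and digest() works as a digester.
def _example_download_with_md5(cloud_api, bucket_name, object_name, stream):
  """Downloads an object via a CloudApi subclass, hashing bytes on the fly.

  Args:
    cloud_api: Concrete CloudApi implementation.
    bucket_name: Bucket containing the object.
    object_name: Object name.
    stream: Writable stream to receive the object data.

  Returns:
    MD5 digest of the downloaded bytes, or None if the implementation could
    not digest them on the fly.
  """
  import hashlib
  digesters = {'md5': hashlib.md5()}
  cloud_api.GetObjectMedia(
      bucket_name, object_name, stream,
      download_strategy=CloudApi.DownloadStrategy.ONE_SHOT,
      digesters=digesters)
  if digesters['md5'] is not None:
    return digesters['md5'].digest()
  return None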


class Preconditions(object):
  """Preconditions class for specifying preconditions to cloud API requests."""

  def __init__(self, gen_match=None, meta_gen_match=None):
    """Instantiates a Preconditions object.

    Args:
      gen_match: Perform request only if generation of target object
                 matches the given integer. Ignored for bucket requests.
      meta_gen_match: Perform request only if metageneration of target
                      object/bucket matches the given integer.
    """
    self.gen_match = gen_match
    self.meta_gen_match = meta_gen_match
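

# Illustrative sketch: building Preconditions for a read-modify-write update.
# Pinning gen_match to the generation observed when the object was read makes
# the later write fail with a precondition error (see PreconditionException
# below) if another writer changed the object in the meantime, rather than
# silently overwriting the newer data. The helper name and its argument are
# hypothetical; the field semantics come from the class above.
def _example_read_modify_write_preconditions(observed_generation):
  """Returns Preconditions pinned to the generation observed at read time."""
  return Preconditions(gen_match=observed_generation)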


class ArgumentException(Exception):
  """Exception raised when arguments to a Cloud API method are invalid.

  This exception is never raised as a result of a failed call to a cloud
  storage provider.
  """

  def __init__(self, reason):
    Exception.__init__(self)
    self.reason = reason

  def __repr__(self):
    return str(self)

  def __str__(self):
    return '%s: %s' % (self.__class__.__name__, self.reason)


class ProjectIdException(ArgumentException):
  """Exception raised when a Project ID argument is required but not present."""


class ServiceException(Exception):
  """Exception raised when a cloud storage provider request fails.

  This exception is raised only as a result of a failed remote call.
  """

  def __init__(self, reason, status=None, body=None):
    Exception.__init__(self)
    self.reason = reason
    self.status = status
    self.body = body

  def __repr__(self):
    return str(self)

  def __str__(self):
    message = '%s:' % self.__class__.__name__
    if self.status:
      message += ' %s' % self.status
    message += ' %s' % self.reason
    if self.body:
      message += '\n%s' % self.body
    return message


class RetryableServiceException(ServiceException):
  """Exception class for retryable exceptions."""


class ResumableDownloadException(RetryableServiceException):
  """Exception raised for resumable downloads that can be retried later."""


class ResumableUploadException(RetryableServiceException):
  """Exception raised for resumable uploads retryable with the same upload ID."""


class ResumableUploadStartOverException(RetryableServiceException):
  """Exception raised for resumable uploads retryable with a new upload ID."""


class ResumableUploadAbortException(ServiceException):
  """Exception raised for resumable uploads that cannot be retried later."""


class AuthenticationException(ServiceException):
  """Exception raised for errors during the authentication process."""


class PreconditionException(ServiceException):
  """Exception raised for precondition failures."""


class NotFoundException(ServiceException):
  """Exception raised when a resource is not found (404)."""


class NotEmptyException(ServiceException):
  """Exception raised when trying to delete a bucket that is not empty."""


class BadRequestException(ServiceException):
  """Exception raised for malformed requests.

  Where it is possible to detect invalid arguments prior to sending them
  to the server, an ArgumentException should be raised instead.
  """


class AccessDeniedException(ServiceException):
  """Exception raised when authenticated user has insufficient access rights.

  This is raised when the authentication process succeeded but the
  authenticated user does not have access rights to the requested resource.
  """
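

# Illustrative sketch of how the exception hierarchy above is typically
# consumed: operations may be retried on RetryableServiceException subclasses
# (ResumableDownloadException, ResumableUploadException,
# ResumableUploadStartOverException), while other ServiceExceptions such as
# NotFoundException, PreconditionException, or AccessDeniedException propagate
# to the caller. The helper, its retry count, and its backoff are assumptions
# for illustration; gsutil's real retry logic lives elsewhere.
def _example_call_with_retries(func, *args, **kwargs):
  """Calls func(*args, **kwargs), retrying a few times on retryable errors."""
  import time
  for attempt in range(3):
    try:
      return func(*args, **kwargs)
    except RetryableServiceException:
      if attempt == 2:
        raise
      time.sleep(2 ** attempt)  # Simple exponential backoff between attempts.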