#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import logging
import os
import subprocess
import sys

from telemetry.core import command_line
from telemetry.util import cloud_storage


BUCKETS = {bucket: easy_bucket_name for easy_bucket_name, bucket
           in cloud_storage.BUCKET_ALIASES.iteritems()}
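# The inverse of BUCKET_ALIASES: real bucket name -> short alias. Shape only
# (names hypothetical): {'some-gcs-bucket': 'public'}.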


def _GetPaths(path):
  """Returns (data file path, hash file path) for a data or .sha1 path."""
  root, ext = os.path.splitext(path)
  if ext == '.sha1':
    file_path = root
    hash_path = path
  else:
    file_path = path
    hash_path = path + '.sha1'
  return file_path, hash_path
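# Illustrative (path is hypothetical): _GetPaths('data/foo.wpr') and
# _GetPaths('data/foo.wpr.sha1') both return
# ('data/foo.wpr', 'data/foo.wpr.sha1').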


def _FindFilesInCloudStorage(files):
  """Returns a dict of all files and which buckets they're in."""
  # Preprocessing: get the contents of all buckets.
  bucket_contents = {}
  for bucket in BUCKETS:
    try:
      bucket_contents[bucket] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
      pass

  # Check if each file is in the bucket contents.
  file_buckets = {}
  for path in files:
    file_path, hash_path = _GetPaths(path)

    if file_path in file_buckets:
      # Ignore duplicates if both the data and .sha1 files were in the list.
      continue
    if not os.path.exists(hash_path):
      # Probably got some non-Cloud Storage files in the file list. Ignore.
      continue

    file_hash = cloud_storage.ReadHash(hash_path)
    file_buckets[file_path] = []
    for bucket in BUCKETS:
      if bucket in bucket_contents and file_hash in bucket_contents[bucket]:
        file_buckets[file_path].append(bucket)

  return file_buckets


class Ls(command_line.Command):
  """List which bucket each file is in."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('-r', '--recursive', action='store_true')
    parser.add_argument('paths', nargs='+')

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    for path in args.paths:
      if not os.path.exists(path):
        parser.error('Path not found: %s' % path)

  def Run(self, args):
    def GetFilesInPaths(paths, recursive):
      """If path is a dir, yields all files in path; otherwise yields path.

      If recursive is true, walks subdirectories recursively."""
      for path in paths:
        if not os.path.isdir(path):
          yield path
          continue

        if recursive:
          for root, _, filenames in os.walk(path):
            for filename in filenames:
              yield os.path.join(root, filename)
        else:
          for filename in os.listdir(path):
            yield os.path.join(path, filename)

    files = _FindFilesInCloudStorage(
        GetFilesInPaths(args.paths, args.recursive))

    if not files:
      print 'No files in Cloud Storage.'
      return

    for file_path, buckets in sorted(files.iteritems()):
      if buckets:
        buckets = [BUCKETS[bucket] for bucket in buckets]
        print '%-11s %s' % (','.join(buckets), file_path)
      else:
        print '%-11s %s' % ('not found', file_path)
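# Example session (paths and aliases hypothetical):
#   $ cloud_storage ls -r page_sets/data
#   public      page_sets/data/foo.wpr
#   not found   page_sets/data/bar.wpr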


class Mv(command_line.Command):
  """Move files to the given bucket."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)

    for file_path, buckets in sorted(files.iteritems()):
      if not buckets:
        raise IOError('%s not found in Cloud Storage.' % file_path)

    for file_path, buckets in sorted(files.iteritems()):
      if args.bucket in buckets:
        buckets.remove(args.bucket)
        if not buckets:
          # Already only in the target bucket; nothing to move or delete.
          logging.info('Skipping %s, no action needed.' % file_path)
          continue

      # Move to the target bucket.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      cloud_storage.Move(buckets.pop(), args.bucket, file_hash)

      # Delete all additional copies.
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
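# Example invocation (alias and path hypothetical):
#   $ cloud_storage mv page_sets/data/foo.wpr internal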


class Rm(command_line.Command):
  """Remove files from Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(files.iteritems()):
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
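# Note: rm deletes the data from every bucket it appears in but leaves the
# local .sha1 file in place; remove that by hand if it is no longer wanted.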


class Upload(command_line.Command):
  """Upload files to Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)

  def Run(self, args):
    for file_path in args.files:
      file_hash = cloud_storage.CalculateHash(file_path)

      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as f:
        f.write(file_hash)
        f.flush()

      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)

      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])
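# Example invocation (alias and path hypothetical):
#   $ cloud_storage upload page_sets/data/foo.wpr public
# Afterwards, commit the staged foo.wpr.sha1 so others can fetch the file.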


class CloudStorageCommand(command_line.SubcommandCommand):
  commands = (Ls, Mv, Rm, Upload)


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  sys.exit(CloudStorageCommand.main())