# [MacViews] Show comboboxes with a native NSMenu
# [chromium-blink-merge.git] / tools / telemetry / cloud_storage
# blob e988fb2f567eff2028f2a5f1bf637b971b8482ff
1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 import argparse
7 import logging
8 import os
9 import subprocess
10 import sys
12 from telemetry.internal.util import command_line
13 from catapult_base import cloud_storage
# Reverse mapping of cloud_storage.BUCKET_ALIASES: full bucket name -> alias.
BUCKETS = dict((bucket, easy_bucket_name)
               for easy_bucket_name, bucket
               in cloud_storage.BUCKET_ALIASES.iteritems())
20 def _GetPaths(path):
21 root, ext = os.path.splitext(path)
22 if ext == '.sha1':
23 file_path = root
24 hash_path = path
25 else:
26 file_path = path
27 hash_path = path + '.sha1'
28 return file_path, hash_path
def _FindFilesInCloudStorage(files):
  """Returns a dict of all files and which buckets they're in."""
  # Preprocessing: fetch each bucket's listing up front. Buckets we cannot
  # read (missing permissions or credentials) are silently skipped.
  listings = {}
  for bucket in BUCKETS:
    try:
      listings[bucket] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
      pass

  # Map each data file to the list of buckets whose listing contains its hash.
  found = {}
  for path in files:
    file_path, hash_path = _GetPaths(path)

    if file_path in found:
      # Ignore duplicates, if both data and sha1 file were in the file list.
      continue
    if not os.path.exists(hash_path):
      # Probably got some non-Cloud Storage files in the file list. Ignore.
      continue

    file_hash = cloud_storage.ReadHash(hash_path)
    found[file_path] = [bucket for bucket in BUCKETS
                        if bucket in listings
                        and file_hash in listings[bucket]]
  return found
62 class Ls(command_line.Command):
63 """List which bucket each file is in."""
65 @classmethod
66 def AddCommandLineArgs(cls, parser):
67 parser.add_argument('-r', '--recursive', action='store_true')
68 parser.add_argument('paths', nargs='+')
70 @classmethod
71 def ProcessCommandLineArgs(cls, parser, args):
72 for path in args.paths:
73 if not os.path.exists(path):
74 parser.error('Path not found: %s' % path)
76 def Run(self, args):
77 def GetFilesInPaths(paths, recursive):
78 """If path is a dir, yields all files in path, otherwise just yields path.
79 If recursive is true, walks subdirectories recursively."""
80 for path in paths:
81 if not os.path.isdir(path):
82 yield path
83 continue
85 if recursive:
86 for root, _, filenames in os.walk(path):
87 for filename in filenames:
88 yield os.path.join(root, filename)
89 else:
90 for filename in os.listdir(path):
91 yield os.path.join(path, filename)
93 files = _FindFilesInCloudStorage(GetFilesInPaths(args.paths, args.recursive))
95 if not files:
96 print 'No files in Cloud Storage.'
97 return
99 for file_path, buckets in sorted(files.iteritems()):
100 if buckets:
101 buckets = [BUCKETS[bucket] for bucket in buckets]
102 print '%-11s %s' % (','.join(buckets), file_path)
103 else:
104 print '%-11s %s' % ('not found', file_path)
class Mv(command_line.Command):
  """Move files to the given bucket."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    # Resolve the user-facing alias to the real bucket name.
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)

    # Fail before touching anything if any file is missing from every bucket.
    for file_path, buckets in sorted(files.iteritems()):
      if not buckets:
        raise IOError('%s not found in Cloud Storage.' % file_path)

    for file_path, buckets in sorted(files.iteritems()):
      # Source buckets are every bucket holding a copy except the target.
      sources = [bucket for bucket in buckets if bucket != args.bucket]
      if not sources:
        # Only copy is already in the target bucket; nothing to do.
        logging.info('Skipping %s, no action needed.' % file_path)
        continue

      # Move one copy to the target bucket.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      cloud_storage.Move(sources.pop(), args.bucket, file_hash)

      # Delete all additional copies.
      for bucket in sources:
        cloud_storage.Delete(bucket, file_hash)
class Rm(command_line.Command):
  """Remove files from Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')

  def Run(self, args):
    # Delete every copy of each file, in every bucket where it was found.
    located = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(located.iteritems()):
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Upload(command_line.Command):
  """Upload files to Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    # Resolve the alias and reject missing files before any upload starts.
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)

  def Run(self, args):
    for file_path in args.files:
      file_hash = cloud_storage.CalculateHash(file_path)

      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as hash_file:
        hash_file.write(file_hash)
        hash_file.flush()

      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)

      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])
class CloudStorageCommand(command_line.SubcommandCommand):
  # Subcommands this tool dispatches to; each is a command_line.Command above.
  commands = (Ls, Mv, Rm, Upload)
if __name__ == '__main__':
  # Surface info-level messages (e.g. Mv's "Skipping ..." notices) when the
  # script is run directly; exit with the subcommand's return code.
  logging.getLogger().setLevel(logging.INFO)
  sys.exit(CloudStorageCommand.main())