2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
12 from telemetry
.core
import command_line
13 from telemetry
.util
import cloud_storage
# Inverse of cloud_storage.BUCKET_ALIASES: maps each real bucket name back to
# its short, human-friendly alias.
BUCKETS = dict(
    (bucket, easy_bucket_name)
    for easy_bucket_name, bucket in cloud_storage.BUCKET_ALIASES.iteritems())
21 root
, ext
= os
.path
.splitext(path
)
27 hash_path
= path
+ '.sha1'
28 return file_path
, hash_path
def _FindFilesInCloudStorage(files):
  """Returns a dict of all files and which buckets they're in."""
  # Preprocessing: get the contents of all buckets.
  bucket_contents = {}
  for bucket in BUCKETS:
    try:
      bucket_contents[bucket] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
      # Best-effort: a bucket we can't read simply contributes no matches.
      continue

  # Check if each file is in the bucket contents.
  file_buckets = {}
  for path in files:
    file_path, hash_path = _GetPaths(path)

    if file_path in file_buckets:
      # Ignore duplicates, if both data and sha1 file were in the file list.
      continue
    if not os.path.exists(hash_path):
      # Probably got some non-Cloud Storage files in the file list. Ignore.
      continue

    file_hash = cloud_storage.ReadHash(hash_path)
    file_buckets[file_path] = [
        bucket for bucket in BUCKETS
        if bucket in bucket_contents and file_hash in bucket_contents[bucket]]

  return file_buckets
62 class Ls(command_line
.Command
):
63 """List which bucket each file is in."""
66 def AddCommandLineArgs(cls
, parser
):
67 parser
.add_argument('-r', '--recursive', action
='store_true')
68 parser
.add_argument('paths', nargs
='+')
71 def ProcessCommandLineArgs(cls
, parser
, args
):
72 for path
in args
.paths
:
73 if not os
.path
.exists(path
):
74 parser
.error('Path not found: %s' % path
)
77 def GetFilesInPaths(paths
, recursive
):
78 """If path is a dir, yields all files in path, otherwise just yields path.
80 If recursive is true, walks subdirectories recursively."""
82 if not os
.path
.isdir(path
):
87 for root
, _
, filenames
in os
.walk(path
):
88 for filename
in filenames
:
89 yield os
.path
.join(root
, filename
)
91 for filename
in os
.listdir(path
):
92 yield os
.path
.join(path
, filename
)
94 files
= _FindFilesInCloudStorage(GetFilesInPaths(args
.paths
, args
.recursive
))
97 print 'No files in Cloud Storage.'
100 for file_path
, buckets
in sorted(files
.iteritems()):
102 buckets
= [BUCKETS
[bucket
] for bucket
in buckets
]
103 print '%-11s %s' % (','.join(buckets
), file_path
)
105 print '%-11s %s' % ('not found', file_path
)
class Mv(command_line.Command):
  """Move files to the given bucket."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    # Resolve the user-facing alias to the real bucket name.
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)

    # Validate everything up front so we don't move some files and then fail.
    for file_path, buckets in sorted(files.iteritems()):
      if not buckets:
        raise IOError('%s not found in Cloud Storage.' % file_path)

    for file_path, buckets in sorted(files.iteritems()):
      if args.bucket in buckets:
        buckets.remove(args.bucket)
        if not buckets:
          # Already in the target bucket and nowhere else.
          # Lazy %-args: the message is only formatted if INFO is enabled.
          logging.info('Skipping %s, no action needed.', file_path)
          continue

      # Move to the target bucket.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      cloud_storage.Move(buckets.pop(), args.bucket, file_hash)

      # Delete all additional copies.
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Rm(command_line.Command):
  """Remove files from Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')

  def Run(self, args):
    # Delete every copy of each file from every bucket it was found in.
    files = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(files.iteritems()):
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)
class Upload(command_line.Command):
  """Upload files to Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]
    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)

  def Run(self, args):
    for file_path in args.files:
      file_hash = cloud_storage.CalculateHash(file_path)

      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as f:
        # NOTE(review): original hash-file write body not fully visible;
        # assumed to write just the hex digest — confirm against upstream.
        f.write(file_hash)

      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)

      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])
class CloudStorageCommand(command_line.SubcommandCommand):
  # Subcommands exposed by this tool, in help-listing order.
  commands = (Ls, Mv, Rm, Upload)
if __name__ == '__main__':
  # Show INFO-level messages (e.g. Mv's skip notices) when run as a script.
  logging.getLogger().setLevel(logging.INFO)
  sys.exit(CloudStorageCommand.main())