# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
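
"""Presubmit checks for tools/perf/page_sets.

On upload, offers to sync data files referenced by new .sha1 files to Cloud
Storage; on commit, verifies that every referenced data file is already there.
"""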

import os
import re
import sys


def LoadSupport(input_api):
  if 'cloud_storage' not in globals():
    # Avoid leaking changes to global sys.path.
    _old_sys_path = sys.path
    try:
      telemetry_path = os.path.join(os.path.dirname(os.path.dirname(
          input_api.PresubmitLocalPath())), 'telemetry')
      sys.path = [telemetry_path] + sys.path
      from telemetry.util import cloud_storage
      globals()['cloud_storage'] = cloud_storage
    finally:
      sys.path = _old_sys_path

  return globals()['cloud_storage']
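

# Data files (such as WPR archives) are checked in as <name>.sha1 stubs
# containing the SHA-1 of the real file, which lives in Cloud Storage. For a
# hypothetical page_sets/data/example.wpr.sha1, the data file that is checked
# and uploaded is page_sets/data/example.wpr.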
def _GetFilesNotInCloud(input_api):
  """Searches the change for .sha1 files whose data is not in Cloud Storage.

  Returns a list of (hash_path, file_hash) pairs for hashes that were not
  found in any of the known buckets.
  """
  hash_paths = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    hash_path = affected_file.AbsoluteLocalPath()
    _, extension = os.path.splitext(hash_path)
    if extension == '.sha1':
      hash_paths.append(hash_path)
  if not hash_paths:
    return []

  cloud_storage = LoadSupport(input_api)

  # Look in all buckets, in case the user uploaded the file manually. Uploads
  # made by this script prompt for the destination bucket instead.
  hashes_in_cloud_storage = cloud_storage.List(cloud_storage.PUBLIC_BUCKET)
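  # Listing the partner and internal buckets requires credentials; if that
  # fails, fall back to the public listing gathered above.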
  try:
    hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PARTNER_BUCKET)
    hashes_in_cloud_storage += cloud_storage.List(cloud_storage.INTERNAL_BUCKET)
  except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
    pass

  files = []
  for hash_path in hash_paths:
    file_hash = cloud_storage.ReadHash(hash_path)
    if file_hash not in hashes_in_cloud_storage:
      files.append((hash_path, file_hash))

  return files


def _SyncFilesToCloud(input_api, output_api):
  """Searches for .sha1 files and uploads the corresponding data files.

  It validates each hash and skips files that are already in Cloud Storage.
  """

  cloud_storage = LoadSupport(input_api)

  results = []
  for hash_path, file_hash in _GetFilesNotInCloud(input_api):
    file_path, _ = os.path.splitext(hash_path)
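
    # Sanity-check before uploading: the stub must contain a well-formed
    # SHA-1 digest, and the data file must exist and match that digest.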
    if not re.match('^([A-Fa-f0-9]{40})$', file_hash):
      results.append(output_api.PresubmitError(
          'Hash file does not contain a valid SHA-1 hash: %s' % hash_path))
      continue
    if not os.path.exists(file_path):
      results.append(output_api.PresubmitError(
          'Hash file exists, but file not found: %s' % hash_path))
      continue
    if cloud_storage.CalculateHash(file_path) != file_hash:
      results.append(output_api.PresubmitError(
          'Hash file does not match file\'s actual hash: %s' % hash_path))
      continue
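
    # Prompt for the destination bucket; BUCKET_ALIASES maps short names to
    # bucket identifiers in telemetry's cloud_storage module.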
    try:
      bucket_aliases_string = ', '.join(cloud_storage.BUCKET_ALIASES)
      bucket_input = raw_input(
          'Uploading to Cloud Storage: %s\n'
          'Which bucket should this go in? (%s) '
          % (file_path, bucket_aliases_string)).lower()
      bucket = cloud_storage.BUCKET_ALIASES.get(bucket_input, None)
      if not bucket:
        results.append(output_api.PresubmitError(
            '"%s" was not one of %s' % (bucket_input, bucket_aliases_string)))
        return results

      cloud_storage.Insert(bucket, file_hash, file_path)
      results.append(output_api.PresubmitNotifyResult(
          'Uploaded file to Cloud Storage: %s' % file_path))
    except cloud_storage.CloudStorageError as e:
      results.append(output_api.PresubmitError(
          'Unable to upload to Cloud Storage: %s\n\n%s' % (file_path, e)))

  return results


def _VerifyFilesInCloud(input_api, output_api):
  """Errors on every .sha1 file whose data file is not in Cloud Storage."""
  results = []
  for hash_path, _ in _GetFilesNotInCloud(input_api):
    results.append(output_api.PresubmitError(
        'Attempting to commit hash file, but corresponding '
        'data file is not in Cloud Storage: %s' % hash_path))
  return results


def _IsNewJsonPageSet(affected_file):
  return (affected_file.Action() == 'A' and
          'page_sets/data/' not in affected_file.AbsoluteLocalPath()
          and affected_file.AbsoluteLocalPath().endswith('.json'))


def _GetNewJsonPageSets(input_api):
  return input_api.AffectedFiles(file_filter=_IsNewJsonPageSet)
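

# Presubmit entry points invoked by depot_tools: CheckChangeOnUpload runs
# when a change is uploaded for review, and CheckChangeOnCommit runs before
# the change is committed.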
def CheckChangeOnUpload(input_api, output_api):
  results = _SyncFilesToCloud(input_api, output_api)
  return results


def CheckChangeOnCommit(input_api, output_api):
  results = _VerifyFilesInCloud(input_api, output_api)
  return results