# -*- coding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of logging configuration command for buckets."""

from __future__ import absolute_import

import sys

from apitools.base.py import encoding

from gslib.command import Command
from gslib.command_argument import CommandArgument
from gslib.cs_api_map import ApiSelector
from gslib.exception import CommandException
from gslib.help_provider import CreateHelpText
from gslib.storage_url import StorageUrlFromString
from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages
from gslib.util import NO_MAX
from gslib.util import UrlsAreForSingleProvider
_SET_SYNOPSIS = """
  gsutil logging set on -b logging_bucket [-o log_object_prefix] url...
  gsutil logging set off url...
"""

_GET_SYNOPSIS = """
  gsutil logging get url
"""
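# Combined synopsis shown in the top-level help for the logging command.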
_SYNOPSIS = _SET_SYNOPSIS + _GET_SYNOPSIS.lstrip('\n') + '\n'
_SET_DESCRIPTION = """
<B>SET</B>
  The set sub-command has two sub-commands:

<B>ON</B>
  The "gsutil logging set on" command will enable access logging of the
  buckets named by the specified URLs, outputting log files in the specified
  logging_bucket. logging_bucket must already exist, and all URLs must name
  buckets (e.g., gs://bucket). The required bucket parameter specifies the
  bucket to which the logs are written, and the optional log_object_prefix
  parameter specifies the prefix for log object names. The default prefix
  is the bucket name. For example, the command:

    gsutil logging set on -b gs://my_logging_bucket -o AccessLog \\
        gs://my_bucket1 gs://my_bucket2

  will cause all read and write activity to objects in gs://my_bucket1 and
  gs://my_bucket2 to be logged to objects prefixed with the name "AccessLog",
  with those log objects written to the bucket gs://my_logging_bucket.

  Next, you need to grant cloud-storage-analytics@google.com write access to
  the log bucket, using this command:

    gsutil acl ch -g cloud-storage-analytics@google.com:W gs://my_logging_bucket

  Note that log data may contain sensitive information, so you should make
  sure to set an appropriate default bucket ACL to protect that data. (See
  "gsutil help defacl".)

<B>OFF</B>
  This command will disable access logging of the buckets named by the
  specified URLs. All URLs must name buckets (e.g., gs://bucket).

  No logging data is removed from the log buckets when you disable logging,
  but Google Cloud Storage will stop delivering new logs once you have
  run this command.

"""
_GET_DESCRIPTION = """
<B>GET</B>
  If logging is enabled for the specified bucket url, the server responds
  with a JSON document that looks something like this:

    {
      "logObjectPrefix": "AccessLog",
      "logBucket": "my_logging_bucket"
    }

  You can download log data from your log bucket using the gsutil cp command.

"""
_DESCRIPTION = """
  Google Cloud Storage offers access logs and storage data in the form of
  CSV files that you can download and view. Access logs provide information
  for all of the requests made on a specified bucket in the last 24 hours,
  while the storage logs provide information about the storage consumption of
  that bucket for the last 24-hour period. The logs and storage data files
  are automatically created as new objects in a bucket that you specify, at
  24-hour intervals.

  The logging command has two sub-commands:
""" + _SET_DESCRIPTION + _GET_DESCRIPTION + """
<B>ACCESS LOG AND STORAGE DATA FIELDS</B>
  For a complete list of access log fields and storage data fields, see:
  https://developers.google.com/storage/docs/accesslogs#reviewing
"""
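# Assembled help text for the top-level command and for each sub-command.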
_DETAILED_HELP_TEXT = CreateHelpText(_SYNOPSIS, _DESCRIPTION)

_get_help_text = CreateHelpText(_GET_SYNOPSIS, _GET_DESCRIPTION)
_set_help_text = CreateHelpText(_SET_SYNOPSIS, _SET_DESCRIPTION)

class LoggingCommand(Command):
  """Implementation of gsutil logging command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'logging',
      command_name_aliases=['disablelogging', 'enablelogging', 'getlogging'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='b:o:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument('mode', choices=['on', 'off']),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='logging',
      help_name_aliases=['loggingconfig', 'logs', 'log', 'getlogging',
                         'enablelogging', 'disablelogging'],
      help_type='command_help',
      help_one_line_summary='Configure or retrieve logging on buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )
  def _Get(self):
    """Gets logging configuration for a bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['logging'])

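    # S3 bucket logging uses a different configuration schema, so it is
    # retrieved through the XML API pass-through and printed as returned.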
    if bucket_url.scheme == 's3':
      sys.stdout.write(self.gsutil_api.XmlPassThroughGetLogging(
          bucket_url, provider=bucket_url.scheme))
    else:
      if (bucket_metadata.logging and bucket_metadata.logging.logBucket and
          bucket_metadata.logging.logObjectPrefix):
        sys.stdout.write(str(encoding.MessageToJson(
            bucket_metadata.logging)) + '\n')
      else:
        sys.stdout.write('%s has no logging configuration.\n' % bucket_url)
    return 0
  def _Enable(self):
    """Enables logging configuration for a bucket."""
    # Disallow multi-provider 'logging set on' calls, because the schemas
    # differ.
    if not UrlsAreForSingleProvider(self.args):
      raise CommandException('"logging set on" command spanning providers not '
                             'allowed.')
    target_bucket_url = None
    target_prefix = None
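    # Pull the target log bucket (-b) and the optional log object prefix (-o)
    # from the parsed sub-options.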
    for opt, opt_arg in self.sub_opts:
      if opt == '-b':
        target_bucket_url = StorageUrlFromString(opt_arg)
      if opt == '-o':
        target_prefix = opt_arg

    if not target_bucket_url:
      raise CommandException('"logging set on" requires \'-b <log_bucket>\' '
                             'option')
    if not target_bucket_url.IsBucket():
      raise CommandException('-b option must specify a bucket URL.')

    # Iterate over URLs, expanding wildcards and setting logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Enabling logging on %s...', blr)
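        # Build the logging configuration; the log object prefix defaults to
        # the name of the bucket being logged when -o isn't supplied.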
        logging = apitools_messages.Bucket.LoggingValue(
            logBucket=target_bucket_url.bucket_name,
            logObjectPrefix=target_prefix or url.bucket_name)

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException('No URLs matched')
    return 0
  def _Disable(self):
    """Disables logging configuration for a bucket."""
    # Iterate over URLs, expanding wildcards, and disabling logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Disabling logging on %s...', blr)
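        # Patching with an empty LoggingValue clears the bucket's logging
        # configuration.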
        logging = apitools_messages.Bucket.LoggingValue()

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException('No URLs matched')
    return 0
  def RunCommand(self):
    """Command entry point for the logging command."""
    # Parse the sub-command (get/set) and dispatch to the matching handler.
    action_subcommand = self.args.pop(0)
    if action_subcommand == 'get':
      func = self._Get
    elif action_subcommand == 'set':
      state_subcommand = self.args.pop(0)
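      # 'set' requires a state sub-command (on/off) followed by at least one
      # bucket URL.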
      if not self.args:
        self.RaiseWrongNumberOfArgumentsException()
      if state_subcommand == 'on':
        func = self._Enable
      elif state_subcommand == 'off':
        func = self._Disable
      else:
        raise CommandException((
            'Invalid subcommand "%s" for the "%s %s" command.\n'
            'See "gsutil help logging".') % (
                state_subcommand, self.command_name, action_subcommand))
    else:
      raise CommandException(('Invalid subcommand "%s" for the %s command.\n'
                              'See "gsutil help logging".') %
                             (action_subcommand, self.command_name))
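    # Parse any remaining sub-options (-b/-o) before running the selected
    # handler.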
    self.ParseSubOpts(check_args=True)
    func()
    return 0