# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# |             ____ _               _        __  __ _  __           |
# |            / ___| |__   ___  ___| | __   |  \/  | |/ /           |
# |           | |   | '_ \ / _ \/ __| |/ /   | |\/| | ' /            |
# |           | |___| | | |  __/ (__|   <    | |  | | . \            |
# |            \____|_| |_|\___|\___|_|\_\___|_|  |_|_|\_\           |
# |                                                                  |
# | Copyright Mathias Kettner 2018             mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software;  you can redistribute it and/or modify it
# under the  terms of the  GNU General Public License  as published by
# the Free Software Foundation in version 2.  check_mk is  distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY;  with-
# out even the implied warranty of  MERCHANTABILITY  or  FITNESS FOR A
# PARTICULAR PURPOSE. See the  GNU General Public License for more de-
# tails. You should have  received  a copy of the  GNU  General Public
# License along with GNU Make; see the file  COPYING.  If  not,  write
# to the Free Software Foundation, Inc., 51 Franklin St,  Fifth Floor,
# Boston, MA 02110-1301 USA.
def parse_aws_s3(info):  # pylint: disable=function-redefined
    """Parse AWS S3 agent output into a per-bucket metrics dict.

    Returns a dict mapping bucket name (``row['Label']``) to a dict which
    may carry the optional keys 'LocationConstraint' and 'Tagging' plus
    one sub-dict per metric name, keyed by storage type.
    """
    parsed = {}
    for row in parse_aws(info):
        bucket = parsed.setdefault(row['Label'], {})
        # Location and tagging information are optional per row; use the
        # EAFP idiom so rows without them are simply skipped.
        try:
            bucket['LocationConstraint'] = row['LocationConstraint']
        except KeyError:
            pass
        try:
            bucket['Tagging'] = row['Tagging']
        except KeyError:
            pass
        try:
            # The metric ID ends in "<storage_type>_<metric_name>"
            # (e.g. "..._StandardStorage_BucketSizeBytes"); rows whose
            # ID does not split that way, or which have no values, are
            # silently ignored.
            storage_key, size_key = row['Id'].split("_")[-2:]
            inst = bucket.setdefault(size_key, {})
            inst.setdefault(storage_key, row['Values'][0])
        except (IndexError, ValueError):
            pass
    return parsed
# .--S3 objects----------------------------------------------------------.
# | ____ _____ _ _ _ |
# | / ___|___ / ___ | |__ (_) ___ ___| |_ ___ |
# | \___ \ |_ \ / _ \| '_ \| |/ _ \/ __| __/ __| |
# | ___) |__) | | (_) | |_) | | __/ (__| |_\__ \ |
# | |____/____/ \___/|_.__// |\___|\___|\__|___/ |
# '----------------------------------------------------------------------'
def check_aws_s3_objects(item, params, metrics):
    """Check size and object count of one S3 bucket.

    item:    bucket name (unused; metrics are already per-bucket)
    params:  rule parameters; may contain 'bucket_size_levels'
    metrics: per-bucket dict produced by parse_aws_s3
    Yields Check_MK (state, infotext[, perfdata]) tuples.
    """
    bucket_sizes = metrics['BucketSizeBytes']
    storage_infos = []
    for storage_type, value in bucket_sizes.iteritems():
        storage_infos.append("%s: %s" % (storage_type, get_bytes_human_readable(value)))
    sum_size = sum(bucket_sizes.values())
    # NOTE(review): metric name reconstructed from the summary check's
    # matching call — confirm against the metrics definitions.
    yield check_levels(sum_size,
                       'aws_bucket_size',
                       params.get('bucket_size_levels', (None, None)),
                       human_readable_func=get_bytes_human_readable,
                       infoname='Bucket size')
    if storage_infos:
        yield 0, ", ".join(storage_infos)

    num_objects = sum(metrics['NumberOfObjects'].values())
    yield 0, 'Number of objects: %s' % int(num_objects), [('aws_num_objects', num_objects)]

    # 'LocationConstraint' and 'Tagging' are optional (see parse_aws_s3).
    location = metrics.get('LocationConstraint')
    if location:
        yield 0, 'Location: %s' % location

    tag_infos = []
    for tag in metrics.get('Tagging', {}):
        tag_infos.append("%s: %s" % (tag['Key'], tag['Value']))
    if tag_infos:
        yield 0, '[Tags] %s' % ", ".join(tag_infos)
# Registration of the per-bucket objects check. The dict was missing its
# closing brace (and, per the line gap, the 'has_perfdata' entry) in the
# mangled original.
check_info['aws_s3'] = {
    'parse_function': parse_aws_s3,
    'inventory_function': lambda p:\
        inventory_aws_generic(p, ['BucketSizeBytes', 'NumberOfObjects']),
    'check_function': check_aws_s3_objects,
    'service_description': 'AWS/S3 Objects %s',
    'has_perfdata': True,
    'includes': ['aws.include'],
    'group': 'aws_s3_buckets_objects',
}
# .--summary-------------------------------------------------------------.
# | ___ _ _ _ __ ___ _ __ ___ __ _ _ __ _ _ |
# | / __| | | | '_ ` _ \| '_ ` _ \ / _` | '__| | | | |
# | \__ \ |_| | | | | | | | | | | | (_| | | | |_| | |
# | |___/\__,_|_| |_| |_|_| |_| |_|\__,_|_| \__, | |
# '----------------------------------------------------------------------'
def check_aws_s3_summary(item, params, parsed):
    """Summary check over all S3 buckets.

    Reports the total size of all buckets (checked against the optional
    'bucket_size_levels' parameter) and names the largest bucket.
    Yields Check_MK (state, infotext[, perfdata]) tuples.
    """
    sum_size = 0
    largest_bucket = None
    largest_bucket_size = 0
    for bucket_name, bucket in parsed.iteritems():
        bucket_size = sum(bucket['BucketSizeBytes'].values())
        sum_size += bucket_size
        # ">=" means a later bucket wins ties; iteration order of the
        # dict is arbitrary, so tie-breaking is arbitrary as well.
        if bucket_size >= largest_bucket_size:
            largest_bucket = bucket_name
            largest_bucket_size = bucket_size
    yield check_levels(sum_size,
                       'aws_bucket_size',
                       params.get('bucket_size_levels', (None, None)),
                       human_readable_func=get_bytes_human_readable,
                       infoname='Total size')

    # No buckets at all => largest_bucket stays None; skip the line then.
    if largest_bucket:
        yield 0, 'Largest bucket: %s (%s)' % \
            (largest_bucket, get_bytes_human_readable(largest_bucket_size)), \
            [('aws_largest_bucket_size', largest_bucket_size)]
# Registration of the summary check (sub-check of aws_s3, so it shares
# the parse function). The closing brace was missing in the original.
check_info['aws_s3.summary'] = {
    'inventory_function': discover_single,
    'check_function': check_aws_s3_summary,
    'service_description': 'AWS/S3 Summary',
    'has_perfdata': True,
    'group': 'aws_s3_buckets',
}