Module: check_mk
Branch: master
Commit: 0e423f8f4d6e22f27ce89f8c8b246be7eafe6fc4
URL:
http://git.mathias-kettner.de/git/?p=check_mk.git;a=commit;h=0e423f8f4d6e22f27ce89f8c8b246be7eafe6fc4
Author: Simon Betz <si(a)mathias-kettner.de>
Date: Wed Feb 6 13:16:56 2019 +0100
aws_s3, aws_s3.summary: Show location and tagging information and renamed summary service
Change-Id: I8b4631ab5666873bcc7dda6c67c6283afdc65514
---
agents/special/agent_aws | 10 +++-
checkman/{aws_s3_summary => aws_s3.summary} | 0
checks/aws_s3 | 86 +++++++++++++++++++++++++++--
checks/aws_s3_summary | 71 ------------------------
4 files changed, 88 insertions(+), 79 deletions(-)
diff --git a/agents/special/agent_aws b/agents/special/agent_aws
index e507fdc..c204d3a 100755
--- a/agents/special/agent_aws
+++ b/agents/special/agent_aws
@@ -633,7 +633,7 @@ class S3Summary(AWSSectionGeneric):
return AWSComputedContent(raw_content.content, raw_content.cache_timestamp)
def _create_results(self, computed_content):
- return [AWSSectionResult("", computed_content.content)]
+ return [AWSSectionResult("", None)]
class S3(AWSSectionCloudwatch):
@@ -648,14 +648,14 @@ class S3(AWSSectionCloudwatch):
def _get_colleague_contents(self):
colleague = self._received_results.get('s3_summary')
if colleague and colleague.content:
- return AWSColleagueContents([bucket['Name'] for bucket in colleague.content],
+ return AWSColleagueContents({bucket['Name']: bucket for bucket in colleague.content},
colleague.cache_timestamp)
return AWSColleagueContents([], 0)
def _get_metrics(self, colleague_contents):
metrics = []
idx = 0
- for bucket_name in colleague_contents.content:
+ for bucket_name in colleague_contents.content.iterkeys():
for metric_name, unit, storage_classes in [
("BucketSizeBytes", "Bytes", [
"StandardStorage",
@@ -689,6 +689,10 @@ class S3(AWSSectionCloudwatch):
return metrics
def _compute_content(self, raw_content, colleague_contents):
+ for row in raw_content.content:
+ bucket = colleague_contents.content.get(row['Label'])
+ if bucket:
+ row.update(bucket)
return AWSComputedContent(raw_content.content, raw_content.cache_timestamp)
def _create_results(self, computed_content):
diff --git a/checkman/aws_s3_summary b/checkman/aws_s3.summary
similarity index 100%
rename from checkman/aws_s3_summary
rename to checkman/aws_s3.summary
diff --git a/checks/aws_s3 b/checks/aws_s3
index 3c66361..5b0adf6 100644
--- a/checks/aws_s3
+++ b/checks/aws_s3
@@ -29,7 +29,16 @@ def parse_aws_s3(info):
parsed = {}
for row in parse_aws(info):
splitted_row = row['Id'].split("_")
- inst = parsed.setdefault(row['Label'], {}).setdefault(splitted_row[0], {})
+ bucket = parsed.setdefault(row['Label'], {})
+ try:
+ bucket['LocationConstraint'] = row['LocationConstraint']
+ except KeyError:
+ pass
+ try:
+ bucket['Tagging'] = row['Tagging']
+ except KeyError:
+ pass
+ inst = bucket.setdefault(splitted_row[0], {})
try:
inst.setdefault(splitted_row[1], row['Values'][0])
except (IndexError, ValueError):
@@ -37,20 +46,87 @@ def parse_aws_s3(info):
return parsed
+# .--S3 objects----------------------------------------------------------.
+# | ____ _____ _ _ _ |
+# | / ___|___ / ___ | |__ (_) ___ ___| |_ ___ |
+# | \___ \ |_ \ / _ \| '_ \| |/ _ \/ __| __/ __| |
+# | ___) |__) | | (_) | |_) | | __/ (__| |_\__ \ |
+# | |____/____/ \___/|_.__// |\___|\___|\__|___/ |
+# | |__/ |
+# '----------------------------------------------------------------------'
+
+aws_s3_storage_mapping = {
+ "standardstorage": "Standard",
+ "standardiastorage": "IA",
+ "reducedredundancystorage": "Redundancy",
+}
+
+
@get_parsed_item_data
-def check_aws_s3(item, params, metrics):
- sum_size = sum(metrics['bucketsizebytes'].values())
- yield 0, 'Bucket size: %s' % get_bytes_human_readable(sum_size), [('bucket_size', sum_size)]
+def check_aws_s3_objects(item, params, metrics):
+ bucket_sizes = metrics['bucketsizebytes']
+ storage_infos = []
+ for storage_type, value in bucket_sizes.iteritems():
+ storage_infos.append(
+ "%s: %s" % (aws_s3_storage_mapping[storage_type], get_bytes_human_readable(value)))
+ sum_size = sum(bucket_sizes.values())
+ yield (0,
+ 'Bucket size: %s (%s)' % (get_bytes_human_readable(sum_size), ", ".join(storage_infos)),
+ [('bucket_size', sum_size)])
num_objects = sum(metrics['numberofobjects'].values())
yield 0, 'Number of objects: %s' % int(num_objects), [('num_objects', num_objects)]
+ location = metrics.get('LocationConstraint')
+ if location:
+ yield 0, 'Location: %s' % location
+
+ tag_infos = []
+ for tag in metrics.get('Tagging', {}):
+ tag_infos.append("%s: %s" % (tag['Key'], tag['Value']))
+ if tag_infos:
+ yield 0, '[Tags] %s' % ", ".join(tag_infos)
+
check_info['aws_s3'] = {
'parse_function': parse_aws_s3,
'inventory_function': discover(),
- 'check_function': check_aws_s3,
+ 'check_function': check_aws_s3_objects,
'service_description': 'AWS/S3 Objects %s',
'has_perfdata': True,
'includes': ['aws.include'],
}
+
+#.
+# .--summary-------------------------------------------------------------.
+# | |
+# | ___ _ _ _ __ ___ _ __ ___ __ _ _ __ _ _ |
+# | / __| | | | '_ ` _ \| '_ ` _ \ / _` | '__| | | | |
+# | \__ \ |_| | | | | | | | | | | | (_| | | | |_| | |
+# | |___/\__,_|_| |_| |_|_| |_| |_|\__,_|_| \__, | |
+# | |___/ |
+# '----------------------------------------------------------------------'
+
+
+def check_aws_s3_summary(item, params, parsed):
+ sum_size = 0
+ largest_bucket = None
+ largest_bucket_size = 0
+ for bucket_name, bucket in parsed.iteritems():
+ bucket_size = sum(bucket['bucketsizebytes'].values())
+ sum_size += bucket_size
+ if bucket_size >= largest_bucket_size:
+ largest_bucket = bucket_name
+ largest_bucket_size = bucket_size
+ yield 0, 'Total size: %s' % get_bytes_human_readable(sum_size)
+
+ if largest_bucket:
+ yield 0, 'Largest bucket: %s (%s)' % \
+ (largest_bucket, get_bytes_human_readable(largest_bucket_size))
+
+
+check_info['aws_s3.summary'] = {
+ 'inventory_function': discover_single,
+ 'check_function': check_aws_s3_summary,
+ 'service_description': 'AWS/S3 Summary',
+}
diff --git a/checks/aws_s3_summary b/checks/aws_s3_summary
deleted file mode 100644
index 91c7d48..0000000
--- a/checks/aws_s3_summary
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/python
-# -*- encoding: utf-8; py-indent-offset: 4 -*-
-# +------------------------------------------------------------------+
-# | ____ _ _ __ __ _ __ |
-# | / ___| |__ ___ ___| | __ | \/ | |/ / |
-# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
-# | | |___| | | | __/ (__| < | | | | . \ |
-# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
-# | |
-# | Copyright Mathias Kettner 2018 mk(a)mathias-kettner.de |
-# +------------------------------------------------------------------+
-#
-# This file is part of Check_MK.
-# The official homepage is at http://mathias-kettner.de/check_mk.
-#
-# check_mk is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation in version 2. check_mk is distributed
-# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
-# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-# PARTICULAR PURPOSE. See the GNU General Public License for more de-
-# tails. You should have received a copy of the GNU General Public
-# License along with GNU Make; see the file COPYING. If not, write
-# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
-# Boston, MA 02110-1301 USA.
-
-
-def parse_aws_s3_summary(info):
- parsed = {}
- for row in parse_aws(info):
- parsed.setdefault(row['Name'], row)
- return parsed
-
-
-def _extract_aws_s3_tags(metrics):
- # metrics is dict with key 'Tagging'
- return ", ".join(
- ["%s: %s" % (pair['Key'], pair['Value']) for pair in metrics.get('Tagging', [])])
-
-
-def check_aws_s3_summary(item, params, parsed):
- yield 0, '%s buckets' % len(parsed)
- buckets_by_region = {}
- long_output = []
- for bucket_name, row in parsed.iteritems():
- region = row['LocationConstraint']
- if not region:
- region = "unknown"
-
- buckets_by_region.setdefault(region, []).append(row)
- long_output_info = ["Bucket: %s, Region: %s" % (bucket_name, region)]
- tags = _extract_aws_s3_tags(row)
- if tags:
- long_output_info.append("[Tags] %s" % tags)
- long_output.append(", ".join(long_output_info))
-
- for region, buckets in buckets_by_region.iteritems():
- region_readable = AWSRegions.get(region, 'unknown[%s]' % region)
- yield 0, "%s: %s" % (region_readable, len(buckets))
-
- if long_output:
- yield 0, '\n%s' % '\n'.join(long_output)
-
-
-check_info['aws_s3_summary'] = {
- 'parse_function': parse_aws_s3_summary,
- 'inventory_function': discover_single,
- 'check_function': check_aws_s3_summary,
- 'service_description': 'AWS/S3 Summary',
- 'includes': ['aws.include'],
-}