Module: check_mk
Branch: master
Commit: 0c27c50386c91ecb11de9d0a74762067f0dc468d
URL:
http://git.mathias-kettner.de/git/?p=check_mk.git;a=commit;h=0c27c50386c91e…
Author: Mathias Kettner <mk@mathias-kettner.de>
Date: Thu Feb 26 10:51:24 2015 +0100
#2039 mk_logwatch: new per-logfile options maxfilesize and maxlinesize
These options allow you to truncate overly long lines (in order to save
space and processing time for excessively long lines) and to get a warning
if a logfile grows too large. Please refer to the online documentation
for details.
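
For illustration, a logwatch.cfg entry using the new options could look
like this (the path, limits and patterns are example values only):

    /var/log/myapp.log maxlinesize=4096 maxfilesize=104857600
     C panic
     W warning

As the diff below shows, maxlinesize truncates each longer line to the
given number of characters and appends "[TRUNCATED]", while maxfilesize
emits a warning once the logfile grows past the given byte count.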
---
.werks/2039 | 13 +++++++++++++
ChangeLog | 1 +
agents/plugins/.f12 | 2 +-
agents/plugins/mk_logwatch | 41 ++++++++++++++++++++++++++++-------------
4 files changed, 43 insertions(+), 14 deletions(-)
diff --git a/.werks/2039 b/.werks/2039
new file mode 100644
index 0000000..8ed32bb
--- /dev/null
+++ b/.werks/2039
@@ -0,0 +1,13 @@
+Title: mk_logwatch: new per-logfile options maxfilesize and maxlinesize
+Level: 1
+Component: checks
+Class: feature
+Compatible: compat
+State: unknown
+Version: 1.2.7i1
+Date: 1424944221
+
+These options allow you to truncate overly long lines (in order to save
+space and processing time for excessively long lines) and to get a warning
+if a logfile grows too large. Please refer to the online documentation
+for details.
diff --git a/ChangeLog b/ChangeLog
index 287f6fe..95f40b2 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -95,6 +95,7 @@
* 1862 netscaler_vserver: new check to monitor VServers on Citrix Netscaler Appliance
* 2036 docsis_channels_upstream: Add handling for codewords (WATO rule, rate computation, Perf-O-Meter)
* 1947 agent_ucs_bladecenter: Monitors UCS Bladecenter via Web-API...
+ * 2039 mk_logwatch: new per-logfile options maxfilesize and maxlinesize...
* 1457 FIX: logins: new check renamed from "users" check...
NOTE: Please refer to the migration notes!
* 1762 FIX: lnx_thermal: Now ignoring trip points with level 0...
diff --git a/agents/plugins/.f12 b/agents/plugins/.f12
index 5d08296..184bd99 100755
--- a/agents/plugins/.f12
+++ b/agents/plugins/.f12
@@ -3,4 +3,4 @@ SITE=${SITE:-$(cat ../../.site 2>/dev/null || true)}
SITE=${SITE:-$(omd sites --bare | head -n 1)}
# sudo mkdir -p /usr/lib/check_mk_agent/plugins
# sudo install -m 755 * /usr/lib/check_mk_agent/plugins || true
-sudo install -m 755 * /omd/sites/$SITE/share/check_mk/agents/plugins
+sudo rsync --delete -va ./ /omd/sites/$SITE/share/check_mk/agents/plugins/
diff --git a/agents/plugins/mk_logwatch b/agents/plugins/mk_logwatch
index d4ec518..ff77b5a 100755
--- a/agents/plugins/mk_logwatch
+++ b/agents/plugins/mk_logwatch
@@ -157,7 +157,7 @@ def save_status(status):
f.write("%s|%d|%d\n" % (filename, offset, inode))
pushed_back_line = None
-def next_line(f):
+def next_line(file_handle):
global pushed_back_line
if pushed_back_line != None:
line = pushed_back_line
@@ -165,7 +165,7 @@ def next_line(f):
return line
else:
try:
- line = f.next()
+ line = file_handle.next()
return line
except:
return None
@@ -179,8 +179,8 @@ def process_logfile(logfile, patterns):
# before, we set the offset to -1
offset, prev_inode = status.get(logfile, (-1, -1))
try:
- fl = os.open(logfile, os.O_RDONLY)
- inode = os.fstat(fl)[1] # 1 = st_ino
+ file_desc = os.open(logfile, os.O_RDONLY)
+ inode = os.fstat(file_desc)[1] # 1 = st_ino
except:
if debug:
raise
@@ -190,7 +190,7 @@ def process_logfile(logfile, patterns):
print "[[[%s]]]" % logfile
# Seek to the current end in order to determine file size
- current_end = os.lseek(fl, 0, 2) # os.SEEK_END not available in Python 2.4
+ current_end = os.lseek(file_desc, 0, 2) # os.SEEK_END not available in Python 2.4
status[logfile] = current_end, inode
# If we have never seen this file before, we just set the
@@ -223,25 +223,29 @@ def process_logfile(logfile, patterns):
offset = 0
# now seek to offset where interesting data begins
- os.lseek(fl, offset, 0) # os.SEEK_SET not available in Python 2.4
- f = os.fdopen(fl)
+ os.lseek(file_desc, offset, 0) # os.SEEK_SET not available in Python 2.4
+ file_handle = os.fdopen(file_desc)
worst = -1
outputtxt = ""
lines_parsed = 0
start_time = time.time()
while True:
- line = next_line(f)
+ line = next_line(file_handle)
if line == None:
break # End of file
+ # Handle option maxlinesize
+ if opt_maxlinesize != None and len(line) > opt_maxlinesize:
+ line = line[:opt_maxlinesize] + "[TRUNCATED]\n"
+
lines_parsed += 1
# Check if maximum number of new log messages is exceeded
if opt_maxlines != None and lines_parsed > opt_maxlines:
outputtxt += "%s Maximum number (%d) of new log messages exceeded.\n" % (opt_overflow, opt_maxlines)
worst = max(worst, opt_overflow_level)
- os.lseek(fl, 0, 2) # Seek to end of file, skip all other messages
+ os.lseek(file_desc, 0, 2) # Seek to end of file, skip all other messages
break
# Check if maximum processing time (per file) is exceeded. Check only
@@ -251,7 +255,7 @@ def process_logfile(logfile, patterns):
outputtxt += "%s Maximum parsing time (%.1f sec) of this log file exceeded.\n" % (opt_overflow, opt_maxtime)
worst = max(worst, opt_overflow_level)
- os.lseek(fl, 0, 2) # Seek to end of file, skip all other messages
+ os.lseek(file_desc, 0, 2) # Seek to end of file, skip all other messages
break
level = "."
@@ -266,14 +270,14 @@ def process_logfile(logfile, patterns):
for cont_pattern in cont_patterns:
if type(cont_pattern) == int: # add that many lines
for x in range(cont_pattern):
- cont_line = next_line(f)
+ cont_line = next_line(file_handle)
if cont_line == None: # end of file
break
line = line[:-1] + "\1" + cont_line
else: # pattern is regex
while True:
- cont_line = next_line(f)
+ cont_line = next_line(file_handle)
if cont_line == None: # end of file
break
elif cont_pattern.search(cont_line[:-1]):
@@ -299,7 +303,7 @@ def process_logfile(logfile, patterns):
continue
outputtxt += "%s%s %s%s\n" % (color, level, line[:-1], tty_normal)
- new_offset = os.lseek(fl, 0, 1) # os.SEEK_CUR not available in Python 2.4
+ new_offset = os.lseek(file_desc, 0, 1) # os.SEEK_CUR not available in Python 2.4
status[logfile] = new_offset, inode
# output all lines if at least one warning, error or ok has been found
@@ -307,6 +311,11 @@ def process_logfile(logfile, patterns):
sys.stdout.write(outputtxt)
sys.stdout.flush()
+ # Handle option maxfilesize, regardless of warning or errors that have happened
+ if opt_maxfilesize != None and offset <= opt_maxfilesize and new_offset > opt_maxfilesize:
+ sys.stdout.write("%sW Maximum allowed logfile size (%d bytes) exceeded.%s\n" % (tty_yellow, opt_maxfilesize, tty_normal))
+
+
try:
config = read_config()
except Exception, e:
@@ -329,6 +338,8 @@ for filenames, patterns in config:
# Initialize options with default values
opt_maxlines = None
opt_maxtime = None
+ opt_maxlinesize = None
+ opt_maxfilesize = None
opt_regex = None
opt_overflow = 'C'
opt_overflow_level = 2
@@ -340,6 +351,10 @@ for filenames, patterns in config:
opt_maxlines = int(value)
elif key == 'maxtime':
opt_maxtime = float(value)
+ elif key == 'maxlinesize':
+ opt_maxlinesize = int(value)
+ elif key == 'maxfilesize':
+ opt_maxfilesize = int(value)
elif key == 'overflow':
if value not in [ 'C', 'I', 'W', 'O' ]:
raise Exception("Invalid value %s for overflow. Allowed are C, I, O and W" % value)
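
For reference, when a monitored logfile crosses the configured maxfilesize,
the plugin writes a warning into that file's section of the agent output.
It does so only in the cycle in which the boundary is crossed, since the
check compares the old offset against the new one. With the example limit
above, the output would look roughly like this (illustrative values):

    [[[/var/log/myapp.log]]]
    W Maximum allowed logfile size (104857600 bytes) exceeded.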