#!/usr/bin/python
|
|
# -*- encoding: utf-8; py-indent-offset: 4 -*-
|
|
# +------------------------------------------------------------------+
|
|
# | ____ _ _ __ __ _ __ |
|
|
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
|
|
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
|
|
# | | |___| | | | __/ (__| < | | | | . \ |
|
|
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
|
|
# | |
|
|
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
|
|
# +------------------------------------------------------------------+
|
|
#
|
|
# This file is part of Check_MK.
|
|
# The official homepage is at http://mathias-kettner.de/check_mk.
|
|
#
|
|
# check_mk is free software; you can redistribute it and/or modify it
|
|
# under the terms of the GNU General Public License as published by
|
|
# the Free Software Foundation in version 2. check_mk is distributed
|
|
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
|
|
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
|
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
|
|
# tails. You should have received a copy of the GNU General Public
|
|
# License along with GNU Make; see the file COPYING. If not, write
|
|
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
|
|
# Boston, MA 02110-1301 USA.
|
|
|
|
# Call with -d for debug mode: colored output, no saving of status
|
|
|
|
import sys, os, re, time, glob
|
|
|
|
|
|
# .--MEI-Cleanup---------------------------------------------------------.
|
|
# | __ __ _____ ___ ____ _ |
|
|
# | | \/ | ____|_ _| / ___| | ___ __ _ _ __ _ _ _ __ |
|
|
# | | |\/| | _| | |_____| | | |/ _ \/ _` | '_ \| | | | '_ \ |
|
|
# | | | | | |___ | |_____| |___| | __/ (_| | | | | |_| | |_) | |
|
|
# | |_| |_|_____|___| \____|_|\___|\__,_|_| |_|\__,_| .__/ |
|
|
# | |_| |
|
|
# +----------------------------------------------------------------------+
|
|
# In case the program crashes or is killed in a hard way, the frozen binary .exe
|
|
# may leave temporary directories named "_MEI..." in the temporary path. Clean them
|
|
# up to prevent eating disk space over time.
|
|
|
|
########################################################################
|
|
############## DUPLICATE CODE WARNING ##################################
|
|
### This code is also used in the cmk-update-agent frozen binary #######
|
|
### Any changes to this class should also be made in cmk-update-agent ##
|
|
### In the bright future we will move this code into a library #########
|
|
########################################################################
|
|
|
|
class MEIFolderCleaner(object):
    """Remove stale "_MEI<PID><NR>" temp directories left behind when a
    PyInstaller frozen binary crashes or is killed hard, to keep them from
    eating disk space over time."""

    def pid_running(self, pid):
        """Return True if a Windows process with the given PID is alive.

        OpenProcess with the SYNCHRONIZE access right succeeds for any
        running process we may synchronize with; a zero handle means the
        process does not exist (or is inaccessible).
        """
        import ctypes
        kernel32 = ctypes.windll.kernel32
        SYNCHRONIZE = 0x100000

        process = kernel32.OpenProcess(SYNCHRONIZE, 0, pid)

        if process != 0:
            kernel32.CloseHandle(process)
            return True
        else:
            return False

    def find_and_remove_leftover_folders(self, hint_filenames):
        """Delete abandoned PyInstaller temp directories below the temp path.

        Only acts when running as a frozen binary. A directory is considered
        ours when it contains all of *hint_filenames*; our own unpack
        directory (sys._MEIPASS) and directories whose embedded PID still
        belongs to a running process are kept. All per-directory errors are
        swallowed (best-effort cleanup).
        """
        if not hasattr(sys, "frozen"):
            return

        # BUGFIX: shutil was referenced below without being imported in this
        # scope (the module-level import is conditional), so the rmtree call
        # always raised NameError and was silently swallowed.
        import shutil
        import win32file  # pylint: disable=import-error
        import tempfile
        base_path = tempfile.gettempdir()
        for f in os.listdir(base_path):
            try:
                path = os.path.join(base_path, f)

                if not os.path.isdir(path):
                    continue

                # Only care about directories related to our program
                invalid_dir = False
                for hint_filename in hint_filenames:
                    if not os.path.exists(os.path.join(path, hint_filename)):
                        invalid_dir = True
                        break
                if invalid_dir:
                    continue

                pyinstaller_tmp_path = win32file.GetLongPathName(sys._MEIPASS).lower()  # pylint: disable=no-member
                if pyinstaller_tmp_path == path.lower():
                    continue  # Skip our own directory

                # Extract the process id from the directory and check whether or not it is still
                # running. Don't delete directories of running processes!
                # The name of the temporary directories is "_MEI<PID><NR>". We try to extract the PID
                # by stripping of a single digit from the right. In the hope the NR is a single digit
                # in all relevant cases.
                pid = int(f[4:-1])
                if self.pid_running(pid):
                    continue

                shutil.rmtree(path)
            except Exception:
                # TODO: introduce verbose mode for mk_logwatch
                pass
|
|
#.
|
|
|
|
# Detect the operating system we run on; fall back to "linux" when the
# platform module is unavailable or fails for any reason.
try:
    import platform
    os_type = platform.system().lower()
except:
    os_type = "linux"
|
|
|
|
# Debug mode (-d / --debug): colorize output with ANSI escapes and mark the
# run so that no state is saved later on.
if '-d' in sys.argv[1:] or '--debug' in sys.argv[1:]:
    debug = True
    tty_red, tty_green, tty_yellow, tty_blue = (
        '\033[1;31m', '\033[1;32m', '\033[1;33m', '\033[1;34m')
    tty_normal = '\033[0m'
else:
    debug = False
    tty_red = tty_green = tty_yellow = tty_blue = tty_normal = ''
|
|
|
|
# The configuration file and status file are looked up in the directory
# named by $LOGWATCH_DIR. When that is unset, $MK_CONFDIR / $MK_VARDIR
# (or $MK_STATEDIR) are used instead, defaulting to the current directory.
logwatch_dir = os.getenv("LOGWATCH_DIR")
if logwatch_dir:
    mk_confdir = mk_vardir = logwatch_dir
else:
    mk_confdir = os.getenv("MK_CONFDIR") or "."
    mk_vardir = os.getenv("MK_VARDIR") or os.getenv("MK_STATEDIR") or "."
|
|
|
|
|
|
# Announce the agent section; everything printed below is payload for the
# Check_MK logwatch check on the server side.
sys.stdout.write("<<<logwatch>>>\n")

# Main configuration file and the glob for drop-in config snippets.
config_filename = "%s/logwatch.cfg" % mk_confdir
config_dir = "%s/logwatch.d/*.cfg" % mk_confdir
|
|
|
|
|
|
# Determine the name of the state file:
#   $REMOTE set                -> logwatch.state.$REMOTE
#   $REMOTE not set and a tty  -> logwatch.state.local
#   $REMOTE not set, no tty    -> logwatch.state
remote_hostname = os.getenv("REMOTE", "").replace(":", "_")
if remote_hostname:
    status_filename = "%s/logwatch.state.%s" % (mk_vardir, remote_hostname)
elif sys.stdout.isatty():
    status_filename = "%s/logwatch.state.local" % mk_vardir
else:
    status_filename = "%s/logwatch.state" % mk_vardir

# Seed a not-yet-existing per-remote status file from the shared
# logwatch.state so known messages are not reported again.
if not os.path.exists(status_filename) and os.path.exists("%s/logwatch.state" % mk_vardir):
    import shutil
    shutil.copy("%s/logwatch.state" % mk_vardir, status_filename)
|
|
|
|
def is_not_comment(line):
    """Return True for lines carrying configuration content, i.e. lines
    that are neither blank nor comments starting with '#'."""
    stripped = line.strip()
    return bool(stripped) and not stripped.startswith('#')
|
|
|
|
def parse_filenames(line):
    """Break a logfile-declaration line into whitespace-separated tokens
    (glob patterns and key=value options)."""
    return [token for token in line.split()]
|
|
|
|
def parse_pattern(level, pattern, line):
    """Validate a classification line and compile its regular expression.

    level   -- one-letter message class: C(rit), W(arn), I(gnore) or O(k)
    pattern -- the regular expression text following the level
    line    -- the full original config line, used in error messages

    Returns (level, compiled_regex). Raises Exception on an unknown level
    or an invalid regular expression.
    """
    if level not in ['C', 'W', 'I', 'O']:
        raise Exception("Invalid pattern line '%s'" % line)

    try:
        compiled = re.compile(pattern)
    # Narrowed from a bare except: only a malformed regex should be reported
    # here; KeyboardInterrupt/SystemExit must not be swallowed.
    except re.error:
        raise Exception("Invalid regular expression in line '%s'" % line)

    return (level, compiled)
|
|
|
|
def read_config():
    """Read logwatch.cfg plus all logwatch.d/*.cfg drop-ins.

    Returns a list of (filenames, patterns) tuples. Each patterns entry is
    (level, compiled_regex, cont_list, rewrite_list); the cont/rewrite lists
    are shared objects that following 'A'/'R' lines append to until the next
    regular pattern line starts fresh lists.

    A missing main config file is silently ignored (unless debug is set);
    malformed lines raise Exception.
    """
    config_lines = []
    try:
        # open() replaces the Python-2-only file() builtin.
        config_lines += [ line.rstrip() for line in filter(is_not_comment, open(config_filename).readlines()) ]
    except IOError:
        if debug:
            raise

    # Add config from a logwatch.d folder
    for config_file in glob.glob(config_dir):
        config_lines += [ line.rstrip() for line in filter(is_not_comment, open(config_file).readlines()) ]

    have_filenames = False
    config = []
    # Pre-initialized so "patterns" is always bound; a pattern line before
    # any filename line is rejected via have_filenames below anyway.
    patterns = []
    cont_list = []
    rewrite_list = []

    for line in config_lines:
        if line[0].isspace(): # pattern line
            if not have_filenames:
                raise Exception("Missing logfile names")

            level, pattern = line.split(None, 1)

            if level == 'A':
                cont_list.append(parse_cont_pattern(pattern))
            elif level == 'R':
                rewrite_list.append(pattern)
            else:
                level, compiled = parse_pattern(level, pattern, line)
                # New pattern for line matching => clear continuation and rewrite patterns
                cont_list = []
                rewrite_list = []
                patterns.append((level, compiled, cont_list, rewrite_list))

        else: # filename line
            patterns = []
            cont_list = [] # Clear list of continuation patterns from last file
            rewrite_list = [] # Same for rewrite patterns
            config.append((parse_filenames(line), patterns))
            have_filenames = True
    return config
|
|
|
|
def parse_cont_pattern(pattern):
    """Parse the argument of an 'A' (continuation) line.

    Returns an int (absorb that many following lines) when the argument is
    numeric, otherwise a compiled regex matching continuation lines.
    Raises Exception when the argument is neither.
    """
    try:
        return int(pattern)
    # Narrowed from a bare except: a non-numeric argument is the expected
    # case and falls through to regex compilation.
    except ValueError:
        pass

    try:
        return re.compile(pattern)
    except re.error:
        if debug:
            raise
        raise Exception("Invalid regular expression in line '%s'" % pattern)
|
|
|
|
# structure of statusfile
|
|
# # LOGFILE OFFSET INODE
|
|
# /var/log/messages|7767698|32455445
|
|
# /var/test/x12134.log|12345|32444355
|
|
def read_status():
    """Load the per-logfile (offset, inode) bookkeeping from the state file.

    File format, one logfile per line: "LOGFILE|OFFSET|INODE" (the inode
    field is optional; -1 is used when missing). Older state files used
    space separation, which is still accepted as a fallback.

    Returns a dict mapping logfile path -> (offset, inode). In debug mode
    an empty dict is returned so everything is re-read.
    """
    if debug:
        return {}

    status = {}
    # open() replaces the Python-2-only file() builtin.
    for line in open(status_filename):
        # TODO: Remove variants with spaces. rsplit is
        # not portable. split fails if logfilename contains
        # spaces
        inode = -1
        try:
            parts = line.split('|')
            filename = parts[0]
            offset = parts[1]
            if len(parts) >= 3:
                inode = parts[2]

        # No '|' in the line -> legacy space-separated format.
        except IndexError:
            try:
                filename, offset = line.rsplit(None, 1)
            except ValueError:
                filename, offset = line.split(None, 1)
        status[filename] = int(offset), int(inode)
    return status
|
|
|
|
def save_status(status):
    """Write the per-logfile (offset, inode) bookkeeping to the state file.

    One line per logfile: "LOGFILE|OFFSET|INODE".
    """
    # with-statement guarantees the handle is flushed and closed (the
    # original Python-2-only file() call was never closed explicitly).
    with open(status_filename, "w") as f:
        for filename, (offset, inode) in status.items():
            f.write("%s|%d|%d\n" % (filename, offset, inode))
|
|
|
|
# One-line push-back buffer shared between next_line() and the continuation
# handling in process_logfile().
pushed_back_line = None

def next_line(file_handle):
    """Return the next complete line from file_handle, or None.

    Serves a previously pushed-back line first. Returns None at end of
    file, on read errors, or when the last line is not yet terminated by
    a newline (an application may still be writing it); in the latter
    case the underlying file descriptor is rewound to the line start so
    the line is re-read on the next run.
    """
    global pushed_back_line
    if pushed_back_line is not None:
        line = pushed_back_line
        pushed_back_line = None
        return line

    try:
        # next() builtin replaces the Python-2-only .next() method call.
        line = next(file_handle)
        # Avoid parsing of (yet) incomplete lines (when actual application
        # is just in the process of writing).
        # Just check if the line ends with a \n. This handles \n and \r\n
        if not line.endswith("\n"):
            begin_of_line_offset = file_handle.tell() - len(line)
            os.lseek(file_handle.fileno(), begin_of_line_offset, 0)
            return None
        return line
    except Exception:
        # StopIteration (end of file) and any tell/seek failure end the scan.
        return None
|
|
|
|
|
|
def is_inode_cabable(path):
    """Tell whether the filesystem behind *path* provides usable inode
    numbers for rotation detection: always on Linux, on Windows only for
    NTFS volumes, and nowhere else."""
    if "linux" in os_type:
        return True
    if "windows" not in os_type:
        return False

    # Ask Windows for the filesystem name of the drive holding *path*.
    volume_name = "%s:\\\\" % path.split(":", 1)[0]
    import win32api  # pylint: disable=import-error
    volume_info = win32api.GetVolumeInformation(volume_name)
    volume_type = volume_info[-1]
    return "ntfs" in volume_type.lower()
|
|
|
|
|
|
def process_logfile(logfile, patterns):
    """Scan new lines of one logfile, classify them and emit the section.

    Reads from the offset stored in the global ``status`` dict, classifies
    each new line with ``patterns`` ((level, regex, cont_list, rewrite_list)
    tuples), honours the module-global opt_* limits, and writes the
    "[[[logfile]]]" section to stdout when anything noteworthy was found.
    Updates ``status[logfile]`` with the new (offset, inode) as a side
    effect.
    """
    global pushed_back_line

    # Look at which file offset we have finished scanning
    # the logfile last time. If we have never seen this file
    # before, we set the offset to -1
    offset, prev_inode = status.get(logfile, (-1, -1))
    try:
        file_desc = os.open(logfile, os.O_RDONLY)
        if not is_inode_cabable(logfile):
            inode = 1 # Create a dummy inode
        else:
            inode = os.fstat(file_desc)[1] # 1 = st_ino
    except:
        if debug:
            raise
        # Unreadable file: tell the server side explicitly.
        sys.stdout.write("[[[%s:cannotopen]]]\n" % logfile)
        return

    sys.stdout.write("[[[%s]]]\n" % logfile)

    # Seek to the current end in order to determine file size
    current_end = os.lseek(file_desc, 0, 2) # os.SEEK_END not available in Python 2.4
    status[logfile] = current_end, inode

    # If we have never seen this file before, we just set the
    # current pointer to the file end. We do not want to make
    # a fuss about ancient log messages...
    if offset == -1:
        if not debug:
            return
        else:
            offset = 0

    # If the inode of the logfile has changed it has appearently
    # been started from new (logfile rotation). At least we must
    # assume that. In some rare cases (restore of a backup, etc)
    # we are wrong and resend old log messages
    if prev_inode >= 0 and inode != prev_inode:
        offset = 0

    # Our previously stored offset is the current end ->
    # no new lines in this file
    if offset == current_end:
        return # nothing new

    # If our offset is beyond the current end, the logfile has been
    # truncated or wrapped while keeping the same inode. We assume
    # that it contains all new data in that case and restart from
    # offset 0.
    if offset > current_end:
        offset = 0

    # now seek to offset where interesting data begins
    os.lseek(file_desc, offset, 0) # os.SEEK_SET not available in Python 2.4
    if os_type == "windows":
        import io # Available with python 2.6
        import codecs
        # Some windows files are encoded in utf_16
        # Peak the first two bytes to determine the encoding...
        # (reads the BOM: FF FE = UTF-16 LE, FE FF = UTF-16 BE)
        peak_handle = os.fdopen(file_desc, "rb")
        first_two_bytes = peak_handle.read(2)
        use_encoding = None
        if first_two_bytes == "\xFF\xFE":
            use_encoding = "utf_16"
        elif first_two_bytes == "\xFE\xFF":
            use_encoding = "utf_16_be"

        os.lseek(file_desc, offset, 0) # os.SEEK_SET not available in Python 2.4
        file_handle = io.open(file_desc, encoding = use_encoding)
    else:
        file_handle = os.fdopen(file_desc)

    # worst tracks the highest severity seen: -1 none, 0 OK, 1 WARN, 2 CRIT.
    worst = -1
    outputtxt = ""
    lines_parsed = 0
    start_time = time.time()

    while True:
        line = next_line(file_handle)
        if line == None:
            break # End of file

        # Handle option maxlinesize
        if opt_maxlinesize != None and len(line) > opt_maxlinesize:
            line = line[:opt_maxlinesize] + "[TRUNCATED]\n"

        lines_parsed += 1
        # Check if maximum number of new log messages is exceeded
        if opt_maxlines != None and lines_parsed > opt_maxlines:
            outputtxt += "%s Maximum number (%d) of new log messages exceeded.\n" % (
                opt_overflow, opt_maxlines)
            worst = max(worst, opt_overflow_level)
            os.lseek(file_desc, 0, 2) # Seek to end of file, skip all other messages
            break

        # Check if maximum processing time (per file) is exceeded. Check only
        # every 100'th line in order to save system calls
        if opt_maxtime != None and lines_parsed % 100 == 10 \
            and time.time() - start_time > opt_maxtime:
            outputtxt += "%s Maximum parsing time (%.1f sec) of this log file exceeded.\n" % (
                opt_overflow, opt_maxtime)
            worst = max(worst, opt_overflow_level)
            os.lseek(file_desc, 0, 2) # Seek to end of file, skip all other messages
            break

        # Classify the line with the first matching pattern.
        level = "."
        for lev, pattern, cont_patterns, replacements in patterns:
            matches = pattern.search(line[:-1])
            if matches:
                level = lev
                levelint = {'C': 2, 'W': 1, 'O': 0, 'I': -1, '.': -1}[lev]
                worst = max(levelint, worst)

                # Check for continuation lines: an int absorbs that many
                # following lines, a regex absorbs while it keeps matching.
                # Absorbed lines are joined with "\1" as separator.
                for cont_pattern in cont_patterns:
                    if type(cont_pattern) == int: # add that many lines
                        for _unused_x in range(cont_pattern):
                            cont_line = next_line(file_handle)
                            if cont_line == None: # end of file
                                break
                            line = line[:-1] + "\1" + cont_line

                    else: # pattern is regex
                        while True:
                            cont_line = next_line(file_handle)
                            if cont_line == None: # end of file
                                break
                            elif cont_pattern.search(cont_line[:-1]):
                                line = line[:-1] + "\1" + cont_line
                            else:
                                pushed_back_line = cont_line # sorry for stealing this line
                                break

                # Replacement: \0 is the whole line, \1..\9 the match groups.
                for replace in replacements:
                    line = replace.replace('\\0', line.rstrip()) + "\n"
                    for nr, group in enumerate(matches.groups()):
                        line = line.replace('\\%d' % (nr+1), group)

                break # matching rule found and executed

        color = {'C': tty_red, 'W': tty_yellow, 'O': tty_green, 'I': tty_blue, '.': ''}[level]
        if debug:
            line = line.replace("\1", "\nCONT:")
        # 'I' (ignore) lines are demoted to context ('.') for the output.
        if level == "I":
            level = "."
        if opt_nocontext and level == '.':
            continue
        outputtxt += "%s%s %s%s\n" % (color, level, line[:-1], tty_normal)

    new_offset = os.lseek(file_desc, 0, 1) # os.SEEK_CUR not available in Python 2.4
    status[logfile] = new_offset, inode

    # output all lines if at least one warning, error or ok has been found
    if worst > -1:
        sys.stdout.write(outputtxt)
        sys.stdout.flush()

    # Handle option maxfilesize, regardless of warning or errors that have happened
    # (warn each time the offset crosses another multiple of maxfilesize)
    if opt_maxfilesize != None and (offset / opt_maxfilesize) < (new_offset / opt_maxfilesize):
        sys.stdout.write("%sW Maximum allowed logfile size (%d bytes) exceeded for the %dth time.%s\n" %
            (tty_yellow, opt_maxfilesize, new_offset / opt_maxfilesize, tty_normal))
|
|
|
|
try:
    # This removes leftover folders which may be generated by crashing frozen binaries
    folder_cleaner = MEIFolderCleaner()
    folder_cleaner.find_and_remove_leftover_folders(hint_filenames = ["mk_logwatch.exe.manifest"])
# "as e" replaces the Python-2-only "except Exception, e" spelling
# (valid since Python 2.6, which this file already requires).
except Exception as e:
    sys.stdout.write("ERROR WHILE DOING FOLDER: %s\n" % e)
    sys.exit(1)
|
|
|
|
try:
    config = read_config()
# "as e" replaces the Python-2-only "except Exception, e" spelling
# (valid since Python 2.6, which this file already requires).
except Exception as e:
    if debug:
        raise
    sys.stdout.write("CANNOT READ CONFIG FILE: %s\n" % e)
    sys.exit(1)
|
|
|
|
# Simply ignore errors in the status file. In case of a corrupted status file we simply begin
# with an empty status. That keeps the monitoring up and running - even if we might lose a
# message in the extreme case of a corrupted status file.
try:
    status = read_status()
# Narrowed from the Python-2-only "except Exception, e" (the exception
# object was never used anyway).
except Exception:
    status = {}
|
|
|
|
|
|
# Map each concrete logfile path to the union of all patterns that apply to
# it (one logfile may be matched by several config sections).
logfile_patterns = {}
# The filename line may contain options like 'maxlines=100' or 'maxtime=10'
for filenames, patterns in config:
    # Initialize options with default values
    opt_maxlines = None
    opt_maxtime = None
    opt_maxlinesize = None
    opt_maxfilesize = None
    opt_regex = None
    opt_overflow = 'C'
    opt_overflow_level = 2
    opt_nocontext = False
    try:
        options = [ o.split('=', 1) for o in filenames if '=' in o ]
        for key, value in options:
            if key == 'maxlines':
                opt_maxlines = int(value)
            elif key == 'maxtime':
                opt_maxtime = float(value)
            elif key == 'maxlinesize':
                opt_maxlinesize = int(value)
            elif key == 'maxfilesize':
                opt_maxfilesize = int(value)
            elif key == 'overflow':
                if value not in [ 'C', 'I', 'W', 'O' ]:
                    raise Exception("Invalid value %s for overflow. Allowed are C, I, O and W" % value)
                opt_overflow = value
                opt_overflow_level = {'C':2, 'W':1, 'O':0, 'I':0}[value]
            elif key == 'regex':
                opt_regex = re.compile(value)
            elif key == 'iregex':
                opt_regex = re.compile(value, re.I)
            elif key == 'nocontext':
                opt_nocontext = True
            else:
                raise Exception("Invalid option %s" % key)
    # "as e" replaces the Python-2-only "except Exception, e" spelling
    # (valid since Python 2.6, which this file already requires).
    except Exception as e:
        if debug:
            raise
        sys.stdout.write("INVALID CONFIGURATION: %s\n" % e)
        sys.exit(1)

    # Expand the glob patterns (the option tokens were handled above) and
    # optionally filter the hits through regex=/iregex=.
    for glob_pattern in filenames:
        if '=' in glob_pattern:
            continue
        logfiles = glob.glob(glob_pattern)
        if opt_regex:
            logfiles = [ f for f in logfiles if opt_regex.search(f) ]
        if len(logfiles) == 0:
            sys.stdout.write('[[[%s:missing]]]\n' % glob_pattern)
        else:
            for logfile in logfiles:
                logfile_patterns[logfile] = logfile_patterns.get(logfile, []) + patterns
|
|
|
|
# Process every logfile with the combined pattern list collected above.
for logfile, patterns in logfile_patterns.items():
    process_logfile(logfile, patterns)

# In debug mode the state is deliberately not persisted so that repeated
# debug runs keep re-reporting the same messages.
if not debug:
    save_status(status)
|