#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# |             ____ _               _        __  __ _  __           |
# |            / ___| |__   ___  ___| | __   |  \/  | |/ /           |
# |           | |   | '_ \ / _ \/ __| |/ /   | |\/| | ' /            |
# |           | |___| | | |  __/ (__|   <    | |  | | . \            |
# |            \____|_| |_|\___|\___|_|\_\___|_|  |_|_|\_\           |
# |                                                                  |
# | Copyright Mathias Kettner 2014             mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software;  you can redistribute it and/or modify it
# under the  terms of the  GNU General Public License  as published by
# the Free Software Foundation in version 2.  check_mk is  distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY;  with-
# out even the implied warranty of  MERCHANTABILITY  or  FITNESS FOR A
# PARTICULAR PURPOSE. See the  GNU General Public License for more de-
# tails. You should have  received  a copy of the  GNU  General Public
# License along with GNU Make; see the file  COPYING.  If  not,  write
# to the Free Software Foundation, Inc., 51 Franklin St,  Fifth Floor,
# Boston, MA 02110-1301 USA.

# Example output:
# <<<fileinfo:sep(124)>>>
# 12968175080
# M:\check_mk.ini|missing
# M:\check_mk.ini|1390|12968174867
# M:\check_mk_agent.cc|86277|12968174554
# M:\Makefile|1820|12964010975
# M:\check_mk_agent.exe|102912|12968174364
# M:\crash.cc|1672|12964010975
# M:\crash.exe|20024|12968154426

# Parameters
# "minsize" : ( 5000,  4000 ),  in bytes
# "maxsize" : ( 8000,  9000 ),  in bytes
# "minage"  : ( 600,  1200 ),  in seconds
# "maxage"  : ( 6000, 12000 ), in seconds
# Rule set for file grouping; filled by configuration elsewhere and evaluated
# via host_extra_conf() in the inventory/precompile functions below.
fileinfo_groups = []


def fileinfo_groups_get_group_name(group_patterns, filename, reftime):
    """Return all groups of *group_patterns* that *filename* belongs to.

    group_patterns is a list of (group_name, pattern) rules, where pattern
    is either a single inclusion string (old format) or an
    (inclusion, exclusion) tuple.  A leading "~" marks a pattern as a
    regular expression; otherwise fnmatch globbing is used.  Group names
    may contain "%s"/"%1".."%n" placeholders that are filled from the
    regex capture groups of the inclusion pattern.

    Returns a dict {group_name: [pattern, ...]} with one entry per
    matching rule (patterns as a list, since sets cannot be stored in
    autochecks).  Raises MKGeneralException when a group name requests
    more "%s" substitutions than the regex has capture groups.
    """
    found_these_groups = {}
    for group_name, pattern in group_patterns:
        if type(pattern) == str: # support old format
            pattern = (pattern, '')

        inclusion, exclusion = pattern
        inclusion_is_regex   = False
        exclusion_is_regex   = False

        # A leading "~" switches from glob to regex matching.
        if inclusion.startswith("~"):
            inclusion_is_regex = True
            inclusion = inclusion[1:]
        if exclusion.startswith("~"):
            exclusion_is_regex = True
            exclusion = exclusion[1:]

        # Expand $DATE:...$ / $YESTERDAY:...$ macros relative to reftime.
        inclusion_tmp = fileinfo_process_date(inclusion, reftime)
        if inclusion != inclusion_tmp:
            inclusion = inclusion_tmp

        matches    = []
        num_perc_s = 0
        if inclusion_is_regex:
            incl_match = regex(inclusion).match(filename)
            if incl_match:
                num_perc_s = group_name.count("%s")
                # Replace None capture groups by "" so they can be substituted.
                matches    = [g and g or "" for g in incl_match.groups()]
        else:
            incl_match = fnmatch.fnmatch(filename, inclusion)

        if exclusion_is_regex:
            excl_match = regex(exclusion).match(filename)
        else:
            excl_match = fnmatch.fnmatch(filename, exclusion)

        if len(matches) < num_perc_s:
            raise MKGeneralException("Invalid entry in inventory_fileinfo_groups: "
                                     "group name '%s' contains %d times '%%s', but regular expression "
                                     "'%s' contains only %d subexpression(s)." %
                                     (group_name, num_perc_s, inclusion, len(matches)))

        this_group_name = None
        if incl_match and not excl_match:
            if matches:
                # Pin each capture group's matched text back into the pattern
                # (instantiate_regex_pattern_once is a project helper) and
                # resolve "%1".."%n" placeholders in the group name.
                for nr, group in enumerate(matches):
                    inclusion  = instantiate_regex_pattern_once(inclusion, group)
                    group_name = group_name.replace("%%%d" % (nr + 1), group)

                this_group_name = group_name % tuple(matches[:num_perc_s])
                this_pattern    = ("~%s" % inclusion, exclusion)

            else:
                this_group_name = group_name
                this_pattern    = pattern

        if this_group_name is not None:
            found_these_groups.setdefault(this_group_name, set())
            found_these_groups[this_group_name].add(this_pattern)

    # Convert pattern containers to lists (sets are not possible in autochecks)
    return dict([ (k, list(v)) for k, v in found_these_groups.items() ])


def inventory_fileinfo_common(info, case):
    """Shared discovery for 'single' file services and 'group' services."""
    result = []
    if info:
        reftime = int(info[0][0])

    group_rules = host_extra_conf(host_name(), fileinfo_groups)

    for line in info:
        if len(line) < 3:
            continue  # skip the reference-timestamp line and malformed rows

        matched = {}
        for rule in group_rules:
            matched.update(
                fileinfo_groups_get_group_name(rule, line[0], reftime))

        if case == 'single':
            # A file belonging to some group is not discovered individually.
            if not matched and line[1] != 'missing':
                result.append((line[0], {}))
        elif case == 'group' and matched:
            for name, patterns in matched.items():
                result.append((name, {"group_patterns": patterns}))

    return result


def fileinfo_process_date(pattern, reftime):
    """Expand a $DATE:<strftime>$ or $YESTERDAY:<strftime>$ macro in *pattern*.

    The macro body is rendered with time.strftime() in local time relative
    to *reftime* (agent reference timestamp, seconds since the epoch);
    YESTERDAY uses reftime - 86400.  Patterns without such a macro are
    returned unchanged.
    """
    for what, the_time in [("DATE", reftime), ("YESTERDAY", reftime - 86400)]:
        # Raw string: the pattern contains \$ and \w regex escapes, which are
        # invalid string-literal escapes in modern Python.  The path must
        # start with '/' or a drive letter; the macro body is one or more
        # strftime directives such as %Y%m%d.
        the_regex = r'((?:/|[A-Za-z]).*)\$%s:((?:%%\w.?){1,})\$(.*)' % what
        disect = re.match(the_regex, pattern)
        if disect:
            prefix      = disect.group(1)
            datepattern = time.strftime(disect.group(2), time.localtime(the_time))
            postfix     = disect.group(3)
            return prefix + datepattern + postfix
    return pattern


def fileinfo_check_timeranges(params):
    """Return "" while inside a configured "timeofday" range, else a notice text."""
    ranges = params.get("timeofday")
    if ranges is None:
        return ""

    now = time.localtime()
    if any(fileinfo_in_timerange(now, *spec) for spec in ranges):
        return ""
    return "Out of relevant time of day"


def fileinfo_in_timerange(now, range_from, range_to):
    """Return whether *now* (a struct_time) lies within [range_from, range_to).

    Both range endpoints are (hour, minute) tuples; the start is inclusive,
    the end exclusive.
    """
    start   = range_from[0] * 60 + range_from[1]
    end     = range_to[0] * 60 + range_to[1]
    current = now.tm_hour * 60 + now.tm_min
    return start <= current < end


def check_fileinfo(item, params, info):
    """Check size and age of a single file from the fileinfo agent section.

    info[0][0] is the agent's reference timestamp; the remaining lines are
    (path, size, mtime) or (path, "missing").  Returns a standard Check_MK
    (state, infotext[, perfdata]) tuple.
    """
    if len(info) == 0:
        return (3, "no information sent by agent")

    outof_range_txt  = fileinfo_check_timeranges(params)
    in_timerange     = outof_range_txt == ""
    check_definition = False
    state_missing    = params.get("state_missing", 3)

    try:
        reftime = int(info[0][0])
    except (IndexError, ValueError):
        return 3, "Missing reference timestamp"

    # Find the line for our item; if duplicates occur, the last one wins.
    # (Removed a dead "state = 0" assignment here that was never read.)
    for line in info[1:]:
        if item == line[0]:
            if line[1] == "missing":
                continue
            size = int(line[1])
            age = reftime - int(line[2])

            check_definition = [
                ("Size", "size", size, get_filesize_human_readable),
                ("Age",  "age",  age,  get_age_human_readable)
            ]

    if check_definition is False:
        # Outside the configured time of day a missing file is not an error.
        state = state_missing if in_timerange else 0
        infotext = ("File not found" if not outof_range_txt else
                    "File not found - %s" % outof_range_txt)
        return state, infotext

    return fileinfo_check_function(check_definition, params, outof_range_txt)


# FIXME the following does not apply anymore, because "%s" is allowed in group_name
# WHAT TO DO WITH precompile?
# Extracts patterns that are relevant for the current host and item.
# Constructs simple list of patterns and makes them available for the check
def fileinfo_groups_precompile(hostname, item, params):
    """Collect the patterns of all fileinfo_groups rules whose group name
    equals *item* and store them as 'precompiled_patterns' in a copy of
    *params*."""
    relevant = [
        pattern
        for rule in host_extra_conf(hostname, fileinfo_groups)
        for group_name_pattern, pattern in rule
        if group_name_pattern == item
    ]

    precomped = params.copy()
    precomped['precompiled_patterns'] = relevant
    return precomped


def check_fileinfo_groups(item, params, info):
    """Aggregate size/age/count over all files matching the group's patterns.

    Patterns come from params['precompiled_patterns'] (old precompile
    mechanism) and params['group_patterns'] (discovery).  Each pattern is
    either a plain inclusion string (old format) or an
    (inclusion, exclusion) tuple; a leading "~" marks a regex, otherwise
    fnmatch globbing applies.  A file matching several patterns is counted
    once per matching pattern, as before.
    """
    if not info:
        return 3, "No information sent by agent"

    outof_range_txt = fileinfo_check_timeranges(params)
    count_all       = 0
    age_oldest      = None
    age_newest      = 0
    size_all        = 0
    size_smallest   = None
    size_largest    = 0
    date_inclusion  = ""  # NOTE(review): never set below; kept for the optional output line

    try:
        reftime = int(info[0][0])
    except (IndexError, ValueError):
        return 3, "Missing reference timestamp"

    # The pattern set does not depend on the current line, so build it once
    # instead of once per file (was rebuilt inside the loop).
    # Old format does not support '%s' in group name.
    group_patterns = set(params.get('precompiled_patterns', []))
    group_patterns.update(params.get('group_patterns', []))

    # Start counting values on all files
    for line in info[1:]:
        # endswith("No such file...") is needed to support the solaris perl
        # based version of fileinfo; the test is line-level, so hoisted out
        # of the pattern loop (it previously skipped every pattern anyway).
        if line[0].endswith("No such file or directory"):
            continue

        for pattern in group_patterns:
            if type(pattern) == str:  # support old format
                pattern = (pattern, '')

            inclusion, exclusion = pattern
            inclusion_is_regex   = False
            exclusion_is_regex   = False

            if inclusion.startswith("~"):
                inclusion_is_regex = True
                inclusion = inclusion[1:]
            if exclusion.startswith("~"):
                exclusion_is_regex = True
                exclusion = exclusion[1:]

            # Expand $DATE:...$ / $YESTERDAY:...$ macros relative to reftime.
            inclusion = fileinfo_process_date(inclusion, reftime)

            if inclusion_is_regex:
                incl_match = regex(inclusion).match(line[0])
            else:
                incl_match = fnmatch.fnmatch(line[0], inclusion)

            if exclusion_is_regex:
                excl_match = regex(exclusion).match(line[0])
            else:
                excl_match = fnmatch.fnmatch(line[0], exclusion)

            if str(line[1]) not in ['missing', ''] and \
               incl_match and not excl_match:
                size = int(line[1])
                size_all += size
                if size_smallest is None:
                    size_smallest = size
                else:
                    size_smallest = min(size_smallest, size)
                size_largest = max(size_largest, size)

                age = reftime - int(line[2])
                if age_oldest is None:  # very first match
                    age_oldest = age
                    age_newest = age
                else:
                    age_oldest = max(age_oldest, age)
                    age_newest = min(age_newest, age)
                count_all += 1

    if age_oldest is None:
        age_oldest = 0

    # Start Checking
    check_definition = [
        ("Oldest age", "age_oldest", age_oldest, get_age_human_readable),
        ("Newest age", "age_newest", age_newest, get_age_human_readable),
        ("Count",      "count",      count_all,  saveint),
        ("Size",       "size",       size_all,   get_filesize_human_readable),
    ]

    if size_smallest is not None:
        check_definition.append(("Smallest size", "size_smallest",
                                 size_smallest, get_filesize_human_readable))
    if size_largest != 0:
        check_definition.append(("Largest size", "size_largest",
                                 size_largest, get_filesize_human_readable))
    if date_inclusion:
        check_definition.append(("Date pattern", "date pattern", date_inclusion, str))

    return fileinfo_check_function(check_definition, params, outof_range_txt)


def fileinfo_check_function(check_definition, params, outof_range_txt):
    """Build infotext and perfdata from check_definition and apply levels.

    check_definition is a list of (title, key, value, render) tuples.
    Thresholds are read from params['min'+key] / params['max'+key] as
    (warn, crit) pairs.  Returns (state, infotext, perfdata).
    """
    state    = 0
    infos    = []
    perfdata = []

    for title, key, val, render in check_definition:
        infos.append("%s: %s" % (title, render(val)))
        # Only numeric values carry levels and perfdata; strings (e.g. a
        # date pattern) appear in the infotext only.
        if type(val) in [long, int]:
            warn, crit = "", ""
            for prefix, symbol, violated in [
                ("min", "<", lambda a, b: a < b),
                ("max", ">", lambda a, b: a > b),
            ]:
                levels = params.get(prefix + key)
                if levels:
                    warn, crit = levels
                    if violated(val, crit):
                        state = 2
                        infos[-1] += " (%s %s)(!!)" % (symbol, render(crit))
                    elif violated(val, warn):
                        state = max(state, 1)
                        infos[-1] += " (%s %s)(!)" % (symbol, render(warn))
            perfdata.append((key, val, warn, crit))

    infotext = ", ".join(infos)
    if outof_range_txt:
        # Outside the relevant time of day the service is forced OK and the
        # level markers are stripped from the output.
        state = 0
        infotext = outof_range_txt + ', ' + infotext.replace('(!)', '').replace('(!!)', '')
    return (state, infotext, perfdata)


def inventory_fileinfo(info):
    """Discover one service per existing, ungrouped single file."""
    return inventory_fileinfo_common(info, case="single")


# Registration of the per-file check: one service per discovered file.
check_info["fileinfo"] = {
    "check_function"          : check_fileinfo,
    "inventory_function"      : inventory_fileinfo,
    "service_description"     : "File %s",
    "has_perfdata"            : True,
    "group"                   : "fileinfo",
}


def inventory_fileinfo_groups(info):
    """Discover one service per matched file group."""
    return inventory_fileinfo_common(info, case="group")


# Registration of the file-group check: one service per discovered group.
check_info['fileinfo.groups'] = {
    "check_function"          : check_fileinfo_groups,
    "inventory_function"      : inventory_fileinfo_groups,
    "service_description"     : "File group %s",
    "has_perfdata"            : True,
    "group"                   : "fileinfo-groups",
    # NOTE(review): "ps.include" looks unrelated to fileinfo -- confirm this
    # include is actually needed here.
    "includes"                : [ "ps.include" ],
}

# Expand matching host rules into 'precompiled_patterns' before check time.
precompile_params['fileinfo.groups'] = fileinfo_groups_precompile
