#!/usr/bin/env python3
# -*- coding: utf-8; py-indent-offset: 4 -*-
#
# Author:  Linuxfabrik GmbH, Zurich, Switzerland
# Contact: info (at) linuxfabrik (dot) ch
#          https://www.linuxfabrik.ch/
# License: The Unlicense, see LICENSE file.

# https://github.com/Linuxfabrik/monitoring-plugins/blob/main/CONTRIBUTING.md

"""See the check's README for more details."""

import argparse
import sys

import lib.args
import lib.base
import lib.human
import lib.lftest
import lib.nodebb
from lib.globals import STATE_CRIT, STATE_OK, STATE_UNKNOWN, STATE_WARN

__author__ = 'Linuxfabrik GmbH, Zurich/Switzerland'
__version__ = '2026040801'

DESCRIPTION = """Monitors NodeBB cache usage via the admin API. Alerts when cache utilization exceeds
the configured thresholds."""

# Default thresholds and connection settings; all overridable on the CLI.
DEFAULT_CRIT = 90
DEFAULT_INSECURE = False
DEFAULT_NO_PROXY = False
DEFAULT_SEVERITY = 'warn'
# Backward-compatible alias: the constant was originally published under this
# misspelled name; keep it pointing at the fixed one so existing references
# (inside this plugin and by any external importer) continue to work.
DEFAULT_SERVERITY = DEFAULT_SEVERITY
DEFAULT_TIMEOUT = 3
DEFAULT_URL = 'http://localhost:4567/forum'
DEFAULT_WARN = 80


def parse_args():
    """Parse command line arguments using argparse.

    Unknown arguments are tolerated (``parse_known_args``) so the plugin
    does not abort when the monitoring framework passes extra options.
    """
    parser = argparse.ArgumentParser(description=DESCRIPTION)

    # Each entry: (flag tuple, add_argument keyword arguments). Options are
    # registered in this order, which also fixes the `--help` output order.
    option_specs = [
        (
            ('-V', '--version'),
            {
                'action': 'version',
                'version': f'%(prog)s: v{__version__} by {__author__}',
            },
        ),
        (
            ('--always-ok',),
            {
                'help': lib.args.help('--always-ok'),
                'dest': 'ALWAYS_OK',
                'action': 'store_true',
                'default': False,
            },
        ),
        (
            ('-c', '--critical'),
            {
                'help': lib.args.help('--critical') + ' Default: >= %(default)s',
                'dest': 'CRIT',
                'type': int,
                'default': DEFAULT_CRIT,
            },
        ),
        (
            ('--insecure',),
            {
                'help': lib.args.help('--insecure'),
                'dest': 'INSECURE',
                'action': 'store_true',
                'default': DEFAULT_INSECURE,
            },
        ),
        (
            ('--no-proxy',),
            {
                'help': lib.args.help('--no-proxy'),
                'dest': 'NO_PROXY',
                'action': 'store_true',
                'default': DEFAULT_NO_PROXY,
            },
        ),
        (
            ('--severity',),
            {
                'help': 'Severity for alerts that do not depend on thresholds. '
                        'One of "warn" or "crit". '
                        'Default: %(default)s',
                'dest': 'SEVERITY',
                'default': DEFAULT_SERVERITY,
                'choices': ['warn', 'crit'],
            },
        ),
        (
            ('--test',),
            {
                'help': lib.args.help('--test'),
                'dest': 'TEST',
                'type': lib.args.csv,
            },
        ),
        (
            ('--timeout',),
            {
                'help': lib.args.help('--timeout') + ' Default: %(default)s (seconds)',
                'dest': 'TIMEOUT',
                'type': int,
                'default': DEFAULT_TIMEOUT,
            },
        ),
        (
            ('-p', '--token'),
            {
                'help': 'NodeBB API bearer token.',
                'dest': 'TOKEN',
                'required': True,
            },
        ),
        (
            ('--url',),
            {
                'help': 'NodeBB API URL. Default: %(default)s',
                'dest': 'URL',
                'default': DEFAULT_URL,
            },
        ),
        (
            ('-w', '--warning'),
            {
                'help': lib.args.help('--warning') + ' Default: >= %(default)s',
                'dest': 'WARN',
                'type': int,
                'default': DEFAULT_WARN,
            },
        ),
    ]

    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)

    known_args, _unknown = parser.parse_known_args()
    return known_args


def main():
    """The main function. This is where the magic happens.

    Fetches NodeBB cache statistics, emits perfdata for each cache, builds
    a status table, and exits with the worst observed state.
    """

    # parse the command line
    try:
        args = parse_args()
    except SystemExit:
        # argparse exits on its own (e.g. --help, --version, invalid input);
        # re-exit as UNKNOWN so the monitoring system sees a defined state.
        sys.exit(STATE_UNKNOWN)

    # fetch data
    if args.TEST is None:
        # live mode: query the NodeBB admin API via the project helper
        result = lib.nodebb.get_data(args, '/api/admin/advanced/cache')
    else:
        # do not call the command, put in test data
        # (local import: json is only needed in test mode)
        import json

        stdout, _stderr, _retc = lib.lftest.test(args.TEST)
        result = json.loads(stdout)

    # init some vars
    msg = ''
    state = STATE_OK
    perfdata = ''
    table_data = []

    # analyze data
    for cache in ['postCache', 'groupCache', 'localCache', 'objectCache']:
        # NodeBB returns 'hits'/'misses' as locale-formatted strings
        # (e.g. "1,234"); strip the thousands separators. NOTE(review): the
        # values remain of type str afterwards — presumably the lib helpers
        # below accept numeric strings; confirm against lib.base/lib.human.
        result[cache]['hits'] = result[cache]['hits'].replace(',', '')
        result[cache]['misses'] = result[cache]['misses'].replace(',', '')
        # hitRatio arrives as a fraction (0..1); convert to a percentage
        result[cache]['hitRatio'] = round(float(result[cache]['hitRatio']) * 100, 1)

        # Emit perfdata while the values are still raw/numeric. Ordering
        # matters: later in this loop the same dict entries are overwritten
        # with human-readable display strings for the table.
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_hitRatio',
            result[cache]['hitRatio'],
            uom='%',
            _min=0,
            _max=100,
        )
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_hits',
            result[cache]['hits'],
            uom='c',
            _min=0,
        )
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_itemCount',
            result[cache]['itemCount'],
            _min=0,
        )
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_length',
            result[cache]['length'],
            _min=0,
            _max=result[cache]['max'],
        )
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_misses',
            result[cache]['misses'],
            uom='c',
            _min=0,
        )
        # only percentFull carries the warn/crit thresholds
        perfdata += lib.base.get_perfdata(
            'cache_' + cache + '_percentFull',
            result[cache]['percentFull'],
            uom='%',
            warn=args.WARN,
            crit=args.CRIT,
            _min=0,
            _max=100,
        )

        result[cache]['name'] = cache
        if not result[cache]['enabled']:
            # a disabled cache alerts at the user-chosen --severity level
            cache_enabled_state = lib.base.str2state(args.SEVERITY)
            state = lib.base.get_worst(state, cache_enabled_state)
            result[cache]['enabled'] = (
                f'{result[cache]["enabled"]}'
                f'{lib.base.state2str(cache_enabled_state, prefix=" ")}'
            )
        # threshold check on cache utilization
        cache_usage_state = lib.base.get_state(
            result[cache]['percentFull'], args.WARN, args.CRIT
        )
        state = lib.base.get_worst(state, cache_usage_state)
        # From here on, dict entries are reformatted into display strings
        # for the output table (perfdata above already captured the raw values).
        result[cache]['percentFull'] = (
            f'{result[cache]["percentFull"]}%'
            f'{lib.base.state2str(cache_usage_state, prefix=" ")}'
        )
        result[cache]['size'] = (
            f'{lib.human.number2human(result[cache]["length"])}'
            f' / '
            f'{lib.human.number2human(result[cache]["max"])}'
        )
        result[cache]['hits'] = lib.human.number2human(result[cache]['hits'])
        result[cache]['misses'] = lib.human.number2human(result[cache]['misses'])
        result[cache]['hitRatio'] = (
            f'{lib.human.number2human(result[cache]["hitRatio"])}%'
        )

        table_data.append(result[cache])

    # build the message
    if table_data:
        # keys index into each row dict; headers are the column titles
        keys = [
            'name',
            'enabled',
            'percentFull',
            'size',
            'hits',
            'misses',
            'hitRatio',
        ]
        headers = [
            'Cache',
            'Enabled',
            'Usage',
            'Size',
            'Hits',
            'Misses',
            'HitRatio',
        ]
        msg += lib.base.get_table(table_data, keys, header=headers)

    # prepend a one-line summary matching the overall state
    if state == STATE_CRIT:
        msg = 'There are critical errors.\n\n' + msg
    elif state == STATE_WARN:
        msg = 'There are warnings.\n\n' + msg
    else:
        msg = 'Everything is ok.\n\n' + msg

    # over and out
    lib.base.oao(msg, state, perfdata, always_ok=args.ALWAYS_OK)


if __name__ == '__main__':
    try:
        main()
    except Exception:
        # Top-level catch-all boundary: presumably lib.base.cu() reports the
        # unhandled exception and exits with a defined monitoring state
        # (UNKNOWN) instead of a raw traceback — confirm against lib.base.
        lib.base.cu()