#!/usr/bin/env python3

import urllib.request, urllib.error, urllib.parse
import base64
import json
import argparse
import sys
import ssl
from pprint import pprint
from urllib.error import HTTPError
from urllib.error import URLError

plugin_description = \
"""
Check HTTP JSON Nagios Plugin

Generic Nagios plugin which checks json values from a given endpoint against
argument specified rules and determines the status and performance data for
that service.
"""
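
# Illustrative invocation (hypothetical host, path, keys and thresholds; the
# flags themselves are defined in parseArgs() below):
#   ./check_http_json.py -H api.example.com -p health \
#       -q status,UP -w queue_depth,0:100 -m response_time_ms,ms,0:200,0:500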

OK_CODE = 0
WARNING_CODE = 1
CRITICAL_CODE = 2
UNKNOWN_CODE = 3

__version__ = '2.1.2'
__version_date__ = '2022-09-15'


class NagiosHelper:
    """
    Help with Nagios specific status string formatting.
    """

    message_prefixes = {OK_CODE: 'OK',
                        WARNING_CODE: 'WARNING',
                        CRITICAL_CODE: 'CRITICAL',
                        UNKNOWN_CODE: 'UNKNOWN'}
    performance_data = ''
    warning_message = ''
    critical_message = ''
    unknown_message = ''

    def getMessage(self, message=''):
        """
        Build a status-prefixed message with optional performance data
        generated externally
        """

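        # Output shape (illustrative values): without perfdata the line looks
        # like "WARNING: Status WARNING. Key status mismatch. UP != DOWN";
        # with perfdata, the perfdata string is also repeated after a '|'
        # so Nagios can parse it.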
        message += self.warning_message
        message += self.critical_message
        message += self.unknown_message
        code = self.message_prefixes[self.getCode()]
        output = "{code}: Status {code}. {message}".format(code=code, message=message.strip())
        if self.performance_data:
            output = "{code}: {perf_data} Status {code}. {message}|{perf_data}".format(
                code=code,
                message=message.strip(),
                perf_data=self.performance_data)
        return output.strip()

    def getCode(self):
        code = OK_CODE
        if (self.warning_message != ''):
            code = WARNING_CODE
        if (self.critical_message != ''):
            code = CRITICAL_CODE
        if (self.unknown_message != ''):
            code = UNKNOWN_CODE
        return code

    def append_warning(self, warning_message):
        self.warning_message += warning_message

    def append_critical(self, critical_message):
        self.critical_message += critical_message

    def append_unknown(self, unknown_message):
        self.unknown_message += unknown_message

    def append_metrics(self, metrics):
        (performance_data, warning_message, critical_message) = metrics
        self.performance_data += performance_data
        self.append_warning(warning_message)
        self.append_critical(critical_message)


class JsonHelper:
    """
    Perform simple comparison operations against values in a given
    JSON dict
    """
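
    # Key paths use the configured separator for nesting and "(N)" for array
    # access, e.g. "gauges.heap.value" or "pools(0).size" (hypothetical keys);
    # "(*)" expands to every element of an array (see expandKey below).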

    def __init__(self, json_data, separator, value_separator):
        self.data = json_data
        self.separator = separator
        self.value_separator = value_separator
        self.arrayOpener = '('
        self.arrayCloser = ')'

    def getSubElement(self, key, data):
        separatorIndex = key.find(self.separator)
        partialKey = key[:separatorIndex]
        remainingKey = key[separatorIndex + 1:]
        if partialKey in data:
            return self.get(remainingKey, data[partialKey])
        return (None, 'not_found')

    def getSubArrayElement(self, key, data):
        subElemKey = key[:key.find(self.arrayOpener)]
        index = int(key[key.find(self.arrayOpener) +
                        1:key.find(self.arrayCloser)])
        remainingKey = key[key.find(self.arrayCloser + self.separator) + 2:]

        if key.find(self.arrayCloser + self.separator) == -1:
            remainingKey = key[key.find(self.arrayCloser) + 1:]
        if subElemKey in data:
            if index < len(data[subElemKey]):
                return self.get(remainingKey, data[subElemKey][index])
            else:
                return (None, 'not_found')
        if index >= len(data):
            return (None, 'not_found')
        else:
            if not subElemKey:
                return self.get(remainingKey, data[index])
            else:
                return (None, 'not_found')

    def equals(self, key, value):
        return self.exists(key) and \
            str(self.get(key)) in value.split(self.value_separator)

    def lte(self, key, value):
        return self.exists(key) and float(self.get(key)) <= float(value)

    def lt(self, key, value):
        return self.exists(key) and float(self.get(key)) < float(value)

    def gte(self, key, value):
        return self.exists(key) and float(self.get(key)) >= float(value)

    def gt(self, key, value):
        return self.exists(key) and float(self.get(key)) > float(value)

    def exists(self, key):
        return (self.get(key) != (None, 'not_found'))

    def get(self, key, temp_data=''):
        """
        Can navigate nested json keys with a dot format
        (Element.Key.NestedKey). Returns (None, 'not_found') if not found
        """

        if temp_data != '':
            data = temp_data
        else:
            data = self.data

        if len(key) <= 0:
            return data

        if key.find(self.separator) != -1 and key.find(self.arrayOpener) != -1:
            if key.find(self.separator) < key.find(self.arrayOpener):
                return self.getSubElement(key, data)
            else:
                return self.getSubArrayElement(key, data)

        if key.find(self.separator) != -1:
            return self.getSubElement(key, data)

        if key.find(self.arrayOpener) != -1:
            # If we got an arrayOpener but the next char is not [0-9] or * then it might just be a string
            # This isn't optimal since 'update (foobar)(0)' still won't work
            if key[key.find(self.arrayOpener)+1].isnumeric() or key[key.find(self.arrayOpener)+1] == "*":
                return self.getSubArrayElement(key, data)

        if isinstance(data, dict) and key in data:
            return data[key]

        return (None, 'not_found')

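    # expandKey() turns a '(*)' wildcard into one concrete key per array
    # element, e.g. "pools(*).size" -> "pools(0).size", "pools(1).size", ...
    # (hypothetical key used for illustration).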
    def expandKey(self, key, keys):
        if '(*)' not in key:
            keys.append(key)
            return keys

        subElemKey = ''
        if key.find('(*)') > 0:
            subElemKey = key[:key.find('(*)')-1]
        remainingKey = key[key.find('(*)')+3:]

        elemData = self.get(subElemKey)
        if elemData == (None, 'not_found'):
            keys.append(key)
            return keys

        if subElemKey != '':
            subElemKey = subElemKey + '.'

        for i in range(len(elemData)):
            newKey = subElemKey + '(' + str(i) + ')' + remainingKey
            newKeys = self.expandKey(newKey, [])
            for j in newKeys:
                keys.append(j)

        return keys

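
# A rule key may carry a display alias using '>', e.g. "metrics.uptime>uptime"
# (hypothetical key); the alias is what appears in status messages and
# perfdata labels instead of the full key path.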
def _getKeyAlias(original_key):
    key = original_key
    alias = original_key
    if '>' in original_key:
        keys = original_key.split('>')
        if len(keys) == 2:
            key, alias = keys
    return key, alias


class JsonRuleProcessor:
    """
    Perform checks and gather values from a JSON dict given rules
    and metrics definitions
    """

    def __init__(self, json_data, rules_args):
        self.data = json_data
        self.rules = rules_args
        separator = '.'
        value_separator = ':'
        if self.rules.separator:
            separator = self.rules.separator
        if self.rules.value_separator:
            value_separator = self.rules.value_separator
        self.helper = JsonHelper(self.data, separator, value_separator)
        debugPrint(rules_args.debug, "rules: %s" % rules_args)
        debugPrint(rules_args.debug, "separator: %s" % separator)
        debugPrint(rules_args.debug, "value_separator: %s" % value_separator)
        self.metric_list = self.expandKeys(self.rules.metric_list)
        self.key_threshold_warning = self.expandKeys(
            self.rules.key_threshold_warning)
        self.key_threshold_critical = self.expandKeys(
            self.rules.key_threshold_critical)
        self.key_value_list = self.expandKeys(self.rules.key_value_list)
        self.key_value_list_not = self.expandKeys(
            self.rules.key_value_list_not)
        self.key_list = self.expandKeys(self.rules.key_list)
        self.key_value_list_critical = self.expandKeys(
            self.rules.key_value_list_critical)
        self.key_value_list_not_critical = self.expandKeys(
            self.rules.key_value_list_not_critical)
        self.key_list_critical = self.expandKeys(self.rules.key_list_critical)
        self.key_value_list_unknown = self.expandKeys(
            self.rules.key_value_list_unknown)

    def expandKeys(self, src):
        if src is None:
            return []
        dest = []
        for key in src:
            newKeys = self.helper.expandKey(key, [])
            for k in newKeys:
                dest.append(k)
        return dest

    def checkExists(self, exists_list):
        failure = ''
        for k in exists_list:
            key, alias = _getKeyAlias(k)
            if (self.helper.exists(key) is False):
                failure += " Key %s did not exist." % alias
        return failure

    def checkEquality(self, equality_list):
        failure = ''
        for kv in equality_list:
            k, v = kv.split(',')
            key, alias = _getKeyAlias(k)
            if not self.helper.equals(key, v):
                failure += " Key %s mismatch. %s != %s" % (alias, v,
                                                           self.helper.get(key))
        return failure

    def checkNonEquality(self, equality_list):
        failure = ''
        for kv in equality_list:
            k, v = kv.split(',')
            key, alias = _getKeyAlias(k)
            if self.helper.equals(key, v):
                failure += " Key %s match found. %s == %s" % (alias, v,
                                                              self.helper.get(key))
        return failure

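    # Threshold ranges follow the Nagios plugin guidelines ([@]start:end):
    #   "10"     -> alert if the value is outside 0..10
    #   "10:"    -> alert if the value is below 10
    #   "~:10"   -> alert if the value is above 10
    #   "10:20"  -> alert if the value is outside 10..20
    #   "@10:20" -> alert if the value is inside 10..20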
    def checkThreshold(self, key, alias, r):
        failure = ''
        invert = False
        start = 0
        end = 'infinity'
        if r.startswith('@'):
            invert = True
            r = r[1:]
        vals = r.split(':')
        if len(vals) == 1:
            end = vals[0]
        if len(vals) == 2:
            start = vals[0]
            if vals[1] != '':
                end = vals[1]
        if (start == '~'):
            if (invert and self.helper.lte(key, end)):
                failure += " Value (%s) for key %s was less than or equal to %s." % \
                           (self.helper.get(key), alias, end)
            elif (not invert and self.helper.gt(key, end)):
                failure += " Value (%s) for key %s was greater than %s." % \
                           (self.helper.get(key), alias, end)
        elif (end == 'infinity'):
            if (invert and self.helper.gte(key, start)):
                failure += " Value (%s) for key %s was greater than or equal to %s." % \
                           (self.helper.get(key), alias, start)
            elif (not invert and self.helper.lt(key, start)):
                failure += " Value (%s) for key %s was less than %s." % \
                           (self.helper.get(key), alias, start)
        else:
            if (invert and self.helper.gte(key, start) and
                    self.helper.lte(key, end)):
                failure += " Value (%s) for key %s was inside the range %s:%s." % \
                           (self.helper.get(key), alias, start, end)
            elif (not invert and (self.helper.lt(key, start) or
                                  self.helper.gt(key, end))):
                failure += " Value (%s) for key %s was outside the range %s:%s." % \
                           (self.helper.get(key), alias, start, end)

        return failure

    def checkThresholds(self, threshold_list):
        failure = ''
        for threshold in threshold_list:
            k, r = threshold.split(',')
            key, alias = _getKeyAlias(k)
            failure += self.checkThreshold(key, alias, r)
        return failure

    def checkWarning(self):
        failure = ''
        if self.key_threshold_warning is not None:
            failure += self.checkThresholds(self.key_threshold_warning)
        if self.key_value_list is not None:
            failure += self.checkEquality(self.key_value_list)
        if self.key_value_list_not is not None:
            failure += self.checkNonEquality(self.key_value_list_not)
        if self.key_list is not None:
            failure += self.checkExists(self.key_list)
        return failure

    def checkCritical(self):
        failure = ''
        if not self.data:
            failure = " Empty JSON data."
        if self.key_threshold_critical is not None:
            failure += self.checkThresholds(self.key_threshold_critical)
        if self.key_value_list_critical is not None:
            failure += self.checkEquality(self.key_value_list_critical)
        if self.key_value_list_not_critical is not None:
            failure += self.checkNonEquality(self.key_value_list_not_critical)
        if self.key_list_critical is not None:
            failure += self.checkExists(self.key_list_critical)
        return failure

    def checkUnknown(self):
        unknown = ''
        if self.key_value_list_unknown is not None:
            unknown += self.checkEquality(self.key_value_list_unknown)
        return unknown

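    # Perfdata entries follow the Nagios 'label'=value[UOM];[warn];[crit]
    # format, e.g. -m response_time_ms,ms,0:200,0:500 (hypothetical key)
    # yields something like 'response_time_ms'=123ms;0:200;0:500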
    def checkMetrics(self):
        """
        Return a Nagios specific performance metrics string given keys
        and parameter definitions
        """

        metrics = ''
        warning = ''
        critical = ''
        if self.metric_list is not None:
            for metric in self.metric_list:
                key = metric
                minimum = maximum = warn_range = crit_range = None
                uom = ''
                if ',' in metric:
                    vals = metric.split(',')
                    if len(vals) == 2:
                        key, uom = vals
                    if len(vals) == 4:
                        key, uom, warn_range, crit_range = vals
                    if len(vals) == 6:
                        key, uom, warn_range, crit_range, \
                            minimum, maximum = vals
                key, alias = _getKeyAlias(key)
                if self.helper.exists(key):
                    metrics += "'%s'=%s" % (alias, self.helper.get(key))
                    if uom:
                        metrics += uom
                    if warn_range is not None:
                        warning += self.checkThreshold(key, alias, warn_range)
                        metrics += ";%s" % warn_range
                    if crit_range is not None:
                        critical += self.checkThreshold(key, alias, crit_range)
                        metrics += ";%s" % crit_range
                    if minimum is not None:
                        critical += self.checkThreshold(key, alias, minimum +
                                                        ':')
                        metrics += ";%s" % minimum
                    if maximum is not None:
                        critical += self.checkThreshold(key, alias, '~:' +
                                                        maximum)
                        metrics += ";%s" % maximum
                metrics += ' '
        return ("%s" % metrics, warning, critical)


def parseArgs(args):
    """
    CLI argument definitions and parsing
    """

    parser = argparse.ArgumentParser(
        description=plugin_description + '\n\nVersion: %s (%s)'
        % (__version__, __version_date__),
        formatter_class=argparse.RawDescriptionHelpFormatter
        )

    parser.add_argument('-d', '--debug', action='store_true',
                        help='debug mode')
    parser.add_argument('-s', '--ssl', action='store_true',
                        help='use TLS to connect to remote host')
    parser.add_argument('-H', '--host', dest='host',
                        required=not ('-V' in args or '--version' in args),
                        help='remote host to query')
    parser.add_argument('-k', '--insecure', action='store_true',
                        help='do not check server SSL certificate')
    parser.add_argument('-X', '--request', dest='method', default='GET', choices=['GET', 'POST'],
                        help='Specifies a custom request method to use when communicating with the HTTP server')
    parser.add_argument('-V', '--version', action='store_true',
                        help='print version of this plugin')
    parser.add_argument('--cacert',
                        dest='cacert', help='SSL CA certificate')
    parser.add_argument('--cert',
                        dest='cert', help='SSL client certificate')
    parser.add_argument('--key', dest='key',
                        help='SSL client key ( if not bundled into the cert )')
    parser.add_argument('-P', '--port', dest='port', help='TCP port')
    parser.add_argument('-p', '--path', dest='path', help='Path')
    parser.add_argument('-t', '--timeout', type=int,
                        help='Connection timeout (seconds)')
    parser.add_argument('-B', '--basic-auth', dest='auth',
                        help='Basic auth string "username:password"')
    parser.add_argument('-D', '--data', dest='data',
                        help='The http payload to send as a POST')
    parser.add_argument('-A', '--headers', dest='headers',
                        help='The http headers in JSON format.')
    parser.add_argument('-f', '--field_separator', dest='separator',
                        help='''JSON Field separator, defaults to ".";
                        Select element in an array with "(" ")"''')
    parser.add_argument('-F', '--value_separator', dest='value_separator',
                        help='''JSON Value separator, defaults to ":"''')
    parser.add_argument('-w', '--warning', dest='key_threshold_warning',
                        nargs='*',
                        help='''Warning threshold for these values
                        (key1[>alias],WarnRange key2[>alias],WarnRange).
                        WarnRange is in the format [@]start:end, more
                        information at
                        nagios-plugins.org/doc/guidelines.html.''')
    parser.add_argument('-c', '--critical', dest='key_threshold_critical',
                        nargs='*',
                        help='''Critical threshold for these values
                        (key1[>alias],CriticalRange key2[>alias],CriticalRange).
                        CriticalRange is in the format [@]start:end, more
                        information at
                        nagios-plugins.org/doc/guidelines.html.''')
    parser.add_argument('-e', '--key_exists', dest='key_list', nargs='*',
                        help='''Checks existence of these keys to determine
                        status. Return warning if key is not present.''')
    parser.add_argument('-E', '--key_exists_critical',
                        dest='key_list_critical',
                        nargs='*',
                        help='''Same as -e but return critical if key is
                        not present.''')
    parser.add_argument('-q', '--key_equals', dest='key_value_list', nargs='*',
                        help='''Checks equality of these keys and values
                        (key[>alias],value key2,value2) to determine status.
                        Multiple key values can be delimited with colon
                        (key,value1:value2). Return warning if equality
                        check fails''')
    parser.add_argument('-Q', '--key_equals_critical',
                        dest='key_value_list_critical', nargs='*',
                        help='''Same as -q but return critical if
                        equality check fails.''')
    parser.add_argument('-u', '--key_equals_unknown',
                        dest='key_value_list_unknown', nargs='*',
                        help='''Same as -q but return unknown if
                        equality check fails.''')
    parser.add_argument('-y', '--key_not_equals',
                        dest='key_value_list_not', nargs='*',
                        help='''Checks equality of these keys and values
                        (key[>alias],value key2,value2) to determine status.
                        Multiple key values can be delimited with colon
                        (key,value1:value2). Return warning if equality
                        check succeeds''')
    parser.add_argument('-Y', '--key_not_equals_critical',
                        dest='key_value_list_not_critical', nargs='*',
                        help='''Same as -y but return critical if equality
                        check succeeds.''')
    parser.add_argument('-m', '--key_metric', dest='metric_list', nargs='*',
                        help='''Gathers the values of these keys (key[>alias],
                        UnitOfMeasure,WarnRange,CriticalRange,Min,Max) for
                        Nagios performance data. More information about Range
                        format and units of measure for nagios can be found at
                        nagios-plugins.org/doc/guidelines.html
                        Additional formats for this parameter are:
                        (key[>alias]), (key[>alias],UnitOfMeasure),
                        (key[>alias],UnitOfMeasure,WarnRange,
                        CriticalRange).''')

    return parser.parse_args(args)


def debugPrint(debug_flag, message, pretty_flag=False):
    """
    Print debug messages if -d (debug_flag) is set.
    """

    if debug_flag:
        if pretty_flag:
            pprint(message)
        else:
            print(message)


def main(cliargs):
    """
    Main entrypoint for CLI
    """

    args = parseArgs(cliargs)
    nagios = NagiosHelper()
    context = None

    if args.version:
        print('Version: %s - Date: %s' % (__version__, __version_date__))
        sys.exit(0)

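    # With -s/--ssl the URL is https and a TLS context is built;
    # -k/--insecure disables certificate and hostname verification.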
    if args.ssl:
        url = "https://%s" % args.host

        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3

        if args.insecure:
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        else:
            context.verify_mode = ssl.CERT_OPTIONAL
            context.load_default_certs()
            if args.cacert:
                try:
                    context.load_verify_locations(args.cacert)
                except ssl.SSLError:
                    nagios.append_unknown(
                        'Error loading SSL CA cert "%s"!'
                        % args.cacert)

            if args.cert:
                try:
                    context.load_cert_chain(args.cert, keyfile=args.key)
                except ssl.SSLError:
                    if args.key:
                        nagios.append_unknown(
                            'Error loading SSL cert. Make sure key "%s" belongs to cert "%s"!'
                            % (args.key, args.cert))
                    else:
                        nagios.append_unknown(
                            'Error loading SSL cert. Make sure "%s" contains the key as well!'
                            % (args.cert))

            if nagios.getCode() != OK_CODE:
                print(nagios.getMessage())
                sys.exit(nagios.getCode())

    else:
        url = "http://%s" % args.host
    if args.port:
        url += ":%s" % args.port
    if args.path:
        url += "/%s" % args.path

    debugPrint(args.debug, "url:%s" % url)
    json_data = ''

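    # Build the request: optional Basic auth (-B), extra headers (-A, a JSON
    # object), an optional payload (-D) and timeout (-t) are all taken from
    # the command line arguments.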
    try:
        req = urllib.request.Request(url, method=args.method)
        req.add_header("User-Agent", "check_http_json")
        if args.auth:
            authbytes = str(args.auth).encode()
            base64str = base64.encodebytes(authbytes).decode().replace('\n', '')
            req.add_header('Authorization', 'Basic %s' % base64str)
        if args.headers:
            headers = json.loads(args.headers)
            debugPrint(args.debug, "Headers:\n %s" % headers)
            for header in headers:
                req.add_header(header, headers[header])
        if args.timeout and args.data:
            databytes = str(args.data).encode()
            response = urllib.request.urlopen(req, timeout=args.timeout,
                                              data=databytes, context=context)
        elif args.timeout:
            response = urllib.request.urlopen(req, timeout=args.timeout,
                                              context=context)
        elif args.data:
            databytes = str(args.data).encode()
            response = urllib.request.urlopen(req, data=databytes, context=context)
        else:
            # pylint: disable=consider-using-with
            response = urllib.request.urlopen(req, context=context)

        json_data = response.read()

    except HTTPError as e:
        # Try to recover from HTTP Error, if there is JSON in the response
        if "json" in e.info().get_content_subtype():
            json_data = e.read()
        else:
            nagios.append_unknown(" HTTPError[%s], url:%s" % (str(e.code), url))
    except URLError as e:
        nagios.append_critical(" URLError[%s], url:%s" % (str(e.reason), url))

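    # Parse the response body; a parse failure is reported as UNKNOWN, while a
    # successful parse feeds the JSON into the rule processor below.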
    try:
        data = json.loads(json_data)
    except ValueError as e:
        nagios.append_unknown(" Parser error: %s" % str(e))

    else:
        debugPrint(args.debug, 'json:')
        debugPrint(args.debug, data, True)
        # Apply rules to returned JSON data
        processor = JsonRuleProcessor(data, args)
        nagios.append_warning(processor.checkWarning())
        nagios.append_critical(processor.checkCritical())
        nagios.append_metrics(processor.checkMetrics())
        nagios.append_unknown(processor.checkUnknown())

    # Print Nagios specific string and exit appropriately
    print(nagios.getMessage())
    sys.exit(nagios.getCode())


if __name__ == "__main__":
    # Program entry point
    main(sys.argv[1:])

#EOF