From 504e69fac97fc8ed228ec215911f97f3ce46e1af Mon Sep 17 00:00:00 2001 From: Arthur de Jong Date: Sun, 29 Dec 2019 17:15:14 +0100 Subject: Move Python code to muninplot package This allows more easy installation into a virtualenv. --- README | 2 +- munin.py | 239 -------------------------------------------------- muninplot/__main__.py | 35 ++++++++ muninplot/data.py | 237 +++++++++++++++++++++++++++++++++++++++++++++++++ muninplot/wsgi.py | 120 +++++++++++++++++++++++++ setup.cfg | 2 +- tox.ini | 2 +- web.py | 127 --------------------------- 8 files changed, 395 insertions(+), 369 deletions(-) delete mode 100644 munin.py create mode 100644 muninplot/__main__.py create mode 100644 muninplot/data.py create mode 100644 muninplot/wsgi.py delete mode 100644 web.py diff --git a/README b/README index 71e5be5..0a907d6 100644 --- a/README +++ b/README @@ -19,7 +19,7 @@ Basic configuration snippet for deploying the munin-plot under Apache with mod_wsgi: WSGIDaemonProcess munin-plot threads=5 maximum-requests=100 display-name=%{GROUP} home=/PATH/TO/munin-plot - AliasMatch ^/munin-plot/(graphs.*|data.*)$ /PATH/TO/munin-plot/web.py/$1 + AliasMatch ^/munin-plot/(graphs.*|data.*)$ /PATH/TO/munin-plot/muninplot/wsgi.py/$1 Alias /munin-plot /PATH/TO/munin-plot/static diff --git a/munin.py b/munin.py deleted file mode 100644 index 6ad125c..0000000 --- a/munin.py +++ /dev/null @@ -1,239 +0,0 @@ -# Python module to get data from Munin data files - -# Copyright (C) 2018-2019 Arthur de Jong -# -# Permission is hereby granted, free of charge, to any person obtaining a -# copy of this software and associated documentation files (the "Software"), -# to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, -# and/or sell copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following conditions: -# -# The above copyright notice and this 
permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. - -import math -import os -import re -import subprocess -import time -from collections import defaultdict -from functools import wraps - - -MUNIN_DBDIR = '/var/lib/munin' - - -def cache(function): - """Cache returned data of the decorated function.""" - data = dict() - - @wraps(function) - def wrapper(*args): - now = time.time() - if args not in data or data[args][1] < now - 300: - data[args] = (function(*args), now) - return data[args][0] - return wrapper - - -def _remove_duplicates(lst): - """Remove duplicate values from the list while maintaining order.""" - seen = set() - for x in lst: - if x not in seen: - seen.add(x) - yield x - - -@cache -def get_info(): - """Return a description of all the graphs.""" - data = defaultdict(dict) - # parse the datafile and group by graph - with open(os.path.join(MUNIN_DBDIR, 'datafile'), 'rt', encoding='utf-8') as f: - for line in f: - if ':' in line: - source, line = line.split(':', 1) - group, host = source.split(';') - key, value = line.split(' ', 1) - if '.' in key: - graph, key = key.split('.', 1) - name = '%s/%s/%s' % (group, host, graph) - data[name][key] = value.strip() - # restructure graph info - for name, info in data.items(): - # collect field information - fields = dict() - for key, value in list(info.items()): - if '.' 
in key: - info.pop(key) - field, key = key.split('.', 1) - fields.setdefault(field, dict(name=field)) - if key not in ('graph_data_size', 'update_rate'): - fields[field][key] = value - # clean up field information - for _field, field_info in fields.items(): - # remove graph=no from negative fields - negative = field_info.get('negative') - if negative: - fields[negative].pop('graph', None) - # remove graph = no and replace by removing draw - if field_info.pop('graph', '').lower() in ('false', 'no', '0'): - field_info.pop('draw', None) - else: - field_info.setdefault('draw', 'LINE') - # expand graph_vlabel - graph_vlabel = info.get('graph_vlabel', '') - if '${graph_period}' in graph_vlabel: - info['graph_vlabel'] = graph_vlabel.replace( - '${graph_period}', info.get('graph_period', 'second')) - info['name'] = name - info['group'], info['host'], _ = name.split('/') - graph_order = info.pop('graph_order', '') - info['fields'] = [ - fields[field] - for field in _remove_duplicates(graph_order.split()) - if field in fields] - category = info.pop('graph_category', '') - if category: - info['category'] = category.lower() - return data - - -def _get_rrd_files(group, host, graph): - """Return a list of RRD files that are available for the graph.""" - files = os.listdir(os.path.join(MUNIN_DBDIR, group)) - prefix = '%s-%s-' % (host, graph) - return sorted( - f for f in files - if f.startswith(prefix) and f.endswith('.rrd')) - - -def _fetch_rrd(filename, start, end, resolution=300, cf='AVERAGE'): - """Use rrdtool to fetch values to the data.""" - output = subprocess.check_output([ - 'rrdtool', 'fetch', os.path.join(MUNIN_DBDIR, filename), - cf, '-r', str(resolution), '-s', str(start), '-e', str(end)]) - for line in output.decode('utf-8').splitlines(): - if ':' in line: - try: - time, value = line.split(':', 1) - value = float(value) - if not math.isnan(value): - yield int(time), value - except ValueError: - pass - - -def get_raw_values(group, host, graph, start, end, 
resolution=300, minmax=True): - """Get the data points available from the specified graph.""" - start = int(start / resolution) * resolution - end = int(end / resolution) * resolution - data = defaultdict(defaultdict) - for f in _get_rrd_files(group, host, graph): - field = '-'.join(f.split('-')[2:-1]) - filename = os.path.join(group, f) - for time_, value in _fetch_rrd(filename, start, end, resolution, 'AVERAGE'): - data[time_][field] = value - if minmax: - for time_, value in _fetch_rrd(filename, start, end, resolution, 'MIN'): - data[time_][field + '.min'] = value - for time_, value in _fetch_rrd(filename, start, end, resolution, 'MAX'): - data[time_][field + '.max'] = value - return [dict(time=k, **v) for k, v in sorted(data.items())] - - -cdef_ops = { - '+': (lambda a, b: a + b), - '-': (lambda a, b: a - b), - '*': (lambda a, b: a * b), - '/': (lambda a, b: a / b), -} - -cdef_number_re = re.compile(r'^-?[0-9]+(\.[0-9]*)?$') - - -def cdef_eval(expression, row, suffix=''): - """Evaluate a cdef expression using variables from row.""" - tokens = expression.split(',') - stack = [] - for token in tokens: - if cdef_number_re.match(token): - stack.append(float(token)) - elif token in cdef_ops: - arg2 = stack.pop() - arg1 = stack.pop() - result = cdef_ops[token](arg1, arg2) - stack.append(result) - else: - stack.append(row[token + suffix]) - return stack.pop() - - -def get_values(group, host, graph, start, end, resolution=300, minmax=True): - """Get the data points available from the specified graph.""" - graph_info = get_info()['%s/%s/%s' % (group, host, graph)] - data = get_raw_values(group, host, graph, start, end, resolution, minmax) - for field_info in graph_info['fields']: - negative = field_info.get('negative') - if negative: - for row in data: - try: - values = [ - -row[negative + '.min'], - -row[negative], - -row[negative + '.max']] - ( - row[negative + '.min'], - row[negative], - row[negative + '.max'], - ) = sorted(values) - except KeyError: - pass - cdef = 
field_info.get('cdef') - if cdef: - field = field_info['name'] - for row in data: - try: - values = [ - cdef_eval(cdef, row, '.min'), - cdef_eval(cdef, row), - cdef_eval(cdef, row, '.max')] - row[field + '.min'], row[field], row[field + '.max'] = sorted(values) - except Exception: - pass - return data - - -def get_resolutions(group, host, graph): - """Return a list of resolutions available for the graph.""" - # find the newest file - rrdfile = sorted( - (os.stat(x).st_mtime, x) for x in ( - os.path.join(MUNIN_DBDIR, group, y) - for y in _get_rrd_files(group, host, graph)))[-1][1] - output = subprocess.check_output(['rrdtool', 'info', rrdfile]) - resolutions = {} - rows = {} - for line in output.decode('utf-8').splitlines(): - if line.startswith('step = '): - # the measurement resolution - step = int(line.split(' = ')[1]) - elif line.startswith('last_update = '): - last_update = int(line.split(' = ')[1]) - last_update = int(last_update / step) * step - elif '.pdp_per_row = ' in line: - pdp_per_row = int(line.split(' = ')[1]) - resolutions[pdp_per_row * step] = line.split('.')[0] - elif '.rows = ' in line: - rows[line.split('.')[0]] = int(line.split(' = ')[1]) - return last_update, [(r, rows[i]) for r, i in sorted(resolutions.items())] diff --git a/muninplot/__main__.py b/muninplot/__main__.py new file mode 100644 index 0000000..03f4a82 --- /dev/null +++ b/muninplot/__main__.py @@ -0,0 +1,35 @@ +# Copyright (C) 2018-2019 Arthur de Jong +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all 
copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +import sys +from wsgiref.simple_server import make_server + +from muninplot.wsgi import application + + +def devserver(): + + sys.stdout = sys.stderr + srv = make_server('0.0.0.0', 8080, application) + srv.serve_forever() + + +if __name__ == '__main__': + devserver() diff --git a/muninplot/data.py b/muninplot/data.py new file mode 100644 index 0000000..a420836 --- /dev/null +++ b/muninplot/data.py @@ -0,0 +1,237 @@ +# Copyright (C) 2018-2019 Arthur de Jong +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +import math +import os +import re +import subprocess +import time +from collections import defaultdict +from functools import wraps + + +MUNIN_DBDIR = '/var/lib/munin' + + +def cache(function): + """Cache returned data of the decorated function.""" + data = dict() + + @wraps(function) + def wrapper(*args): + now = time.time() + if args not in data or data[args][1] < now - 300: + data[args] = (function(*args), now) + return data[args][0] + return wrapper + + +def _remove_duplicates(lst): + """Remove duplicate values from the list while maintaining order.""" + seen = set() + for x in lst: + if x not in seen: + seen.add(x) + yield x + + +@cache +def get_info(): + """Return a description of all the graphs.""" + data = defaultdict(dict) + # parse the datafile and group by graph + with open(os.path.join(MUNIN_DBDIR, 'datafile'), 'rt', encoding='utf-8') as f: + for line in f: + if ':' in line: + source, line = line.split(':', 1) + group, host = source.split(';') + key, value = line.split(' ', 1) + if '.' in key: + graph, key = key.split('.', 1) + name = '%s/%s/%s' % (group, host, graph) + data[name][key] = value.strip() + # restructure graph info + for name, info in data.items(): + # collect field information + fields = dict() + for key, value in list(info.items()): + if '.' 
in key: + info.pop(key) + field, key = key.split('.', 1) + fields.setdefault(field, dict(name=field)) + if key not in ('graph_data_size', 'update_rate'): + fields[field][key] = value + # clean up field information + for _field, field_info in fields.items(): + # remove graph=no from negative fields + negative = field_info.get('negative') + if negative: + fields[negative].pop('graph', None) + # remove graph = no and replace by removing draw + if field_info.pop('graph', '').lower() in ('false', 'no', '0'): + field_info.pop('draw', None) + else: + field_info.setdefault('draw', 'LINE') + # expand graph_vlabel + graph_vlabel = info.get('graph_vlabel', '') + if '${graph_period}' in graph_vlabel: + info['graph_vlabel'] = graph_vlabel.replace( + '${graph_period}', info.get('graph_period', 'second')) + info['name'] = name + info['group'], info['host'], _ = name.split('/') + graph_order = info.pop('graph_order', '') + info['fields'] = [ + fields[field] + for field in _remove_duplicates(graph_order.split()) + if field in fields] + category = info.pop('graph_category', '') + if category: + info['category'] = category.lower() + return data + + +def _get_rrd_files(group, host, graph): + """Return a list of RRD files that are available for the graph.""" + files = os.listdir(os.path.join(MUNIN_DBDIR, group)) + prefix = '%s-%s-' % (host, graph) + return sorted( + f for f in files + if f.startswith(prefix) and f.endswith('.rrd')) + + +def _fetch_rrd(filename, start, end, resolution=300, cf='AVERAGE'): + """Use rrdtool to fetch values to the data.""" + output = subprocess.check_output([ + 'rrdtool', 'fetch', os.path.join(MUNIN_DBDIR, filename), + cf, '-r', str(resolution), '-s', str(start), '-e', str(end)]) + for line in output.decode('utf-8').splitlines(): + if ':' in line: + try: + time, value = line.split(':', 1) + value = float(value) + if not math.isnan(value): + yield int(time), value + except ValueError: + pass + + +def get_raw_values(group, host, graph, start, end, 
resolution=300, minmax=True): + """Get the data points available from the specified graph.""" + start = int(start / resolution) * resolution + end = int(end / resolution) * resolution + data = defaultdict(defaultdict) + for f in _get_rrd_files(group, host, graph): + field = '-'.join(f.split('-')[2:-1]) + filename = os.path.join(group, f) + for time_, value in _fetch_rrd(filename, start, end, resolution, 'AVERAGE'): + data[time_][field] = value + if minmax: + for time_, value in _fetch_rrd(filename, start, end, resolution, 'MIN'): + data[time_][field + '.min'] = value + for time_, value in _fetch_rrd(filename, start, end, resolution, 'MAX'): + data[time_][field + '.max'] = value + return [dict(time=k, **v) for k, v in sorted(data.items())] + + +cdef_ops = { + '+': (lambda a, b: a + b), + '-': (lambda a, b: a - b), + '*': (lambda a, b: a * b), + '/': (lambda a, b: a / b), +} + +cdef_number_re = re.compile(r'^-?[0-9]+(\.[0-9]*)?$') + + +def cdef_eval(expression, row, suffix=''): + """Evaluate a cdef expression using variables from row.""" + tokens = expression.split(',') + stack = [] + for token in tokens: + if cdef_number_re.match(token): + stack.append(float(token)) + elif token in cdef_ops: + arg2 = stack.pop() + arg1 = stack.pop() + result = cdef_ops[token](arg1, arg2) + stack.append(result) + else: + stack.append(row[token + suffix]) + return stack.pop() + + +def get_values(group, host, graph, start, end, resolution=300, minmax=True): + """Get the data points available from the specified graph.""" + graph_info = get_info()['%s/%s/%s' % (group, host, graph)] + data = get_raw_values(group, host, graph, start, end, resolution, minmax) + for field_info in graph_info['fields']: + negative = field_info.get('negative') + if negative: + for row in data: + try: + values = [ + -row[negative + '.min'], + -row[negative], + -row[negative + '.max']] + ( + row[negative + '.min'], + row[negative], + row[negative + '.max'], + ) = sorted(values) + except KeyError: + pass + cdef = 
field_info.get('cdef') + if cdef: + field = field_info['name'] + for row in data: + try: + values = [ + cdef_eval(cdef, row, '.min'), + cdef_eval(cdef, row), + cdef_eval(cdef, row, '.max')] + row[field + '.min'], row[field], row[field + '.max'] = sorted(values) + except Exception: + pass + return data + + +def get_resolutions(group, host, graph): + """Return a list of resolutions available for the graph.""" + # find the newest file + rrdfile = sorted( + (os.stat(x).st_mtime, x) for x in ( + os.path.join(MUNIN_DBDIR, group, y) + for y in _get_rrd_files(group, host, graph)))[-1][1] + output = subprocess.check_output(['rrdtool', 'info', rrdfile]) + resolutions = {} + rows = {} + for line in output.decode('utf-8').splitlines(): + if line.startswith('step = '): + # the measurement resolution + step = int(line.split(' = ')[1]) + elif line.startswith('last_update = '): + last_update = int(line.split(' = ')[1]) + last_update = int(last_update / step) * step + elif '.pdp_per_row = ' in line: + pdp_per_row = int(line.split(' = ')[1]) + resolutions[pdp_per_row * step] = line.split('.')[0] + elif '.rows = ' in line: + rows[line.split('.')[0]] = int(line.split(' = ')[1]) + return last_update, [(r, rows[i]) for r, i in sorted(resolutions.items())] diff --git a/muninplot/wsgi.py b/muninplot/wsgi.py new file mode 100644 index 0000000..606f4a0 --- /dev/null +++ b/muninplot/wsgi.py @@ -0,0 +1,120 @@ +# Copyright (C) 2018-2019 Arthur de Jong +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or 
substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +import cgi +import json +import os +import time + +from muninplot.data import get_info, get_resolutions, get_values + + +def static_serve(environ, start_response): + path = environ.get('PATH_INFO', '').lstrip('/') or 'index.html' + path = os.path.normpath(os.sep + path).lstrip(os.sep) + if path.endswith('.html'): + content_type = 'text/html' + elif path.endswith('.js'): + content_type = 'text/javascript' + elif path.endswith('.css'): + content_type = 'text/css' + else: + content_type = 'application/octet-stream' + csp = "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; " + \ + "script-src 'self' 'unsafe-eval'; frame-ancestors 'none'" + start_response('200 OK', [ + ('Content-Type', content_type), + ('Content-Security-Policy', csp)]) + return [open(os.path.join('static', path), 'rb').read()] + + +def list_graphs(environ, start_response): + start_response('200 OK', [ + ('Content-Type', 'application/json')]) + return [json.dumps(get_info(), indent=2, sort_keys=True).encode('utf-8')] + + +def _field_key(x): + """Order field.min, field, field.max.""" + if x.endswith('.min'): + return x[:-4] + '.0' + elif x.endswith('.max'): + return x[:-4] + '.2' + return x + '.1' + + +def _parse_timestamp(timestamp): + """Return a timestamp value from the specified string.""" + formats = ( + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%d %H:%M', + '%Y-%m-%d') + for fmt in formats: + try: + return time.mktime(time.strptime(timestamp, fmt)) + except 
ValueError: + pass + raise ValueError('time data %r does not match any known format' % timestamp) + + +def get_data(environ, start_response): + path = environ.get('PATH_INFO', '').lstrip('/') + _, group, host, graph = path.split('/') + last_update, resolutions = get_resolutions(group, host, graph) + parameters = cgi.parse_qs(environ.get('QUERY_STRING', '')) + # get the time range to fetch the data for + end = parameters.get('end') + end = _parse_timestamp(end[0]) if end else last_update + start = parameters.get('start') + start = _parse_timestamp(start[0]) if start else end - 24 * 60 * 60 * 7 + # calculate the minimum resolution that we want + resolution = min(( + parameters.get('resolution', (end - start) / 5000), + resolutions[-1][0])) + # loop over resolutions to find the data + values = [] + for res, rows in resolutions: + if res >= resolution: + s = max((last_update - res * rows, start)) + e = min((last_update, end)) + if e > s: + values = get_values(group, host, graph, s, e, res) + values + end = s + # return the values as CSV + start_response('200 OK', [ + ('Content-Type', 'text/plain')]) + if values: + keys = (x for x in values[0].keys() if x != 'remove') + keys = ['time'] + sorted((k for k in keys if k != 'time'), key=_field_key) + yield ('%s\n' % (','.join(keys))).encode('utf-8') + for value in values: + value['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(value['time'])) + yield ('%s\n' % (','.join(str(value.get(key, '')) for key in keys))).encode('utf-8') + + +def application(environ, start_response): + # get request path + path = environ.get('PATH_INFO', '').lstrip('/') + if path.startswith('graphs'): + return list_graphs(environ, start_response) + elif path.startswith('data/'): + return get_data(environ, start_response) + else: + return static_serve(environ, start_response) diff --git a/setup.cfg b/setup.cfg index 869dc81..764ef81 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,7 @@ classifiers = [options] include_package_data = true 
-packages = find:
+packages = muninplot

 [sdist]
 owner=root

diff --git a/tox.ini b/tox.ini
index 42c0d53..1efa7f9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -18,7 +18,7 @@ deps = flake8
     flake8-tidy-imports
     flake8-tuple
     pep8-naming
-commands = flake8 .
+commands = flake8 muninplot setup.py

 [testenv:eslint]
 skip_install = true

diff --git a/web.py b/web.py
deleted file mode 100644
index 39547dc..0000000
--- a/web.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright (C) 2018-2019 Arthur de Jong
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-# DEALINGS IN THE SOFTWARE.
- -import cgi -import json -import os -import sys -import time - -from munin import get_info, get_resolutions, get_values - - -sys.stdout = sys.stderr - - -def static_serve(environ, start_response): - path = environ.get('PATH_INFO', '').lstrip('/') or 'index.html' - path = os.path.normpath(os.sep + path).lstrip(os.sep) - content_type = 'text/html' - if path.endswith('.js'): - content_type = 'text/javascript' - elif path.endswith('.css'): - content_type = 'text/css' - csp = "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; " + \ - "script-src 'self' 'unsafe-eval'; frame-ancestors 'none'" - start_response('200 OK', [ - ('Content-Type', content_type), - ('Content-Security-Policy', csp)]) - return [open(os.path.join('static', path), 'rb').read()] - - -def list_graphs(environ, start_response): - start_response('200 OK', [ - ('Content-Type', 'application/json')]) - return [json.dumps(get_info(), indent=2, sort_keys=True).encode('utf-8')] - - -def _field_key(x): - """Order field.min, field, field.max.""" - if x.endswith('.min'): - return x[:-4] + '.0' - elif x.endswith('.max'): - return x[:-4] + '.2' - return x + '.1' - - -def _parse_timestamp(timestamp): - """Return a timestamp value from the specified string.""" - formats = ( - '%Y-%m-%d %H:%M:%S', - '%Y-%m-%d %H:%M', - '%Y-%m-%d') - for fmt in formats: - try: - return time.mktime(time.strptime(timestamp, fmt)) - except ValueError: - pass - raise ValueError('time data %r does not match any known format' % timestamp) - - -def get_data(environ, start_response): - path = environ.get('PATH_INFO', '').lstrip('/') - _, group, host, graph = path.split('/') - last_update, resolutions = get_resolutions(group, host, graph) - parameters = cgi.parse_qs(environ.get('QUERY_STRING', '')) - # get the time range to fetch the data for - end = parameters.get('end') - end = _parse_timestamp(end[0]) if end else last_update - start = parameters.get('start') - start = _parse_timestamp(start[0]) if start else end - 
24 * 60 * 60 * 7 - # calculate the minimum resolution that we want - resolution = min(( - parameters.get('resolution', (end - start) / 5000), - resolutions[-1][0])) - # loop over resolutions to find the data - values = [] - for res, rows in resolutions: - if res >= resolution: - s = max((last_update - res * rows, start)) - e = min((last_update, end)) - if e > s: - values = get_values(group, host, graph, s, e, res) + values - end = s - # return the values as CSV - start_response('200 OK', [ - ('Content-Type', 'text/plain')]) - if values: - keys = (x for x in values[0].keys() if x != 'remove') - keys = ['time'] + sorted((k for k in keys if k != 'time'), key=_field_key) - yield ('%s\n' % (','.join(keys))).encode('utf-8') - for value in values: - value['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(value['time'])) - yield ('%s\n' % (','.join(str(value.get(key, '')) for key in keys))).encode('utf-8') - - -def application(environ, start_response): - # get request path - path = environ.get('PATH_INFO', '').lstrip('/') - if path.startswith('graphs'): - return list_graphs(environ, start_response) - elif path.startswith('data/'): - return get_data(environ, start_response) - else: - return static_serve(environ, start_response) - - -if __name__ == '__main__': - from wsgiref.simple_server import make_server - srv = make_server('0.0.0.0', 8080, application) - srv.serve_forever() -- cgit v1.2.3