Commit c941adcc authored by Jérome Perrin's avatar Jérome Perrin

networkbench: remove

The functionality is replaced by https://lab.nexedi.com/nexedi/surykatka

Some parts are still used in software/caddy-frontend so we keep them for a
while.
parent 1eeef169
...@@ -103,7 +103,6 @@ setup(name=name,
        'slaprunnertest = slapos.runner.runnertest:main',
        'slaprunnerteststandalone = slapos.runner.runnertest:runStandaloneUnitTest',
        'zodbpack = slapos.zodbpack:run [zodbpack]',
        'networkbench = slapos.networkbench:main',
        'cachechecker = slapos.cachechecker:web_checker_utility'
    ]
},
......
from __future__ import print_function
import socket
import logging
import time
import logging.handlers
import subprocess
import re
import sys
import shutil
import netifaces
import random
import pycurl
import argparse
import json
from io import StringIO
from .ping import ping, ping6
from .dnsbench import resolve
from .http import get_curl, request
import textwrap
class HelpFormatter(argparse.ArgumentDefaultsHelpFormatter):
  """Help formatter that keeps explicit newlines in help texts and only
  appends the "(default: ...)" suffix for options whose default is truthy.
  """

  def _get_help_string(self, action):
    # Skip the inherited "(default: ...)" suffix for falsy defaults
    # (None, False, "", 0): showing them would only add noise.
    if action.default:
      return super(HelpFormatter, self)._get_help_string(action)
    return action.help

  def _split_lines(self, text, width):
    """Preserves new lines in option descriptions"""
    wrapped = []
    for paragraph in text.splitlines():
      wrapped.extend(textwrap.wrap(paragraph, width))
    return wrapped

  def _fill_text(self, text, width, indent):
    """Preserves new lines in other descriptions"""
    return '\n'.join(
        textwrap.fill(paragraph, width=width,
                      initial_indent=indent, subsequent_indent=indent)
        for paragraph in text.splitlines())
# Hostname of the machine running the benchmark; prefixed to every log line.
botname = socket.gethostname()
# Matches ISO-like dates (YYYY-MM-DD or YYYY/MM/DD). Raw string: '\d' in a
# plain string is an invalid escape sequence (DeprecationWarning on py3.6+).
date_reg_exp = re.compile(r'\d{4}[-/]\d{2}[-/]\d{2}')
def _get_network_gateway(self):
  """Return the IPv4 address of the system's default gateway.

  NOTE(review): module-level function, yet it takes an (unused) ``self``
  parameter — presumably copy-pasted from a class; confirm before removing.
  """
  default_routes = netifaces.gateways()["default"]
  return default_routes[netifaces.AF_INET][0]
def load_configuration(config_path):
  """Load the benchmark configuration and return it as a dict.

  ``config_path`` is either a local filesystem path to a JSON file, or a
  URL (http://, https://, ftp:// or file://) fetched with
  download_external_configuration().
  """
  # str.startswith accepts a tuple: one call instead of a chained or.
  if config_path.startswith(("http://", "https://", "ftp://", "file://")):
    return download_external_configuration(config_path)
  with open(config_path, "r") as f:
    return json.load(f)
def download_external_configuration(url):
  """Download and parse a JSON configuration from ``url``.

  Returns the parsed dict, or {} when the server answers with an error
  status or the payload is not valid JSON.
  """
  # BUG FIX: pycurl writes raw *bytes* into WRITEDATA; io.StringIO only
  # accepts text, so the download used to fail. Use BytesIO like http.py.
  from io import BytesIO
  buffer = BytesIO()
  curl, _ = get_curl(buffer, url)
  try:
    response_code = curl.getinfo(pycurl.HTTP_CODE)
    # 0 is returned for protocols without an HTTP status (file://, ftp://).
    if response_code in (200, 0):
      try:
        # json.loads accepts bytes directly (py2 str / py3 >= 3.6 bytes).
        return json.loads(buffer.getvalue())
      except ValueError:
        print("Unable to parse external configuration, error:")
        import traceback
        traceback.print_exc(file=sys.stderr)
        sys.stderr.flush()
        print("Ignoring external configuration")
  finally:
    # BUG FIX: the handle used to leak when response_code was an error.
    curl.close()
  return {}
def is_rotate_log(log_file_path):
  """Return the date (YYYY-MM-DD) of the newest dated log line when that
  date is not today — i.e. when the log should be rotated.

  Returns False when the file cannot be opened or read, and None when the
  log is empty, contains no dated line, or its last dated line is today's.
  """
  try:
    # BUG FIX: binary mode. Text-mode files refuse non-zero cur-relative
    # seeks on Python 3 (io.UnsupportedOperation, a subclass of OSError),
    # which made this function always return False there.
    log_file = open(log_file_path, 'rb')
  except IOError:
    return False
  try:
    try:
      # Only inspect the last 4 KiB: enough to contain the newest lines.
      log_file.seek(0, 2)
      size = log_file.tell()
      log_file.seek(-min(size, 4096), 1)
      tail = log_file.read().decode('utf-8', 'replace')
    except IOError:
      return False
  finally:
    log_file.close()
  today = time.strftime("%Y-%m-%d")
  for line in reversed(tail.split('\n')):
    if line.strip():
      match_list = re.findall(r'\d{4}[-/]\d{2}[-/]\d{2}', line)
      if match_list:
        if match_list[0] != today:
          # BUG FIX: used to return ValueError(match_list[0]) — an
          # exception *instance* — instead of the date string itself.
          return match_list[0]
        break
def rotate_logfile(handler, log_file):
  """Force a rollover when the last entry of ``log_file`` is from a past
  day, rename the rotated file after that day and gzip it.

  ``handler`` is the TimedRotatingFileHandler attached to ``log_file``.
  """
  last_date = is_rotate_log(log_file)
  if last_date:
    handler.doRollover()
    today = time.strftime("%Y-%m-%d")
    # doRollover names the backup after today; rename it after the day
    # the entries actually belong to.
    shutil.move("%s.%s" % (log_file, today),
                "%s.%s" % (log_file, last_date))
    # SECURITY FIX: pass an argument list without a shell instead of
    # interpolating the path into a shell command line.
    subprocess.call(["gzip", "%s.%s" % (log_file, last_date)])
def create_logger(name, log_folder, verbose):
  """Return a DEBUG-level logger writing to
  <log_folder>/network_bench.<name>.log, rotated daily (1000 backups kept),
  optionally mirrored to stdout when ``verbose`` is true.
  """
  logger = logging.getLogger(name)
  logger.setLevel(logging.DEBUG)
  log_file = '%s/network_bench.%s.log' % (log_folder, name)
  file_handler = logging.handlers.TimedRotatingFileHandler(
      log_file, when="D", backupCount=1000)
  # Catch up on rotations missed while the process was not running.
  rotate_logfile(file_handler, log_file)
  # Each line: timestamp;hostname;message
  line_formatter = logging.Formatter(
      "%%(asctime)-16s;%s;%%(message)s" % botname)
  file_handler.setFormatter(line_formatter)
  logger.addHandler(file_handler)
  if verbose:
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(line_formatter)
    logger.addHandler(stdout_handler)
  return logger
def run_all(config_dict, log_folder, verbose):
  """Run every configured check (dns, ping, ping6, url) and log one
  semicolon-separated result line per target.
  """
  dns_logger = create_logger("dns", log_folder, verbose)
  for name, entry in config_dict.get("dns", {}).items():
    dns_logger.info(
        ';'.join(str(field) for field in resolve(name, entry.get("expected"))))

  ping_logger = create_logger("ping", log_folder, verbose)
  for host in config_dict.get("ping", []):
    ping_logger.info(';'.join(str(field) for field in ping(host)))

  ping6_logger = create_logger("ping6", log_folder, verbose)
  for host in config_dict.get("ping6", []):
    ping6_logger.info(';'.join(str(field) for field in ping6(host)))

  http_logger = create_logger("http", log_folder, verbose)
  for url, expected in config_dict.get("url", {}).items():
    http_logger.info(';'.join(str(field) for field in request(url, expected)))
def main():
  """Command-line entry point: parse arguments, load the configuration,
  wait the configured delay (to spread cron-triggered runs), then run all
  benchmarks.
  """
  parser = argparse.ArgumentParser(
      description="Run network benchmarch.",
  )
  parser.add_argument('-l', '--logdir', default=".",
                      help="Directory where the logs are going to be placed.")
  parser.add_argument('-c', '--conf',
                      help="Path to the configuration json file.")
  parser.add_argument('-v', '--verbose', action='store_true',
                      help="Show the results on stdout.")
  # Random default delay spreads simultaneous cron invocations.
  parser.add_argument('-d', '--delay', default=random.randint(0, 30),
                      help="Delay before start to run,"
                           "as this script can be called on cron.")
  args = parser.parse_args()
  print("Downloading %s..." % args.conf.strip())
  configuration = load_configuration(args.conf)
  print("Waiting %s before start..." % args.delay)
  time.sleep(float(args.delay))
  run_all(configuration,
          log_folder=args.logdir,
          verbose=args.verbose)
import socket
import time
import dns.resolver
def resolve(name, expected_list=None):
  """ Resolve name using standard system name resolution.
  """
  begin = time.time()
  try:
    ip_list = [answer.to_text() for answer in dns.resolver.query(name, "A")]
    resolution, status = 200, "OK"
  except dns.resolver.NXDOMAIN:
    resolution, status = 600, "Cannot resolve the hostname"
    ip_list = []
  resolving_time = time.time() - begin
  # Output is:
  # TEST IDENTIFIER, NAME, RESOLUTION (200 or 600), Time for resolve,
  # status ("OK" or "Cannot resolve the hostname"), Resolved IP.
  if expected_list is not None and set(expected_list) != set(ip_list):
    # Order-insensitive comparison against the expected addresses.
    status = "UNEXPECTED"
    ip_list = "%s (expected) != %s (found)" % (expected_list, ip_list)
  return ('DNS', name, resolution, resolving_time, status, ip_list)
import sys
import pycurl
from io import BytesIO
from slapos.util import bytes2str
def get_curl(buffer, url):
  """Perform a GET on ``url``, writing the response body into ``buffer``
  (a bytes-accepting file-like object).

  Returns (curl_handle, result) where result is "OK" or "FAIL"; the
  caller must close the returned handle.

  NOTE(review): TLS certificate verification is disabled — presumably
  intentional for monitoring self-signed endpoints; do not reuse this
  helper for security-sensitive requests.
  """
  curl = pycurl.Curl()
  for option, value in (
      (pycurl.URL, url),
      (pycurl.CONNECTTIMEOUT, 10),
      (pycurl.TIMEOUT, 30),
      (pycurl.WRITEDATA, buffer),
      (pycurl.SSL_VERIFYPEER, False),
      (pycurl.SSL_VERIFYHOST, False)):
    curl.setopt(option, value)
  result = "OK"
  try:
    curl.perform()
  except Exception:
    import traceback
    traceback.print_exc(file=sys.stderr)
    sys.stderr.flush()
    result = "FAIL"
  return curl, result
def request(url, expected_dict):
  """GET ``url`` and check the response against ``expected_dict``, which
  may contain "expected_response" (an HTTP status code) and
  "expected_text" (a substring the body must contain).

  Returns ('GET', url, response_code, timing_csv, result) where
  timing_csv holds five semicolon-separated curl timings and result is
  "OK", "FAIL" or an "UNEXPECTED (...)" description.
  """
  buffer = BytesIO()
  curl, result = get_curl(buffer, url)
  body = buffer.getvalue()
  # namelookup;connect;pretransfer;starttransfer;total
  rendering_time = "%s;%s;%s;%s;%s" % (
      curl.getinfo(curl.NAMELOOKUP_TIME),
      curl.getinfo(curl.CONNECT_TIME),
      curl.getinfo(curl.PRETRANSFER_TIME),
      curl.getinfo(curl.STARTTRANSFER_TIME),
      curl.getinfo(curl.TOTAL_TIME))
  response_code = curl.getinfo(pycurl.HTTP_CODE)
  curl.close()
  expected_response = expected_dict.get("expected_response")
  if expected_response is not None and expected_response != response_code:
    result = "UNEXPECTED (%s != %s)" % (expected_response, response_code)
  expected_text = expected_dict.get("expected_text")
  if expected_text is not None and str(expected_text) not in bytes2str(body):
    result = "UNEXPECTED (%s not in page content)" % (expected_text)
  return ('GET', url, response_code, rendering_time, result)
...@@ -26,61 +26,9 @@ ...@@ -26,61 +26,9 @@
############################################################################## ##############################################################################
import unittest import unittest
import os.path
from slapos.networkbench import dnsbench
from slapos.networkbench.ping import ping, ping6
from slapos.networkbench.http import request
# Addresses eu.web.vifib.com is expected to resolve to.
DNS_EXPECTED_LIST = ["161.97.166.226", "176.31.129.213"]


class TestDNSBench(unittest.TestCase):
  """Network-dependent tests for dnsbench.resolve (live DNS lookups)."""

  def test_dnsbench_ok(self):
    """
    Test dns resolution
    """
    result = dnsbench.resolve("eu.web.vifib.com", DNS_EXPECTED_LIST)
    self.assertEqual(result[0], 'DNS')
    self.assertEqual(result[1], 'eu.web.vifib.com')
    self.assertEqual(result[2], 200)
    self.assertLess(result[3], 30)
    self.assertEqual(result[4], 'OK',
      "%s != OK, full info: %s" % (result[4], result) )
    self.assertEqual(set(result[5]), set([u'161.97.166.226', u'176.31.129.213']),
      "%s != set([u'161.97.166.226', u'176.31.129.213']), full info: %s" % (set(result[5]), result))

  def test_dnsbench_fail(self):
    """ Test dns failure resolution
    """
    result = dnsbench.resolve("thisdomaindontexist.erp5.com")
    self.assertEqual(result[0], 'DNS')
    self.assertEqual(result[1], 'thisdomaindontexist.erp5.com')
    self.assertEqual(result[2], 600)
    self.assertLess(result[3], 30)
    self.assertEqual(result[4], 'Cannot resolve the hostname')
    self.assertEqual(result[5], [])

  def test_dnsbench_unexpected(self):
    """ Test dns unexpected resolution
    """
    result = dnsbench.resolve("www.erp5.com", [DNS_EXPECTED_LIST[0]])
    self.assertEqual(result[0], 'DNS')
    self.assertEqual(result[1], 'www.erp5.com')
    self.assertEqual(result[2], 200)
    self.assertLess(result[3], 30)
    self.assertEqual(result[4], 'UNEXPECTED')
    self.assertTrue(result[5].startswith("['161.97.166.226'] (expected) != "))
class TestPing(unittest.TestCase):

  def test_ping_ok(self):
...@@ -101,7 +49,6 @@ class TestPing(unittest.TestCase):
    self.assertEqual(info[4], -1)
    self.assertEqual(info[5], 'Fail to parser ping output')

  def test_ping6_ok(self):
    info = ping6("localhost")
    self.assertEqual(info[0], 'PING6')
...@@ -119,91 +66,3 @@ class TestPing(unittest.TestCase):
    self.assertEqual(info[3], 'failed')
    self.assertEqual(info[4], -1)
    self.assertEqual(info[5], 'Fail to parser ping output')
class TestHTTPBench(unittest.TestCase):
  """Network-dependent tests for http.request.

  NOTE(review): all of these rely on the live www.erp5.com site, as the
  original docstrings already complained; please replace with a local
  fixture server.
  """

  def _assertRequest(self, url, expected_dict, expected_code, expected_result):
    # Shared checks: every request() result is a 5-tuple
    # ('GET', url, code, "t1;t2;t3;t4;t5", result).
    info = request(url, expected_dict)
    self.assertEqual(info[0], 'GET')
    self.assertEqual(info[1], url)
    self.assertEqual(info[2], expected_code)
    self.assertEqual(len(info[3].split(';')), 5)
    self.assertEqual(info[4], expected_result)

  def test_request_ok(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it
    """
    self._assertRequest("https://www.erp5.com", {}, 200, "OK")

  def test_request_expected_response(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it
    """
    self._assertRequest("https://www.erp5.com",
                        {"expected_response": 200}, 200, "OK")

  def test_request_expected_redirection(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it
    """
    self._assertRequest("http://www.erp5.com",
                        {"expected_response": 302}, 302, "OK")

  def test_request_expected_text(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it
    """
    self._assertRequest("https://www.erp5.com",
                        {"expected_text": "ERP5"}, 200, "OK")

  def test_request_fail(self):
    """ Test unreachable URL
    """
    self._assertRequest("http://thisurldontexist.erp5.com", {}, 0, "FAIL")

  def test_request_unexpected_response(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it
    """
    self._assertRequest("http://www.erp5.com",
                        {"expected_response": 200}, 302,
                        "UNEXPECTED (200 != 302)")

  def test_request_unexpected_text(self):
    """ This test is way to badly written as it depends on
        www.erp5.com for now, please replace it.
    """
    self._assertRequest("https://www.erp5.com",
                        {"expected_text": "COUSCOUS"}, 200,
                        "UNEXPECTED (COUSCOUS not in page content)")
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment