Commit 72e2950c authored by Alain Takoudjou's avatar Alain Takoudjou

Merge branch 'monitor'

Conflicts:
	setup.py
parents 000f3c1d 470f301c
Pipeline #1244 skipped
......@@ -46,6 +46,7 @@ setup(name=name,
'passlib',
'netifaces',
'erp5.util',
'PyRSS2Gen',
] + additional_install_requires,
extras_require = {
'lampconfigure': ["mysqlclient"], #needed for MySQL Database access
......@@ -72,6 +73,12 @@ setup(name=name,
'is-local-tcp-port-opened = slapos.promise.is_local_tcp_port_opened:main',
'is-process-older-than-dependency-set = slapos.promise.is_process_older_than_dependency_set:main',
'killpidfromfile = slapos.systool:killpidfromfile', # BBB
'monitor.bootstrap = slapos.monitor.monitor:main',
'monitor.collect = slapos.monitor.collect:main',
'monitor.runpromise = slapos.monitor.runpromise:main',
'monitor.genstatus = slapos.monitor.globalstate:main',
'monitor.genrss = slapos.monitor.status2rss:main',
'monitor.configwrite = slapos.monitor.monitor_config_write:main',
'runResiliencyUnitTestTestNode = slapos.resiliencytest:runUnitTest',
'runResiliencyScalabilityTestNode = slapos.resiliencytest:runResiliencyTest',
'runStandaloneResiliencyTest = slapos.resiliencytest:runStandaloneResiliencyTest',
......
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2014 Vifib SARL and Contributors.
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import sqlite3
import os
import pwd
import time
import json
import argparse
import psutil
from time import strftime
from datetime import datetime, timedelta
def parseArguments():
  """
  Build and parse the command-line options of the collect runner.
  """
  arg_parser = argparse.ArgumentParser()
  # All options are plain string arguments; declare them in one pass.
  for option, description in (
      ('--output_folder',
       'Path of the folder where output files should be written.'),
      ('--partition_id',
       'ID of the computer partition to collect data from.'),
      ('--collector_db',
       'The path of slapos collect database.')):
    arg_parser.add_argument(option, help=description)
  return arg_parser.parse_args()
class ResourceCollect:
  """Read-only accessor for the slapos collect sqlite database.

  Each public method opens and closes its own connection; averages are
  returned as floats rounded to 2 decimals, or None when no sample
  matches the requested partition/date.
  """

  def __init__(self, db_path=None):
    # The collect database must already exist: this class only reads it.
    assert os.path.exists(db_path) and os.path.isfile(db_path)
    self.uri = db_path
    self.connection = None
    self.cursor = None

  def connect(self):
    """Open a connection + cursor on the collect database."""
    self.connection = sqlite3.connect(self.uri)
    self.cursor = self.connection.cursor()

  def close(self):
    """Close the cursor and connection opened by connect()."""
    assert self.connection is not None
    self.cursor.close()
    self.connection.close()

  def _execute(self, sql):
    assert self.connection is not None
    return self.cursor.execute(sql)

  def select(self, table, date=None, columns="*", where=None):
    """Query database for a full table information."""
    if date is not None:
      where_clause = " WHERE date = '%s' " % date
    else:
      where_clause = ""
    if where is not None:
      if where_clause == "":
        where_clause += " WHERE 1 = 1 "
      where_clause += " AND %s " % where
    select_sql = "SELECT %s FROM %s %s " % (columns, table, where_clause)
    return self._execute(select_sql)

  def has_table(self, name):
    """Return True when `name` is a table of the database.

    BUG FIX: the original called self.connect() without ever closing,
    leaking and clobbering any connection a caller had open. A private
    short-lived, parameterized query is used instead.
    """
    connection = sqlite3.connect(self.uri)
    try:
      cursor = connection.execute(
          "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
          (name,))
      return cursor.fetchone() is not None
    finally:
      connection.close()

  def _getPartitionAverage(self, table, sum_column, partition_id, date_scope):
    """Return (SUM(sum_column), COUNT(DISTINCT time)) for one partition,
    or None when there is no matching sample.

    BUG FIX: the original leaked the connection on the early-return path;
    the finally clause guarantees close() runs.
    """
    self.connect()
    try:
      where = "partition = '%s'" % partition_id
      total = list(zip(*self.select(
          table, date_scope, columns="SUM(%s)" % sum_column, where=where)))
      if not total or total[0][0] is None:
        return None
      count = list(zip(*self.select(
          table, date_scope, columns="COUNT(DISTINCT time)", where=where)))
      if not count:
        return None
      return (total[0][0], count[0][0])
    finally:
      self.close()

  def getPartitionCPULoadAverage(self, partition_id, date_scope):
    """Average cpu_percent used by `partition_id` during day `date_scope`."""
    result = self._getPartitionAverage(
        "user", "cpu_percent", partition_id, date_scope)
    if result is not None:
      cpu_percent_sum, sample_amount = result
      return round(cpu_percent_sum / sample_amount, 2)

  def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
    """Average RSS memory (in MB) used by `partition_id` during `date_scope`."""
    result = self._getPartitionAverage(
        "user", "memory_rss", partition_id, date_scope)
    if result is not None:
      memory_sum, sample_amount = result
      return round(memory_sum / (sample_amount * 1024 * 1024.0), 2)

  def getPartitionDiskUsedAverage(self, partition_id, date_scope):
    """Average disk usage (in MB) for `partition_id` during `date_scope`.

    BUG FIX: the original called self.db.connect()/self.db.close() on a
    nonexistent `db` attribute, raising AttributeError whenever the
    'folder' table existed.
    """
    if not self.has_table('folder'):
      return
    result = self._getPartitionAverage(
        "folder", "disk_used", partition_id, date_scope)
    if result is not None:
      disk_used_sum, collect_amount = result
      return round(disk_used_sum / (collect_amount * 1024.0), 2)

  def getPartitionConsumption(self, partition_id, where=""):
    """
    Query collector db to get consumed resource for last minute,
    one dict per pid, enriched with live psutil info when available.
    """
    self.connect()
    consumption_list = []
    if where != "":
      where = "and %s" % where
    date_scope = datetime.now().strftime('%Y-%m-%d')
    min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
    max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')

    sql_query = """select count(pid), SUM(cpu_percent) as cpu_result, SUM(cpu_time),
MAX(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), pid, SUM(io_rw_counter),
SUM(io_cycles_counter) from user
where date='%s' and partition='%s' and (time between '%s' and '%s') %s
group by pid order by cpu_result desc""" % (
        date_scope, partition_id, min_time, max_time, where)
    query_result = self._execute(sql_query)
    for result in query_result:
      count = int(result[0])
      if count <= 0:
        continue
      resource_dict = {
        'pid': result[6],
        'cpu_percent': round(result[1] / count, 2),
        'cpu_time': round((result[2] or 0) / (60.0), 2),
        'cpu_num_threads': round(result[3] / count, 2),
        'memory_percent': round(result[4] / count, 2),
        'memory_rss': round((result[5] or 0) / (1024 * 1024.0), 2),
        'io_rw_counter': round(result[7] / count, 2),
        'io_cycles_counter': round(result[8] / count, 2)
      }
      try:
        # Enrich with live process info when the process still exists.
        pprocess = psutil.Process(int(result[6]))
      except psutil.NoSuchProcess:
        pass
      else:
        resource_dict['name'] = pprocess.name()
        resource_dict['command'] = pprocess.cmdline()
        resource_dict['user'] = pprocess.username()
        resource_dict['date'] = datetime.fromtimestamp(
            pprocess.create_time()).strftime("%Y-%m-%d %H:%M:%S")
      consumption_list.append(resource_dict)
    self.close()
    return consumption_list

  def getPartitionComsumptionStatus(self, partition_id, where=""):
    """Return (process_dict, memory_dict, io_dict) aggregated over the
    last minute for `partition_id`; empty dicts when no sample exists."""
    self.connect()
    if where != "":
      where = " and %s" % where
    date_scope = datetime.now().strftime('%Y-%m-%d')
    min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
    max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')

    sql_query = """select count(pid), SUM(cpu_percent), SUM(cpu_time),
SUM(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), SUM(io_rw_counter),
SUM(io_cycles_counter) from user where
date='%s' and partition='%s' and (time between '%s' and '%s') %s""" % (
        date_scope, partition_id, min_time, max_time, where)
    result_list = list(zip(*self._execute(sql_query)))

    # BUG FIX: `a = b = c = {}` bound all three names to ONE shared dict,
    # so a later write to io_dict leaked into the other two.
    process_dict = {}
    memory_dict = {}
    io_dict = {}
    if len(result_list):
      result = result_list
      process_dict = {'total_process': result[0][0],
                      'cpu_percent': round((result[1][0] or 0), 2),
                      'cpu_time': round((result[2][0] or 0) / (60.0), 2),
                      'cpu_num_threads': round((result[3][0] or 0), 2),
                      'date': '%s %s' % (date_scope, min_time)
                     }
      memory_dict = {'memory_percent': round((result[4][0] or 0), 2),
                     'memory_rss': round((result[5][0] or 0) / (1024 * 1024.0), 2),
                     'date': '%s %s' % (date_scope, min_time)
                    }
      io_dict = {'io_rw_counter': round((result[6][0] or 0), 2),
                 'io_cycles_counter': round((result[7][0] or 0), 2),
                 'disk_used': 0,
                 'date': '%s %s' % (date_scope, min_time)
                }
      if self.has_table('folder'):
        disk_result_cursor = self.select(
          "folder", date_scope,
          columns="SUM(disk_used)",
          where="partition='%s' and (time between '%s' and '%s') %s" % (
            partition_id, min_time, max_time, where
          )
        )
        disk_used_sum = list(zip(*disk_result_cursor))
        if len(disk_used_sum) and disk_used_sum[0][0] is not None:
          io_dict['disk_used'] = round(disk_used_sum[0][0] / 1024.0, 2)
    self.close()
    return (process_dict, memory_dict, io_dict)
def appendToJsonFile(file_path, content, stepback=2):
  """Append `content` as a new quoted element of the JSON "data" list
  stored in `file_path`, by rewriting the trailing `]}` in place.

  `stepback` is how many bytes from EOF the closing `]}` starts at.
  """
  with open(file_path, mode="r+") as jfile:
    jfile.seek(0, 2)                       # jump to end of file
    jfile.seek(jfile.tell() - stepback)    # step back over the ']}' tail
    jfile.write(',"{}"]}}'.format(content))
def initProcessDataFile(file_path):
  """Create/reset the process data JSON document with its header row.

  BUG FIX: the original opened the global name `process_file` instead of
  the `file_path` argument, raising NameError when no such global existed.
  """
  with open(file_path, 'w') as fprocess:
    data_dict = {
      "date": time.time(),
      "data": ["date, total process, CPU percent, CPU time, CPU threads"]
    }
    fprocess.write(json.dumps(data_dict))
def initMemoryDataFile(file_path):
  """Create/reset the memory data JSON document with its header row.

  BUG FIX: the original opened the global name `mem_file` instead of
  the `file_path` argument, raising NameError when no such global existed.
  """
  with open(file_path, 'w') as fmem:
    data_dict = {
      "date": time.time(),
      "data": ["date, memory used percent, memory used"]
    }
    fmem.write(json.dumps(data_dict))
def initIODataFile(file_path):
  """Create/reset the I/O data JSON document with its header row.

  BUG FIX: the original opened the global name `io_file` instead of
  the `file_path` argument, raising NameError when no such global existed.
  """
  with open(file_path, 'w') as fio:
    data_dict = {
      "date": time.time(),
      "data": ["date, io rw counter, io cycles counter, disk used"]
    }
    fio.write(json.dumps(data_dict))
def main():
  """Collect the resource usage of one partition from the collect db and
  write/append the monitor data + status JSON documents.

  Exits with status 1 (after initializing empty documents) when the
  collector database does not exist.
  """
  parser = parseArguments()
  # BUG FIX: the original tested `not exists(x) and isdir(x)`, which can
  # never be true; we must fail when the folder is missing OR not a dir.
  if not os.path.isdir(parser.output_folder):
    raise Exception("Invalid output folder: %s" % parser.output_folder)

  # Consumption global status
  process_file = os.path.join(parser.output_folder, 'monitor_resource_process.data.json')
  mem_file = os.path.join(parser.output_folder, 'monitor_resource_memory.data.json')
  io_file = os.path.join(parser.output_folder, 'monitor_resource_io.data.json')
  resource_file = os.path.join(parser.output_folder, 'monitor_process_resource.status.json')
  status_file = os.path.join(parser.output_folder, 'monitor_resource.status.json')

  if not os.path.exists(parser.collector_db):
    print("Collector database not found...")
    initProcessDataFile(process_file)
    initMemoryDataFile(mem_file)
    initIODataFile(io_file)
    # BUG FIX: the original shadowed the status_file/resource_file path
    # variables with the open file objects.
    with open(status_file, "w") as fstatus:
      fstatus.write('{"cpu_time": 0, "cpu_percent": 0, "memory_rss": 0, "memory_percent": 0, "io_rw_counter": 0, "date": "", "total_process": 0, "disk_used": 0, "io_cycles_counter": 0, "cpu_num_threads": 0}')
    with open(resource_file, "w") as fresource:
      fresource.write('[]')
    exit(1)

  collector = ResourceCollect(parser.collector_db)
  # The partition is identified by the user owning the output folder.
  stat_info = os.stat(parser.output_folder)
  partition_user = pwd.getpwuid(stat_info.st_uid)[0]

  process_result, memory_result, io_result = \
      collector.getPartitionComsumptionStatus(partition_user)

  label_list = ['date', 'total_process', 'cpu_percent', 'cpu_time', 'cpu_num_threads',
                'memory_percent', 'memory_rss', 'io_rw_counter', 'io_cycles_counter',
                'disk_used']
  resource_status_dict = {}
  if not os.path.exists(process_file):
    initProcessDataFile(process_file)
  if not os.path.exists(mem_file):
    initMemoryDataFile(mem_file)
  if not os.path.exists(io_file):
    initIODataFile(io_file)

  # `key in dict` replaces Python-2-only dict.has_key().
  if process_result and process_result['total_process'] != 0.0:
    appendToJsonFile(process_file, ", ".join(
      [str(process_result[key]) for key in label_list if key in process_result]))
    resource_status_dict.update(process_result)
  if memory_result and memory_result['memory_rss'] != 0.0:
    appendToJsonFile(mem_file, ", ".join(
      [str(memory_result[key]) for key in label_list if key in memory_result]))
    resource_status_dict.update(memory_result)
  if io_result and io_result['io_rw_counter'] != 0.0:
    appendToJsonFile(io_file, ", ".join(
      [str(io_result[key]) for key in label_list if key in io_result]))
    resource_status_dict.update(io_result)

  with open(status_file, 'w') as fp:
    fp.write(json.dumps(resource_status_dict))

  # Consumption Resource
  resource_process_status_list = collector.getPartitionConsumption(partition_user)
  if resource_process_status_list:
    with open(resource_file, 'w') as rf:
      rf.write(json.dumps(resource_process_status_list))
#!/usr/bin/env python
import sys
import os
import glob
import json
import ConfigParser
import time
from datetime import datetime
def softConfigGet(config, *args, **kwargs):
  """Forgiving ConfigParser lookup: return "" instead of raising when
  the requested section or option does not exist."""
  value = ""
  try:
    value = config.get(*args, **kwargs)
  except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
    pass
  return value
def generateStatisticsData(stat_file_path, content):
  """Append the success/error/warning counters of a global-state document
  to the statistics JSON file, creating the file (with its CSV-like
  header) on first use.

  BUG FIX: dict.has_key() is Python-2-only; replaced with `in`.
  """
  if not os.path.exists(stat_file_path):
    # csv-like document for statistics
    with open(stat_file_path, 'w') as fstat:
      data_dict = {
        "date": time.time(),
        "data": ["Date, Success, Error, Warning"]
      }
      fstat.write(json.dumps(data_dict))

  # 'state' is only present on full global-state documents.
  if 'state' in content:
    current_state = '%s, %s, %s, %s' % (
      content['date'],
      content['state']['success'],
      content['state']['error'],
      content['state']['warning'])
    # Append in place by rewriting the trailing ']}' of the JSON document.
    with open(stat_file_path, mode="r+") as fstat:
      fstat.seek(0, 2)
      fstat.seek(fstat.tell() - 2)
      fstat.write(',"{}"]}}'.format(current_state))
def run(args_list):
  """Aggregate every *.status.json promise result into a global monitor
  state document, written twice (private + public copy), then update the
  statistics file.

  args_list: [monitor_config_path, instance_config_path].
  Returns 0 on success.
  """
  monitor_file, instance_file = args_list
  monitor_config = ConfigParser.ConfigParser()
  monitor_config.read(monitor_file)

  base_folder = monitor_config.get('monitor', 'private-folder')
  status_folder = monitor_config.get('monitor', 'public-folder')
  base_url = monitor_config.get('monitor', 'base-url')
  related_monitor_list = monitor_config.get("monitor", "monitor-url-list").split()
  statistic_folder = os.path.join(base_folder, 'data', '.jio_documents')
  parameter_file = os.path.join(base_folder, 'config', '.jio_documents', 'config.json')
  report_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

  if not os.path.exists(statistic_folder):
    try:
      os.makedirs(statistic_folder)
    except OSError:
      # Py3-compatible rewrite of `except OSError, e` / os.errno.EEXIST:
      # only swallow the race where the folder appeared meanwhile.
      if not os.path.isdir(statistic_folder):
        raise

  # search for all status files
  file_list = [status_path
               for status_path in glob.glob("%s/*.status.json" % status_folder)
               if os.path.isfile(status_path)]
  error = warning = success = 0
  status = 'OK'
  promise_list = []
  global_state_file = os.path.join(base_folder, 'monitor.global.json')
  public_state_file = os.path.join(status_folder, 'monitor.global.json')

  for status_path in file_list:
    try:
      with open(status_path, 'r') as temp_file:
        tmp_json = json.loads(temp_file.read())
    except ValueError:
      # bad json file ? skip it
      continue
    if tmp_json['status'] == 'ERROR':
      error += 1
    elif tmp_json['status'] == 'OK':
      success += 1
    elif tmp_json['status'] == 'WARNING':
      warning += 1
    tmp_json['time'] = tmp_json['start-date'].split(' ')[1]
    promise_list.append(tmp_json)

  # Any error wins over warnings; otherwise stay 'OK'.
  if error:
    status = 'ERROR'
  elif warning:
    status = 'WARNING'

  global_state_dict = dict(
    status=status,
    state={
      'error': error,
      'success': success,
      'warning': warning,
    },
    type='global',
    date=report_date,
    _links={"rss_url": {"href": "%s/public/feed" % base_url},
            "public_url": {"href": "%s/share/jio_public/" % base_url},
            "private_url": {"href": "%s/share/jio_private/" % base_url}
           },
    data={'state': 'monitor_state.data',
          'process_state': 'monitor_process_resource.status',
          'process_resource': 'monitor_resource_process.data',
          'memory_resource': 'monitor_resource_memory.data',
          'io_resource': 'monitor_resource_io.data',
          'monitor_process_state': 'monitor_resource.status'}
  )
  global_state_dict['_embedded'] = {'promises': promise_list}

  # Optionally embed the instance description.
  if os.path.exists(instance_file):
    config = ConfigParser.ConfigParser()
    config.read(instance_file)
    if 'instance' in config.sections():
      instance_dict = {}
      global_state_dict['title'] = config.get('instance', 'name')
      global_state_dict['hosting-title'] = config.get('instance', 'root-name')
      if not global_state_dict['title']:
        global_state_dict['title'] = 'Instance Monitoring'
      instance_dict['computer'] = config.get('instance', 'computer')
      instance_dict['ipv4'] = config.get('instance', 'ipv4')
      instance_dict['ipv6'] = config.get('instance', 'ipv6')
      instance_dict['software-release'] = config.get('instance', 'software-release')
      instance_dict['software-type'] = config.get('instance', 'software-type')
      instance_dict['partition'] = config.get('instance', 'partition')
      global_state_dict['_embedded'].update({'instance': instance_dict})

  if related_monitor_list:
    global_state_dict['_links']['related_monitor'] = [
      {'href': "%s/share/jio_public" % url} for url in related_monitor_list]

  if os.path.exists(parameter_file):
    with open(parameter_file) as cfile:
      global_state_dict['parameters'] = json.loads(cfile.read())

  # Public information with the link to private folder
  public_state_dict = dict(
    status=status,
    date=report_date,
    _links={'monitor': {'href': '%s/share/jio_private/' % base_url}},
    title=global_state_dict.get('title', '')
  )
  public_state_dict['hosting-title'] = global_state_dict.get('hosting-title', '')
  public_state_dict['_links']['related_monitor'] = \
      global_state_dict['_links'].get('related_monitor', [])

  with open(global_state_file, 'w') as fglobal:
    fglobal.write(json.dumps(global_state_dict))
  with open(public_state_file, 'w') as fpglobal:
    fpglobal.write(json.dumps(public_state_dict))

  generateStatisticsData(
    os.path.join(statistic_folder, 'monitor_state.data.json'),
    global_state_dict)

  return 0
def main():
  """CLI entry point: globalstate <monitor_conf_path> <instance_conf_path>."""
  argv = sys.argv
  if len(argv) >= 3:
    sys.exit(run(argv[1:]))
  print("Usage: %s <monitor_conf_path> <instance_conf_path>" % argv[0])
  sys.exit(2)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import ConfigParser
import errno
import glob
import json
import os
import ssl
import stat
import sys
import traceback
import urllib2
from datetime import datetime
# OPML (RSS outline) document skeleton. OPML_START takes the root
# instance title plus creation/modification dates (RFC-822 strings).
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
<head>
<title>%(root_title)s</title>
<dateCreated>%(creation_date)s</dateCreated>
<dateModified>%(modification_date)s</dateModified>
</head>
<body>
<outline text="%(outline_title)s">"""

# Closing tags matching OPML_START.
OPML_END = """ </outline>
</body>
</opml>"""

# One entry per monitored instance: its RSS feed urls and global-state url.
OPML_OUTLINE_FEED = '<outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(html_url)s" xmlUrl="%(xml_url)s" url="%(global_url)s" />'
def parseArguments():
  """
  Build and parse the command-line options of the monitor bootstrap.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--config_file',
      default='monitor.cfg',
      help='Monitor Configuration file')
  return arg_parser.parse_args()
def mkdirAll(path):
  """Create `path` and its parents; silently succeed if the directory
  already exists.

  Py3-compatible rewrite: `except OSError, e` and the undocumented
  `os.errno` (removed in Python 3.7) are replaced by `as e` + errno.
  """
  try:
    os.makedirs(path)
  except OSError as e:
    # Only swallow "already exists as a directory"; re-raise anything else.
    if e.errno != errno.EEXIST or not os.path.isdir(path):
      raise
def softConfigGet(config, *args, **kwargs):
  """Forgiving ConfigParser lookup: yield None instead of raising when
  the requested section or option is absent."""
  result = None
  try:
    result = config.get(*args, **kwargs)
  except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
    pass
  return result
def createSymlink(source, destination):
  """Symlink source -> destination, ignoring an already-existing
  destination.

  Py3-compatible rewrite: `except OSError, e` and the undocumented
  `os.errno` are replaced by `as e` + errno.
  """
  try:
    os.symlink(source, destination)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise
class Monitoring(object):
def __init__(self, configuration_file):
  """Load `configuration_file` and expose every [monitor] option as an
  attribute, then register the promises found in the promise folders."""
  config = self.loadConfig([configuration_file])

  # Plain option -> attribute copies from the [monitor] section.
  for attribute, option in (
      ('title', 'title'),
      ('root_title', 'root-title'),
      ('service_pid_folder', 'service-pid-folder'),
      ('crond_folder', 'crond-folder'),
      ('logrotate_d', 'logrotate-folder'),
      ('promise_runner', 'promise-runner'),
      ('public_folder', 'public-folder'),
      ('private_folder', 'private-folder'),
      ('collector_db', 'collector-db'),
      ('collect_script', 'collect-script'),
      ('webdav_folder', 'webdav-folder'),
      ('report_script_folder', 'report-folder'),
      ('promise_output_file', 'promise-output-file')):
    setattr(self, attribute, config.get('monitor', option))

  # Options needing splitting or other post-processing.
  self.promise_folder_list = config.get('monitor', 'promise-folder-list').split()
  self.public_path_list = config.get('monitor', 'public-path-list').split()
  self.private_path_list = config.get('monitor', 'private-path-list').split()
  self.monitor_url_list = config.get('monitor', 'monitor-url-list').split()
  base_url = config.get('monitor', 'base-url')
  self.webdav_url = '%s/share' % base_url
  self.public_url = '%s/public' % base_url
  self.python = config.get('monitor', 'python') or 'python'
  self.parameter_list = [
    param.strip()
    for param in config.get('monitor', 'parameter-list').split('\n')
    if param]
  # Use this file to write knowledge0_cfg required by webrunner.
  self.parameter_cfg_file = config.get('monitor', 'parameter-file-path').strip()

  # Derived locations and bootstrap state.
  self.config_folder = os.path.join(self.private_folder, 'config')
  self.report_folder = self.private_folder
  self.bootstrap_is_ok = True

  self.promise_dict = {}
  for promise_folder in self.promise_folder_list:
    self.setupPromiseDictFromFolder(promise_folder)
def loadConfig(self, pathes, config=None):
  """Parse the files in `pathes` into `config` (a new ConfigParser is
  created when none is given) and return it; malformed files are
  reported but do not abort."""
  parser = ConfigParser.ConfigParser() if config is None else config
  try:
    parser.read(pathes)
  except ConfigParser.MissingSectionHeaderError:
    # Report the broken file but hand back whatever was parsed so far.
    traceback.print_exc()
  return parser
def readInstanceConfiguration(self):
  """Build the parameter document list from self.parameter_list.

  Each entry looks like "<type> <title> <args...>" with <type> one of
  raw, file, htpasswd, httpdcors. Returns a list of parameter dicts;
  unreadable or ill-formed entries are skipped with a message.

  Py3-compatible rewrite: `except OSError, e` -> `as e`, print statements
  parenthesized.
  """
  type_list = ['raw', 'file', 'htpasswd', 'httpdcors']
  configuration_list = []

  if not self.parameter_list:
    return []

  for config in self.parameter_list:
    config_list = config.strip().split(' ')
    # type: config_list[0]
    if len(config_list) >= 3 and config_list[0] in type_list:
      if config_list[0] == 'raw':
        configuration_list.append(dict(
          key='',
          title=config_list[1],
          value=' '.join(config_list[2:])
        ))
      elif (config_list[0] == 'file' or config_list[0] == 'htpasswd') and \
          os.path.exists(config_list[2]) and os.path.isfile(config_list[2]):
        try:
          with open(config_list[2]) as cfile:
            parameter = dict(
              key=config_list[1],
              title=config_list[1],
              value=cfile.read(),
              description={
                "type": config_list[0],
                "file": config_list[2]
              }
            )
            if config_list[0] == 'htpasswd':
              # htpasswd entries need "<user> <htpasswd_file>" after the file.
              if len(config_list) != 5 or not os.path.exists(config_list[4]):
                print('htpasswd file is not specified: %s' % str(config_list))
                continue
              parameter['description']['user'] = config_list[3]
              parameter['description']['htpasswd'] = config_list[4]
            configuration_list.append(parameter)
        except OSError as e:
          print('Cannot read file %s, Error is: %s' % (config_list[2], str(e)))
      elif config_list[0] == 'httpdcors' and os.path.exists(config_list[2]) and \
          os.path.exists(config_list[3]):
        # Keep a copy of the previous cors file to detect changes later.
        old_cors_file = os.path.join(
          os.path.dirname(config_list[2]),
          'prev_%s' % os.path.basename(config_list[2])
        )
        try:
          cors_content = ""
          if os.path.exists(old_cors_file):
            with open(old_cors_file) as cfile:
              cors_content = cfile.read()
          else:
            # Create empty file
            with open(old_cors_file, 'w') as cfile:
              cfile.write("")
          parameter = dict(
            key=config_list[1],
            title=config_list[1],
            value=cors_content,
            description={
              "type": config_list[0],
              "cors_file": config_list[2],
              "gracefull_bin": config_list[3]
            }
          )
          configuration_list.append(parameter)
        except OSError as e:
          print('Cannot read file at %s, Error is: %s' % (old_cors_file, str(e)))
  return configuration_list
def setupPromiseDictFromFolder(self, folder):
  """Register every executable file found in `folder` as a promise,
  keyed by file name, with an empty configuration attached."""
  for filename in os.listdir(folder):
    path = os.path.join(folder, filename)
    runnable = os.path.isfile(path) and os.access(path, os.X_OK)
    if runnable:
      self.promise_dict[filename] = {
        "path": path,
        "configuration": ConfigParser.ConfigParser(),
      }
def createSymlinksFromConfig(self, destination_folder, source_path_list, name=""):
  """Symlink every path of `source_path_list` into
  destination_folder/name (created on demand); no-op when either
  argument is empty.

  Py3-compatible rewrite: `except OSError, e` and os.errno replaced by
  `as e` + errno.
  """
  if not destination_folder or not source_path_list:
    return
  for path in source_path_list:
    path = path.rstrip('/')
    dirname = os.path.join(destination_folder, name)
    try:
      mkdirAll(dirname)  # could also raise OSError
      os.symlink(path, os.path.join(dirname, os.path.basename(path)))
    except OSError as e:
      # An already-existing link is fine; anything else is a real error.
      if e.errno != errno.EEXIST:
        raise
def getMonitorTitleFromUrl(self, monitor_url):
  """Fetch <monitor_url>/.jio_documents/monitor.global.json and return
  its 'title'; mark bootstrap as failed and return 'Unknown Instance'
  on any HTTP or JSON error.

  Py3-compatible rewrite: `except ValueError, e` -> `as e`, print
  parenthesized.
  """
  # This file should be generated
  if not monitor_url.startswith(('https://', 'http://')):
    return 'Unknown Instance'
  if not monitor_url.endswith('/'):
    monitor_url = monitor_url + '/'
  url = monitor_url + '/.jio_documents/monitor.global.json' # XXX Hard Coded path
  try:
    # XXX - working here with public url; certificate verification is
    # deliberately skipped when the ssl module allows it.
    if hasattr(ssl, '_create_unverified_context'):
      context = ssl._create_unverified_context()
      response = urllib2.urlopen(url, context=context)
    else:
      response = urllib2.urlopen(url)
  except urllib2.HTTPError:
    self.bootstrap_is_ok = False
    return 'Unknown Instance'
  try:
    monitor_dict = json.loads(response.read())
    return monitor_dict.get('title', 'Unknown Instance')
  except ValueError as e:
    print("Bad Json file at %s" % url)
    self.bootstrap_is_ok = False
    return 'Unknown Instance'
def getReportInfoFromFilename(self, filename):
  """Extract (report_name, cron_frequency) from a report script name.

  "foo_every_5_minute" runs every 5 minutes, "foo_every_2_hour" every 2
  hours; any other shape falls back to running every minute.
  """
  every_minute = "* * * * *"
  name_parts = filename.split('_every_')
  if len(name_parts) == 1:
    return (filename, every_minute)
  report_name = name_parts[0]
  run_time = name_parts[1].split('_')
  if len(run_time) != 2 or run_time[1] not in ('hour', 'minute'):
    return (report_name, every_minute)
  try:
    value = int(run_time[0])
  except ValueError:
    print("Warning: Bad report filename: %s" % filename)
    return (report_name, every_minute)
  if run_time[1] == 'hour':
    return (report_name, "* */%s * * *" % value)
  return (report_name, "*/%s * * * *" % value)
def configureFolders(self):
  """Prepare the public/private/webdav folder layout used by the monitor.

  Links the public and private folders into the webdav tree, creates the
  jio_public/jio_private entry points with their .jio_documents storages,
  then the data/config document folders, and finally removes obsolete
  *.history.json files from the private folder.
  """
  # configure public and private folder
  self.createSymlinksFromConfig(self.webdav_folder, [self.public_folder])
  self.createSymlinksFromConfig(self.webdav_folder, [self.private_folder])

  #configure jio_documents folder
  jio_public = os.path.join(self.webdav_folder, 'jio_public')
  jio_private = os.path.join(self.webdav_folder, 'jio_private')
  mkdirAll(jio_public)
  mkdirAll(jio_private)
  # expose the raw folders as jio document storages
  createSymlink(self.public_folder,
                os.path.join(jio_public, '.jio_documents'))
  createSymlink(self.private_folder,
                os.path.join(jio_private, '.jio_documents'))

  self.data_folder = os.path.join(self.private_folder, 'data', '.jio_documents')
  self.document_folder = os.path.join(self.private_folder, 'documents')
  config_folder = os.path.join(self.config_folder, '.jio_documents')

  mkdirAll(self.data_folder)
  mkdirAll(config_folder)
  createSymlink(os.path.join(self.private_folder, 'data'),
                os.path.join(jio_private, 'data'))
  createSymlink(self.config_folder, os.path.join(jio_private, 'config'))
  createSymlink(self.data_folder, self.document_folder)

  # Cleanup private folder
  for file in glob.glob("%s/*.history.json" % self.private_folder):
    try:
      os.unlink(file)
    except OSError:
      print "failed to remove file %s. Ignoring..." % file
def makeConfigurationFiles(self):
  """Dump the instance parameters to the jio config documents and write
  the knowledge0 cfg file required by webrunner.

  Py3-compatible rewrite: `except OSError, e` -> `as e`, print
  parenthesized, index loop replaced by direct iteration.
  """
  config_folder = os.path.join(self.config_folder, '.jio_documents')
  parameter_config_file = os.path.join(config_folder, 'config.parameters.json')
  parameter_file = os.path.join(config_folder, 'config.json')
  #mkdirAll(config_folder)

  parameter_list = self.readInstanceConfiguration()
  # Split the (non-public) description part away from each keyed parameter.
  description_dict = {}
  for parameter in parameter_list:
    key = parameter['key']
    if key:
      description_dict[key] = parameter.pop('description')

  with open(parameter_config_file, 'w') as config_file:
    config_file.write(json.dumps(description_dict))
  with open(parameter_file, 'w') as config_file:
    config_file.write(json.dumps(parameter_list))

  try:
    with open(self.parameter_cfg_file, 'w') as pfile:
      pfile.write('[public]\n')
      for parameter in parameter_list:
        if parameter['key']:
          pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
  except OSError as e:
    print("Error failed to create file %s" % self.parameter_cfg_file)
def generateOpmlFile(self, feed_url_list, output_file):
  """Write the OPML aggregation document listing this monitor's RSS feed
  plus one entry per related monitor url.

  BUG FIX: the original computed the creation date as
  `datetime.fromtimestamp(ctime).utcnow()`, which ignores the file
  timestamp entirely (utcnow() is a classmethod) and always yielded the
  current time; utcfromtimestamp(ctime) is used instead.
  """
  rfc822_format = "%a, %d %b %Y %H:%M:%S +0000"
  if os.path.exists(output_file):
    creation_date = datetime.utcfromtimestamp(
        os.path.getctime(output_file)).strftime(rfc822_format)
    modification_date = datetime.utcnow().strftime(rfc822_format)
  else:
    creation_date = modification_date = datetime.utcnow().strftime(rfc822_format)

  opml_content = OPML_START % {'creation_date': creation_date,
                               'modification_date': modification_date,
                               'outline_title': 'Monitoring RSS Feed list',
                               'root_title': self.root_title}

  # This monitor's own feed comes first.
  opml_content += OPML_OUTLINE_FEED % {'title': self.title,
                                       'html_url': self.public_url + '/feed',
                                       'xml_url': self.public_url + '/feed',
                                       'global_url': "%s/jio_private/" % self.webdav_url}
  for feed_url in feed_url_list:
    opml_content += OPML_OUTLINE_FEED % {
      'title': self.getMonitorTitleFromUrl(feed_url + "/share/jio_public/"),
      'html_url': feed_url + '/public/feed',
      'xml_url': feed_url + '/public/feed',
      'global_url': "%s/share/jio_private/" % feed_url}

  opml_content += OPML_END
  with open(output_file, 'w') as wfile:
    wfile.write(opml_content)
def generateLogrotateEntry(self, name, file_list, option_list):
  """
  Add a new entry in the logrotate.d folder, so the listed data files
  can be rotated (e.g. daily) with the given options.
  """
  entry_path = os.path.join(self.logrotate_d, name)
  entry_body = "%s {\n%s\n}\n" % (' '.join(file_list), '\n'.join(option_list))
  with open(entry_path, 'w') as flog:
    flog.write(entry_body)
def generateReportCronEntries(self):
  """Write the monitor-reports crontab: one promise-runner line per
  executable report script, then delete json results of removed reports.

  Py3-compatible rewrite: `except OSError, e` -> `as e`; the two-argument
  print is merged into a single formatted message.
  """
  cron_line_list = []
  # Reports that already produced a result file; survivors are pruned below.
  report_name_list = [name.replace('.report.json', '')
    for name in os.listdir(self.report_folder) if name.endswith('.report.json')]

  for filename in os.listdir(self.report_script_folder):
    report_script = os.path.join(self.report_script_folder, filename)
    if not (os.path.isfile(report_script) and os.access(report_script, os.X_OK)):
      continue
    report_name, frequency = self.getReportInfoFromFilename(filename)
    # report_name = os.path.splitext(filename)[0]
    report_json_path = "%s.report.json" % report_name
    report_cmd_line = [
      frequency,
      self.promise_runner,
      '--pid_path "%s"' % os.path.join(self.service_pid_folder,
                                       "%s.pid" % filename),
      '--output "%s"' % os.path.join(self.report_folder, report_json_path),
      '--promise_script "%s"' % report_script,
      '--promise_name "%s"' % report_name,
      '--monitor_url "%s/jio_private/"' % self.webdav_url, # XXX hardcoded,
      '--history_folder "%s"' % self.data_folder,
      '--instance_name "%s"' % self.title,
      '--hosting_name "%s"' % self.root_title,
      '--promise_type "report"']
    cron_line_list.append(' '.join(report_cmd_line))
    if report_name in report_name_list:
      report_name_list.remove(report_name)

  # cleanup removed report json result
  # NOTE(review): results are written to report_folder above but removed
  # from public_folder here -- confirm this asymmetry is intended.
  for report_name in report_name_list:
    result_path = os.path.join(self.public_folder, '%s.report.json' % report_name)
    if os.path.exists(result_path):
      try:
        os.unlink(result_path)
      except OSError as e:
        print("Error: Failed to delete %s: %s" % (result_path, str(e)))

  with open(self.crond_folder + "/monitor-reports", "w") as freport:
    freport.write("\n".join(cron_line_list))
def generateServiceCronEntries(self):
  """Write the monitor-promises crontab: one promise-runner line per
  registered promise, then delete status files of removed services.

  Py3-compatible rewrite: `except OSError, e` -> `as e`; the two-argument
  print is merged into a single formatted message.
  """
  # XXX only if at least one configuration file is modified, then write in the cron
  #cron_line_list = ['PATH=%s\n' % os.environ['PATH']]
  cron_line_list = []
  service_name_list = [name.replace('.status.json', '')
    for name in os.listdir(self.public_folder) if name.endswith('.status.json')]

  # NOTE(review): self.promise_items is not assigned in the visible code;
  # presumably (service_name, promise_dict) pairs -- confirm where it is set.
  for service_name, promise in self.promise_items:
    service_config = promise["configuration"]
    service_status_path = "%s/%s.status.json" % (self.public_folder, service_name)
    mkdirAll(os.path.dirname(service_status_path))
    promise_cmd_line = [
      softConfigGet(service_config, "service", "frequency") or "* * * * *",
      self.promise_runner,
      '--pid_path "%s"' % os.path.join(self.service_pid_folder,
                                       "%s.pid" % service_name),
      '--output "%s"' % service_status_path,
      '--promise_script "%s"' % promise["path"],
      '--promise_name "%s"' % service_name,
      '--monitor_url "%s/jio_private/"' % self.webdav_url, # XXX hardcoded,
      '--history_folder "%s"' % self.public_folder,
      '--instance_name "%s"' % self.title,
      '--hosting_name "%s"' % self.root_title]
    cron_line_list.append(' '.join(promise_cmd_line))
    if service_name in service_name_list:
      service_name_list.remove(service_name)

  # Some service was removed: delete its status file so monitor will not
  # consider its status anymore.
  for service_name in service_name_list:
    status_path = os.path.join(self.public_folder, '%s.status.json' % service_name)
    if os.path.exists(status_path):
      try:
        os.unlink(status_path)
      except OSError as e:
        print("Error: Failed to delete %s: %s" % (status_path, str(e)))

  with open(self.crond_folder + "/monitor-promises", "w") as fp:
    fp.write("\n".join(cron_line_list))
def addCronEntry(self, name, frequency, command):
entry_line = '%s %s' % (frequency, command)
cron_entry_file = os.path.join(self.crond_folder, name)
with open(cron_entry_file, "w") as cronf:
cronf.write(entry_line)
  def bootstrapMonitor(self):
    """Set up the whole monitor instance.

    Creates the public/private symlinks, regenerates the promise and report
    cron files, configuration files, logrotate entries, the OPML feed list
    and the collect cron entry. Writes an empty promise_output_file at the
    end when everything succeeded (bootstrap_is_ok). Always returns 0.
    """
    # Remove the previous success marker so a failed run is detectable.
    if os.path.exists(self.promise_output_file):
      os.unlink(self.promise_output_file)
    # create symlinks from monitor.conf
    self.createSymlinksFromConfig(self.public_folder, self.public_path_list)
    self.createSymlinksFromConfig(self.private_folder, self.private_path_list)
    self.configureFolders()
    # create symlinks from service configurations
    self.promise_items = self.promise_dict.items()
    for service_name, promise in self.promise_items:
      service_config = promise["configuration"]
      # Each promise may publish extra paths in the public/private folders.
      public_path_list = softConfigGet(service_config, "service", "public-path-list")
      private_path_list = softConfigGet(service_config, "service", "private-path-list")
      if public_path_list:
        self.createSymlinksFromConfig(self.public_folder,
                                      public_path_list.split(),
                                      service_name)
      if private_path_list:
        self.createSymlinksFromConfig(self.private_folder,
                                      private_path_list.split(),
                                      service_name)
    # Generate OPML file
    self.generateOpmlFile(self.monitor_url_list,
      os.path.join(self.public_folder, 'feeds'))
    # put promises to a cron file
    self.generateServiceCronEntries()
    # put report script to cron
    self.generateReportCronEntries()
    # Generate parameters files and scripts
    self.makeConfigurationFiles()
    # Rotate monitor data files
    option_list = [
      'daily', 'nocreate', 'olddir %s' % self.data_folder, 'rotate 5',
      'nocompress', 'extension .json', 'dateext',
      'dateformat -%Y-%m-%d', 'notifempty'
    ]
    file_list = [
      "%s/*.data.json" % self.private_folder,
      "%s/*.data.json" % self.data_folder]
    self.generateLogrotateEntry('monitor.data', file_list, option_list)
    # Rotate public history status file, delete data of previous days
    option_list = [
      'daily', 'nocreate', 'rotate 0',
      'nocompress', 'notifempty'
    ]
    file_list = ["%s/*.history.json" % self.public_folder]
    self.generateLogrotateEntry('monitor.service.status', file_list, option_list)
    # Add cron entry for SlapOS Collect
    command = "%s %s --output_folder %s --collector_db %s" % (self.python,
        self.collect_script, self.data_folder, self.collector_db)
    self.addCronEntry('monitor_collect', '* * * * *', command)
    # Write an empty file when monitor bootstrap went until the end
    if self.bootstrap_is_ok:
      with open(self.promise_output_file, 'w') as promise_file:
        promise_file.write("")
    return 0
def main():
  """Command line entry point: bootstrap the monitor described by the
  configuration file given on the command line."""
  arguments = parseArguments()
  sys.exit(Monitoring(arguments.config_file).bootstrapMonitor())
#!/usr/bin/env python
import sys
import os
import re
import json
import argparse
import subprocess
from datetime import datetime
import time
def parseArguments():
  """
  Parse arguments for monitor instance.

  Returns:
    argparse.Namespace with config_folder, htpasswd_bin and
    output_cfg_file attributes (None when not given).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--config_folder',
                      help='Path where json configuration/document will be read and write')
  parser.add_argument('--htpasswd_bin',
                      help='Path apache htpasswd binary. Needed to write htpasswd file.')
  # Fixed typo in help text: "Ouput" -> "Output".
  parser.add_argument('--output_cfg_file',
                      help='Output parameters in cfg file.')
  return parser.parse_args()
class MonitorConfigWrite(object):
  """Apply monitor parameter changes described in a JSON document.

  The document (config_json_file) is a list of {key, title, value} entries.
  A sibling file, config.parameters.json, maps each key to a description
  telling how the value must be applied: written to a plain file,
  regenerated as an htpasswd entry, or written as an Apache CORS snippet.
  """

  def __init__(self, config_json_file, htpasswd_bin, output_cfg_file=""):
    # JSON document listing the parameter entries to apply.
    self.config_json_file = config_json_file
    # Optional .cfg file mirroring the public (non-raw) parameters.
    self.output_cfg_file = output_cfg_file
    # Path of the Apache htpasswd binary used for password updates.
    self.htpasswd_bin = htpasswd_bin

  def _fileWrite(self, file_path, content):
    """Overwrite an existing file with *content*; return True on success."""
    # NOTE(review): in Python 2 a failing open() raises IOError, which
    # "except OSError" does not catch -- confirm this is intended.
    if os.path.exists(file_path):
      try:
        with open(file_path, 'w') as wf:
          wf.write(content)
        return True
      except OSError, e:
        print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
    return False

  def _htpasswdWrite(self, htpasswd_bin, parameter_dict, value):
    """Regenerate the htpasswd file for parameter_dict['user'] with the new
    password *value*, and mirror the clear-text value into
    parameter_dict['file']. Return True on success."""
    if not os.path.exists(parameter_dict['file']):
      return False
    # -cb: create/truncate the htpasswd file, take the password from argv.
    command = [htpasswd_bin, '-cb', parameter_dict['htpasswd'], parameter_dict['user'], value]
    process = subprocess.Popen(
      command,
      stdin=None,
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE
    )
    result = process.communicate()[0]
    if process.returncode != 0:
      print result
      return False
    # Keep the clear-text password in sync for later comparisons.
    with open(parameter_dict['file'], 'w') as pfile:
      pfile.write(value)
    return True

  def _httpdCorsDomainWrite(self, httpd_cors_file, httpd_gracefull_bin, cors_domain):
    """Rewrite the Apache CORS config for *cors_domain* (whitespace-separated
    domain list) and gracefully restart httpd. Return True on success or when
    nothing changed."""
    cors_string = ""
    cors_domain_list = cors_domain.split()
    # The previously applied domain list is remembered in "prev_<name>".
    old_httpd_cors_file = os.path.join(
      os.path.dirname(httpd_cors_file),
      'prev_%s' % os.path.basename(httpd_cors_file)
    )
    if os.path.exists(old_httpd_cors_file) and os.path.isfile(old_httpd_cors_file):
      try:
        with open(old_httpd_cors_file, 'r') as cors_file:
          # Unchanged domain list and consistent cors file: nothing to do.
          if cors_file.read() == cors_domain:
            if os.path.exists(httpd_cors_file) and (os.stat(httpd_cors_file).st_size > 0
                or (cors_domain == "" and os.stat(httpd_cors_file).st_size == 0)):
              # Skip if cors file is not empty
              return True
      except OSError, e:
        print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
    # Build an alternation of regex-escaped domains: "a\.com|b\.org".
    for domain in cors_domain_list:
      if cors_string:
        cors_string += '|'
      cors_string += re.escape(domain)
    try:
      with open(httpd_cors_file, 'w') as file:
        file.write('SetEnvIf Origin "^http(s)?://(.+\.)?(%s)$" origin_is=$0\n' % cors_string)
        file.write('Header always set Access-Control-Allow-Origin %{origin_is}e env=origin_is')
    except OSError, e:
      print "ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, str(e))
      return False
    # Save current cors domain list
    try:
      with open(old_httpd_cors_file, 'w') as cors_file:
        cors_file.write(cors_domain)
    except OSError, e:
      print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
      return False
    # Restart httpd process
    try:
      subprocess.call(httpd_gracefull_bin)
    except OSError, e:
      print "Failed to execute command %s.\n %s" % (httpd_gracefull_bin, str(e))
      return False
    return True

  def applyConfigChanges(self):
    """Apply every non-raw entry of the JSON config document.

    Returns a dict {key: bool} with the success status of each applied
    parameter, or {} when the config document does not exist.
    """
    parameter_config_file = os.path.join(
      os.path.dirname(self.config_json_file),
      'config.parameters.json'
    )
    if not os.path.exists(self.config_json_file) or not os.path.isfile(self.config_json_file):
      #print "ERROR: Config file doesn't exist... Exiting"
      return {}

    new_parameter_list = []
    parameter_list = []
    description_dict = {}
    result_dict = {}
    try:
      with open(self.config_json_file) as tmpfile:
        new_parameter_list = json.loads(tmpfile.read())
    except ValueError:
      print "Error: Couldn't parse json file %s" % self.config_json_file

    with open(parameter_config_file) as tmpfile:
      description_dict = json.loads(tmpfile.read())

    # Entries with an empty key are "raw" values: nothing to apply.
    for i in range(0, len(new_parameter_list)):
      key = new_parameter_list[i]['key']
      if key != '':
        description_entry = description_dict[key]
        if description_entry['type'] == 'file':
          result_dict[key] = self._fileWrite(
            description_entry['file'],
            new_parameter_list[i]['value']
          )
        elif description_entry['type'] == 'htpasswd':
          result_dict[key] = self._htpasswdWrite(
            self.htpasswd_bin,
            description_entry,
            new_parameter_list[i]['value']
          )
        elif description_entry['type'] == 'httpdcors':
          result_dict[key] = self._httpdCorsDomainWrite(
            description_entry['cors_file'],
            description_entry['gracefull_bin'],
            new_parameter_list[i]['value']
          )

    # Mirror the non-raw parameters into an INI-style file when requested.
    if (self.output_cfg_file):
      try:
        with open(self.output_cfg_file, 'w') as pfile:
          pfile.write('[public]\n')
          for parameter in new_parameter_list:
            if parameter['key']:
              pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
      except OSError, e:
        print "Error failed to create file %s" % self.output_cfg_file
        pass

    return result_dict
def main():
  """Apply pending monitor configuration changes from config.tmp.json.

  Runs applyConfigChanges() up to 4 times, sleeping 15 seconds between
  runs, so that a document dropped while this script runs is still picked
  up quickly. When every change of a run succeeded, config.tmp.json is
  promoted to config.json.
  """
  parser = parseArguments()
  parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
  config_file = os.path.join(parser.config_folder, 'config.json')

  # Run 4 times with sleep
  run_counter = 1
  max_run = 4
  sleep_time = 15

  instance = MonitorConfigWrite(
    parameter_tmp_file,
    parser.htpasswd_bin,
    parser.output_cfg_file)

  while True:
    result_dict = instance.applyConfigChanges()
    if result_dict != {}:
      # Promote the temporary document only if every change succeeded.
      status = all(result_dict.values())
      if status and os.path.exists(parameter_tmp_file):
        try:
          # BUG FIX: only unlink config.json when it actually exists;
          # previously a missing config.json raised OSError and the
          # rename below was silently skipped on the first run.
          if os.path.exists(config_file):
            os.unlink(config_file)
        except OSError:
          # BUG FIX: the error message named parameter_tmp_file although
          # the file being removed is config_file.
          print("ERROR cannot remove file: %s" % config_file)
        else:
          os.rename(parameter_tmp_file, config_file)

    if run_counter == max_run:
      break
    else:
      run_counter += 1
      time.sleep(sleep_time)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import json
import psutil
import time
from shutil import copyfile
import glob
import argparse
import traceback
def parseArguments():
  """
  Build the command line parser for the promise runner.

  Note: unlike its siblings, this returns the *parser* itself, not the
  parsed arguments.
  """
  parser = argparse.ArgumentParser()
  # Declared data-driven: (flag, add_argument keyword options).
  for flag, options in (
      ('--pid_path',
       dict(help='Path where the pid of this process will be writen.')),
      ('--output',
       dict(help='The Path of file where Json result of this promise will be saved.')),
      ('--promise_script',
       dict(help='Promise script to execute.')),
      ('--promise_name',
       dict(help='Title to give to this promise.')),
      ('--promise_type',
       dict(default='status',
            help='Type of promise to execute. [status, report].')),
      ('--monitor_url',
       dict(help='Monitor Instance website URL.')),
      ('--history_folder',
       dict(help='Path where old result file will be placed before generate a new json result file.')),
      ('--instance_name',
       dict(default='UNKNOWN Software Instance',
            help='Software Instance name.')),
      ('--hosting_name',
       dict(default='UNKNOWN Hosting Subscription',
            help='Hosting Subscription name.')),
  ):
    parser.add_argument(flag, **options)
  return parser
def runpromise(parser):
  """Run one promise (or report) script and store its result as JSON.

  *parser* is the parsed argparse namespace. Refuses to run (returns 1)
  when a previous run with the same pid file is still alive; otherwise
  executes the script, archives the previous result into the history
  folder and writes the new status JSON to parser.output.
  """
  if os.path.exists(parser.pid_path):
    # A pid file exists: check whether the previous run is still alive.
    with open(parser.pid_path, "r") as pidfile:
      try:
        # Pids fit in 6 digits on usual Linux defaults; read at most that.
        pid = int(pidfile.read(6))
      except ValueError:
        pid = None
      # /proc lookup is Linux-specific.
      if pid and os.path.exists("/proc/" + str(pid)):
        print("A process is already running with pid " + str(pid))
        return 1
  start_date = ""
  with open(parser.pid_path, "w") as pidfile:
    process = executeCommand(parser.promise_script)
    ps_process = psutil.Process(process.pid)
    start_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ps_process.create_time()))
    pidfile.write(str(process.pid))

  status_json = generateStatusJsonFromProcess(process, start_date=start_date)

  # Metadata identifying this promise result for the monitor frontend.
  status_json['_links'] = {"monitor": {"href": parser.monitor_url}}
  status_json['title'] = parser.promise_name
  status_json['instance'] = parser.instance_name
  status_json['hosting_subscription'] = parser.hosting_name
  status_json['type'] = parser.promise_type

  # Save the lastest status change date (needed for rss)
  status_json['change-time'] = ps_process.create_time()
  if os.path.exists(parser.output):
    with open(parser.output) as f:
      try:
        last_result = json.loads(f.read())
        # Same status as before: keep the original change timestamp.
        # NOTE(review): dict.has_key is Python-2 only.
        if status_json['status'] == last_result['status'] and last_result.has_key('change-time'):
          status_json['change-time'] = last_result['change-time']
      except ValueError:
        pass

  # Archive the previous result before overwriting it.
  updateStatusHistoryFolder(
    parser.promise_name,
    parser.output,
    parser.history_folder,
    parser.promise_type
  )
  with open(parser.output, "w") as outputfile:
    json.dump(status_json, outputfile)
  os.remove(parser.pid_path)
def updateStatusHistoryFolder(name, status_file, history_folder, promise_type):
  """Archive the current *status_file* into *history_folder*.

  For 'status' promises the result is appended to <name>.history.json
  (a JSON object {"date": ..., "data": [...]}); for 'report' promises the
  result file is simply copied over as <name>.history.json.
  Silently returns when the status file or the history folder is missing.
  """
  history_path = os.path.join(history_folder)
  if not os.path.exists(status_file):
    return
  if not os.path.exists(history_folder):
    return
  if not os.path.exists(history_path):
    try:
      os.makedirs(history_path)
    except OSError, e:
      # Ignore a concurrent creation of the same directory.
      if e.errno == os.errno.EEXIST and os.path.isdir(history_path):
        pass
      else: raise
  with open(status_file, 'r') as sf:
    try:
      status_dict = json.loads(sf.read())
    except ValueError:
      traceback.print_exc()
      return

  if promise_type == 'status':
    filename = '%s.history.json' % name
    history_file = os.path.join(history_path, filename)
    # Remove links from history (not needed)
    status_dict.pop('_links', None)
    if not os.path.exists(history_file):
      # First entry: create the wrapper object with a one-element list.
      with open(history_file, 'w') as f_history:
        data_dict = {
          "date": time.time(),
          "data": [status_dict]
        }
        f_history.write(json.dumps(data_dict))
    else:
      # Remove useless informations
      status_dict.pop('hosting_subscription', '')
      status_dict.pop('title', '')
      status_dict.pop('instance', '')
      status_dict.pop('type', '')
      # Append in place: seek over the trailing "]}" of the existing JSON
      # and rewrite it as ",<new entry>]}".
      with open (history_file, mode="r+") as f_history:
        f_history.seek(0,2)
        position = f_history.tell() -2
        f_history.seek(position)
        #f_history.write(',%s]}' % str(status_dict))
        f_history.write('%s}' % ',{}]'.format(json.dumps(status_dict)))
  elif promise_type == 'report':
    # keep_item_amount = 3
    filename = '%s.history.json' % (
      name)
    copyfile(status_file, os.path.join(history_path, filename))
    """# Don't let history foler grow too much, keep xx files
    file_list = filter(os.path.isfile,
        glob.glob("%s/*.%s.history.json" % (history_path, promise_type))
      )
    file_count = len(file_list)
    if file_count > keep_item_amount:
      file_list.sort(key=lambda x: os.path.getmtime(x))
      while file_count > keep_item_amount:
        to_delete = file_list.pop(0)
        try:
          os.unlink(to_delete)
          file_count -= 1
        except OSError:
          raise"""
def generateStatusJsonFromProcess(process, start_date=None, title=None):
  """Build a status dictionary from a finished promise process.

  Waits for *process* (a Popen-like object) to finish, then reports
  "ERROR" for a non-zero return code and "OK" otherwise. Stderr is
  preferred over stdout as the message; start_date and title are copied
  in when provided.
  """
  stdout, stderr = process.communicate()
  status_json = {
    "status": "ERROR" if process.returncode != 0 else "OK",
  }
  # stderr wins over stdout; no message key at all when both are empty.
  message = stderr or stdout
  if message:
    status_json["message"] = message
  if start_date:
    status_json["start-date"] = start_date
  if title:
    status_json["title"] = title
  return status_json
def executeCommand(args):
  """Start the promise command and return its Popen object, with stdout
  and stderr captured through pipes."""
  process = subprocess.Popen(
    args,
    #cwd=instance_path,
    #env=None if sys.platform == 'cygwin' else {},
    stdin=None,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
  )
  return process
def main():
  """Command line entry point: parse arguments, run the promise, exit
  with its return value."""
  argument_parser = parseArguments()
  sys.exit(runpromise(argument_parser.parse_args()))
import sys
import os
import json
from datetime import datetime
import base64
import hashlib
import PyRSS2Gen
import argparse
def parseArguments():
  """
  Parse arguments for monitor Rss Generator.
  """
  parser = argparse.ArgumentParser()
  # Declared data-driven: (flag, add_argument keyword options).
  for flag, options in (
      ('--items_folder',
       dict(help='Path where to get *.status.json files which contain result of promises.')),
      ('--output',
       dict(help='The Path of file where feed file will be saved.')),
      ('--feed_url',
       dict(help='Url of this feed file.')),
      ('--public_url',
       dict(help='Monitor Instance public URL.')),
      ('--private_url',
       dict(help='Monitor Instance private URL.')),
      ('--instance_name',
       dict(default='UNKNOW Software Instance',
            help='Software Instance name.')),
      ('--hosting_name',
       dict(default='',
            help='Hosting Subscription name.')),
  ):
    parser.add_argument(flag, **options)
  return parser.parse_args()
def getKey(item):
  """Return the publication date used to order RSS items."""
  publication_date = item.pubDate
  return publication_date
def genrss():
  """Generate the monitor RSS feed from promise results.

  Reads every *.status.json file from --items_folder, builds one RSS item
  per promise result (ordered by status change date) and writes the feed
  XML to --output.
  """
  parser = parseArguments()
  rss_item_list = []
  report_date = datetime.utcnow()
  for filename in os.listdir(parser.items_folder):
    if filename.endswith(".status.json"):
      filepath = os.path.join(parser.items_folder, filename)
      result_dict = None
      try:
        result_dict = json.load(open(filepath, "r"))
      except ValueError:
        print("Failed to load json file: %s" % filepath)
        continue
      description = result_dict.get('message', '')
      # 'change-time' is the timestamp of the last status change.
      event_time = datetime.fromtimestamp(result_dict['change-time'])
      rss_item = PyRSS2Gen.RSSItem(
        categories = [result_dict['status']],
        source = PyRSS2Gen.Source(result_dict['title'], parser.public_url),
        title = '[%s] %s' % (result_dict['status'], result_dict['title']),
        comments = description,
        description = "%s: %s\n%s" % (event_time, result_dict['status'], description),
        link = parser.private_url,
        pubDate = event_time,
        guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (parser.hosting_name, result_dict['title'])))
      )
      rss_item_list.append(rss_item)

  ### Build the rss feed
  # BUG FIX: sorted() returns a new list and leaves its argument untouched;
  # the previous code discarded the result, so items were never sorted.
  rss_item_list.sort(key=getKey)
  rss_feed = PyRSS2Gen.RSS2 (
    title = parser.instance_name,
    link = parser.feed_url,
    description = parser.hosting_name,
    lastBuildDate = report_date,
    items = rss_item_list
    )
  with open(parser.output, 'w') as frss:
    frss.write(rss_feed.to_xml())
def main():
  """Entry point: generate the RSS feed and exit with its return value."""
  return_code = genrss()
  exit(return_code)
# -*- coding: utf-8 -*-
import os, time
import sys
import shutil
import tempfile
import unittest
import json
from slapos.monitor.monitor import Monitoring
class MonitorBootstrapTest(unittest.TestCase):
  """Integration tests for Monitoring.bootstrapMonitor.

  Each test builds a throw-away monitor instance tree in a temporary
  directory, runs the bootstrap and inspects the generated cron entries,
  symlinks, OPML feed and configuration documents.
  """

  def setUp(self):
    """Create the instance folder layout and the monitor configuration."""
    self.base_dir = tempfile.mkdtemp()
    os.mkdir(os.path.join(self.base_dir, 'promise'))
    os.mkdir(os.path.join(self.base_dir, 'public'))
    os.mkdir(os.path.join(self.base_dir, 'private'))
    os.mkdir(os.path.join(self.base_dir, 'cron.d'))
    os.mkdir(os.path.join(self.base_dir, 'logrotate.d'))
    os.mkdir(os.path.join(self.base_dir, 'monitor-report'))
    os.mkdir(os.path.join(self.base_dir, 'webdav'))
    os.mkdir(os.path.join(self.base_dir, 'run'))
    self.writeContent(os.path.join(self.base_dir, 'param'), '12345')
    self.writeContent(os.path.join(self.base_dir, '.monitor_pwd'), 'bcuandjy')
    self.writeContent(os.path.join(self.base_dir, 'test-httpd-cors.cfg'), '')
    self.writeContent(os.path.join(self.base_dir, 'monitor-htpasswd'), '12345')
    self.monitor_config_file = os.path.join(self.base_dir, 'monitor.conf')
    # Values substituted into the monitor_conf template below.
    self.monitor_config_dict = dict(
      base_dir=self.base_dir,
      root_title="Monitor ROOT",
      title="Monitor",
      url_list="",
      base_url="https://monitor.test.com",
      public_path_list="",
      private_path_list="",
      promise_run_script="/bin/echo",
      collect_run_script="/bin/echo",
    )
    self.monitor_conf = """[monitor]
parameter-file-path = %(base_dir)s/knowledge0.cfg
promise-folder-list = %(base_dir)s/promise
private-folder = %(base_dir)s/private
public-folder = %(base_dir)s/public
public-path-list = %(public_path_list)s
private-path-list = %(private_path_list)s
crond-folder = %(base_dir)s/cron.d
logrotate-folder = %(base_dir)s/logrotate.d
report-folder = %(base_dir)s/monitor-report
root-title = %(root_title)s
parameter-list =
  raw monitor-user admin
  file sample %(base_dir)s/param
  htpasswd monitor-password %(base_dir)s/.monitor_pwd admin %(base_dir)s/monitor-htpasswd
  httpdcors cors-domain %(base_dir)s/test-httpd-cors.cfg /bin/echo
webdav-folder = %(base_dir)s/webdav
collect-script = %(collect_run_script)s
python = python
monitor-url-list = %(url_list)s
collector-db =
base-url = %(base_url)s
title = %(title)s
service-pid-folder = %(base_dir)s/run
promise-output-file = %(base_dir)s/monitor-bootstrap-status
promise-runner = %(promise_run_script)s"""
    # Expected OPML outline entry for a single monitor url.
    self.opml_outline = """<outline text="Monitoring RSS Feed list"><outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(base_url)s/public/feed" xmlUrl="%(base_url)s/public/feed" url="%(base_url)s/share/jio_private/" />"""

  def tearDown(self):
    """Remove the temporary instance tree."""
    if os.path.exists(self.base_dir):
      shutil.rmtree(self.base_dir)

  def writeContent(self, file_path, config):
    """Create or overwrite *file_path* with *config*."""
    with open(file_path, 'w') as cfg:
      cfg.write(config)

  def configPromises(self, amount):
    """Create *amount* executable dummy promise scripts."""
    promise_dir = os.path.join(self.base_dir, 'promise')
    promse_content = "/bin/bash echo something"
    for index in range(1, amount+1):
      promise_file = os.path.join(promise_dir, 'monitor_promise-%s' % index)
      self.writeContent(promise_file, promse_content)
      os.chmod(promise_file, 0755)

  def configReports(self, amount):
    """Create *amount* executable dummy report scripts."""
    promise_dir = os.path.join(self.base_dir, 'monitor-report')
    promse_content = "/bin/bash echo something"
    for index in range(1, amount+1):
      promise_file = os.path.join(promise_dir, 'monitor_report-%s' % index)
      self.writeContent(promise_file, promse_content)
      os.chmod(promise_file, 0755)

  def checkOPML(self, url_list):
    """Assert the generated OPML file contains an outline for every url."""
    opml_title = "<title>%(root_title)s</title>" % self.monitor_config_dict
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'public/feeds')))
    with open(os.path.join(self.base_dir, 'public/feeds')) as f:
      opml_content = f.read()
    self.assertTrue(opml_title in opml_content)
    for url in url_list:
      opml_outline = self.opml_outline % dict(
        title=self.monitor_config_dict['title'],
        base_url=url)
      self.assertTrue(opml_outline in opml_content)

  def check_promises(self):
    """Assert the promises cron file has one correct line per script."""
    promise_entry = '* * * * * %(promise_run_script)s --pid_path "%(promise_pid)s" --output "%(promise_output)s" --promise_script "%(promise_executable)s" --promise_name "%(promise_name)s" --monitor_url "%(base_url)s/share/jio_private/" --history_folder "%(base_dir)s/public" --instance_name "%(title)s" --hosting_name "%(root_title)s"'
    promise_dir = os.path.join(self.base_dir, 'promise')
    promise_cron = os.path.join(self.base_dir, 'cron.d', 'monitor-promises')
    self.assertTrue(os.path.exists(promise_cron))
    with open(promise_cron) as cronf:
      promise_command_list = cronf.read()
    for filename in os.listdir(promise_dir):
      promise_dict = dict(
        promise_pid=os.path.join(self.base_dir, 'run', '%s.pid' % filename),
        promise_output=os.path.join(self.base_dir, 'public', '%s.status.json' % filename),
        promise_executable=os.path.join(promise_dir, filename),
        promise_name=filename
      )
      promise_dict.update(self.monitor_config_dict)
      entry_line = promise_entry % promise_dict
      self.assertTrue(entry_line in promise_command_list)

  def check_report(self):
    """Assert the reports cron file has one correct line per script."""
    promise_entry = '* * * * * %(promise_run_script)s --pid_path "%(promise_pid)s" --output "%(promise_output)s" --promise_script "%(promise_executable)s" --promise_name "%(promise_name)s" --monitor_url "%(base_url)s/share/jio_private/" --history_folder "%(data_dir)s" --instance_name "%(title)s" --hosting_name "%(root_title)s" --promise_type "report"'
    promise_dir = os.path.join(self.base_dir, 'monitor-report')
    data_dir = os.path.join(self.base_dir, 'private', 'data', '.jio_documents')
    promise_cron = os.path.join(self.base_dir, 'cron.d', 'monitor-reports')
    self.assertTrue(os.path.exists(promise_cron))
    with open(promise_cron) as cronf:
      promise_command_list = cronf.read()
    for filename in os.listdir(promise_dir):
      promise_dict = dict(
        promise_pid=os.path.join(self.base_dir, 'run', '%s.pid' % filename),
        promise_output=os.path.join(self.base_dir, 'private', '%s.report.json' % filename),
        promise_executable=os.path.join(promise_dir, filename),
        promise_name=filename,
        data_dir=data_dir
      )
      promise_dict.update(self.monitor_config_dict)
      entry_line = promise_entry % promise_dict
      self.assertTrue(entry_line in promise_command_list)

  def check_folder_equals(self, source, destination):
    """Assert both folders list the same entries."""
    self.assertTrue(os.path.isdir(source))
    if not destination.endswith('/'):
      destination += '/'
    dest_file_list = os.listdir(destination)
    source_file_list = os.listdir(source)
    self.assertEquals(dest_file_list, source_file_list)

  def check_symlink(self, source, destination):
    """Assert *destination* is a symlink matching *source* (same basename,
    same content when it is a directory)."""
    if source.endswith('/'):
      source.rstrip('/')
    if destination.endswith('/'):
      destination.rstrip('/')
    self.assertTrue(os.path.islink(destination))
    source_basename = os.path.basename(source)
    dest_basename = os.path.basename(destination)
    self.assertEquals(source_basename, dest_basename)
    if os.path.isdir(source):
      self.check_folder_equals(source, destination)

  def test_monitor_bootstrap_empty(self):
    """Bootstrap with no promise/report still succeeds and writes the OPML."""
    config_content = self.monitor_conf % self.monitor_config_dict
    self.writeContent(self.monitor_config_file, config_content)
    instance = Monitoring(self.monitor_config_file)
    instance.bootstrapMonitor()
    promise_file = os.path.join(self.base_dir, 'monitor-bootstrap-status')
    self.assertTrue(os.path.exists(promise_file))
    self.checkOPML([self.monitor_config_dict['base_url']])

  def test_monitor_bootstrap_check_folder(self):
    """Configured public/private paths are symlinked into the instance."""
    folder_one = os.path.join(self.base_dir, 'folderOne')
    folder_two = os.path.join(self.base_dir, 'folderTwo')
    file_public = os.path.join(self.base_dir, 'file_public')
    private_one = os.path.join(self.base_dir, 'privateOne')
    private_two = os.path.join(self.base_dir, 'privateTwo')
    file_private = os.path.join(self.base_dir, 'file_private')
    os.mkdir(folder_one)
    os.mkdir(folder_two)
    os.mkdir(private_one)
    os.mkdir(private_two)
    self.writeContent(file_public, 'toto')
    self.writeContent(private_two+'/toto1', 'toto')
    self.writeContent(private_two+'/toto2', 'toto')
    self.writeContent(private_two+'/toto3', 'toto')
    self.writeContent(folder_two+'/toto1', 'toto')
    self.writeContent(folder_two+'/toto2', 'toto')
    self.writeContent(folder_two+'/toto3', 'toto')
    self.monitor_config_dict['public_path_list'] = '\n  '.join([
      folder_one,
      folder_two,
      file_public
    ])
    self.monitor_config_dict['private_path_list'] = '\n  '.join([
      private_one,
      private_two,
      file_private
    ])
    config_content = self.monitor_conf % self.monitor_config_dict
    self.writeContent(self.monitor_config_file, config_content)
    instance = Monitoring(self.monitor_config_file)
    instance.bootstrapMonitor()
    promise_file = os.path.join(self.base_dir, 'monitor-bootstrap-status')
    self.assertTrue(os.path.exists(promise_file))
    self.checkOPML([self.monitor_config_dict['base_url']])
    # Check jio webdav folder
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'webdav/jio_public')))
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'webdav/jio_private')))
    # check symlink configured
    self.check_symlink(folder_one, os.path.join(self.base_dir, 'public', 'folderOne'))
    self.check_symlink(folder_two, os.path.join(self.base_dir, 'public', 'folderTwo'))
    self.check_symlink(file_public, os.path.join(self.base_dir, 'public', 'file_public'))
    self.check_symlink(private_one, os.path.join(self.base_dir, 'private', 'privateOne'))
    self.check_symlink(private_two, os.path.join(self.base_dir, 'private', 'privateTwo'))
    self.check_symlink(file_private, os.path.join(self.base_dir, 'private', 'file_private'))
    # public and private folder are also accessible via webdav
    self.check_symlink(os.path.join(self.base_dir, 'public'),
      os.path.join(self.base_dir, 'webdav', 'public'))
    self.check_symlink(os.path.join(self.base_dir, 'private'),
      os.path.join(self.base_dir, 'webdav', 'private'))
    # check that configuration folder exist
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'private/config')))
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'private/data')))
    self.assertTrue(os.path.exists(os.path.join(self.base_dir, 'private/documents')))

  def test_monitor_bootstrap_promises(self):
    """Promise cron entries follow add/remove of promise scripts."""
    self.configPromises(3)
    config_content = self.monitor_conf % self.monitor_config_dict
    self.writeContent(self.monitor_config_file, config_content)
    instance = Monitoring(self.monitor_config_file)
    instance.bootstrapMonitor()
    promise_file = os.path.join(self.base_dir, 'monitor-bootstrap-status')
    self.assertTrue(os.path.exists(promise_file))
    self.checkOPML([self.monitor_config_dict['base_url']])
    self.check_promises()
    # Check update promises_list
    self.configPromises(5) # Add two promises
    os.unlink(os.path.join(self.base_dir, 'promise', 'monitor_promise-2')) # drop promise number 2
    instance2 = Monitoring(self.monitor_config_file)
    instance2.bootstrapMonitor()
    self.assertTrue(os.path.exists(promise_file))
    self.check_promises()

  def test_monitor_bootstrap_report(self):
    """Report cron entries follow add/remove of report scripts."""
    self.configReports(3)
    config_content = self.monitor_conf % self.monitor_config_dict
    self.writeContent(self.monitor_config_file, config_content)
    instance = Monitoring(self.monitor_config_file)
    instance.bootstrapMonitor()
    promise_file = os.path.join(self.base_dir, 'monitor-bootstrap-status')
    self.assertTrue(os.path.exists(promise_file))
    self.checkOPML([self.monitor_config_dict['base_url']])
    self.check_report()
    # Check update promises_list
    self.configReports(5) # Add two promises
    os.unlink(os.path.join(self.base_dir, 'monitor-report', 'monitor_report-1')) # drop promise number 2
    instance2 = Monitoring(self.monitor_config_file)
    instance2.bootstrapMonitor()
    self.assertTrue(os.path.exists(promise_file))
    self.check_report()

  def test_monitor_bootstrap_genconfig(self):
    """The generated config.json exposes one entry per declared parameter."""
    config_content = self.monitor_conf % self.monitor_config_dict
    self.writeContent(self.monitor_config_file, config_content)
    instance = Monitoring(self.monitor_config_file)
    instance.bootstrapMonitor()
    promise_file = os.path.join(self.base_dir, 'monitor-bootstrap-status')
    self.assertTrue(os.path.exists(promise_file))
    self.checkOPML([self.monitor_config_dict['base_url']])
    instance_config = os.path.join(instance.config_folder, '.jio_documents', 'config.json')
    self.assertTrue(os.path.exists(instance_config))
    config_content = json.loads(open(instance_config).read())
    self.assertEquals(len(config_content), 4)
    # Raw parameters have an empty key; the rest are editable entries.
    key_list = ['', 'sample', 'monitor-password', 'cors-domain']
    for parameter in config_content:
      if parameter['key'] in key_list:
        key_list.pop(key_list.index(parameter['key']))
      if parameter['key'] == '':
        self.assertEquals(parameter, dict(
          key="",
          title="monitor-user",
          value="admin"))
      if parameter['key'] == 'sample':
        self.assertEquals(parameter, dict(
          key="sample",
          title="sample",
          value="12345"))
      if parameter['key'] == 'monitor-password':
        self.assertEquals(parameter, dict(
          key="monitor-password",
          title="monitor-password",
          value="bcuandjy"))
      if parameter['key'] == 'cors-domain':
        self.assertEquals(parameter, dict(
          key="cors-domain",
          title="cors-domain",
          value=""))
    self.assertEquals(key_list, [])
# -*- coding: utf-8 -*-
import os, time
import sys
import re
import shutil
import tempfile
import unittest
import json
from slapos.monitor.monitor_config_write import MonitorConfigWrite
class MonitorConfigDocument(unittest.TestCase):
  def setUp(self):
    """Create a temporary config folder, the parameter description document
    and the sample files each parameter type writes to."""
    self.base_dir = tempfile.mkdtemp()
    self.config_dir = os.path.join(self.base_dir, 'config')
    self.config_path = os.path.join(self.config_dir, 'config.json')
    os.mkdir(self.config_dir)
    self.httpd_passwd = "btouhjng"
    self.file_content = "wjkqelod"
    # Fake htpasswd binary: records its arguments into monitor-htpasswd.
    self.httpd_passwd_bin = os.path.join(self.base_dir, 'htpasswd')
    self.httpd_passwd_script = """#!/bin/sh
echo "htpasswd $@" > %s/monitor-htpasswd
""" % self.base_dir
    # Parameter descriptions, keyed like config.parameters.json entries.
    self.parameter_dict = {
      "cors-domain":
        {
          "gracefull_bin": ["/bin/echo", "restarted"],
          "type": "httpdcors",
          "cors_file": "%s/test-httpd-cors.cfg" % self.base_dir
        },
      "httpd-password":
        {
          "htpasswd": "%s/monitor-htpasswd" % self.base_dir,
          "type": "htpasswd",
          "user": "admin",
          "file": "%s/.httpd_pwd_real" % self.base_dir
        },
      "from-file":
        {
          "type": "file",
          "file": "%s/content" % self.base_dir
        }
    }
    # The JSON config document: raw entries (empty key) plus one entry
    # per parameter type declared above.
    self.config = [
      {
        "value": "raw content2",
        "key": "",
        "title": "raw-content2"
      },
      {
        "value": "%s" % self.httpd_passwd,
        "key": "httpd-password",
        "title": "httpd-password"
      },
      {
        "value": "%s" % self.file_content,
        "key": "from-file",
        "title": "from-file"
      },
      {
        "value": "",
        "key": "cors-domain",
        "title": "cors-domain"
      },
      {
        "value": "raw content",
        "key": "",
        "title": "raw-value"
      }
    ]
    self.writeContent("%s/test-httpd-cors.cfg" % self.base_dir, "")
    self.writeContent("%s/monitor-htpasswd" % self.base_dir, "")
    self.writeContent("%s/content" % self.base_dir, self.file_content)
    self.writeContent("%s/.httpd_pwd_real" % self.base_dir, self.httpd_passwd)
    self.writeContent(self.httpd_passwd_bin, self.httpd_passwd_script)
    os.chmod(self.httpd_passwd_bin, 0755)
def tearDown(self):
if os.path.exists(self.base_dir):
shutil.rmtree(self.base_dir)
def writeContent(self, file_path, config):
with open(file_path, 'w') as cfg:
cfg.write(config)
def generate_cors_string(self, cors_domain_list):
cors_string = ""
for domain in cors_domain_list:
if cors_string:
cors_string += '|'
cors_string += re.escape(domain)
cors_string = 'SetEnvIf Origin "^http(s)?://(.+\.)?(%s)$" origin_is=$0\n' % cors_string
cors_string += 'Header always set Access-Control-Allow-Origin %{origin_is}e env=origin_is'
return cors_string
def check_config(self):
config_parameter = os.path.join(self.config_dir, 'config.parameters.json')
config_parameter_json = json.load(open(config_parameter))
config_json = json.load(open(self.config_path))
for config in config_json:
if config["key"]:
self.assertTrue(config_parameter_json.has_key(config["key"]))
parameter = config_parameter_json[config["key"]]
else:
continue
if config["key"] == 'from-file':
self.assertTrue(os.path.exists(parameter['file']))
self.assertEqual(config["value"], open(parameter['file']).read())
elif config["key"] == 'httpd-password':
http_passwd = "%s/monitor-htpasswd" % self.base_dir
#XXX where \n bellow come from ?
command = 'htpasswd -cb %s admin %s%s' % (http_passwd, config["value"], '\n')
self.assertTrue(os.path.exists(parameter['file']))
self.assertTrue(os.path.exists(http_passwd))
self.assertEquals(config["value"], open(parameter['file']).read())
self.assertEquals(open(http_passwd).read(), command)
elif config["key"] == 'cors-domain':
cors_file = "%s/test-httpd-cors.cfg" % self.base_dir
self.assertTrue(os.path.exists(cors_file))
cors_string = self.generate_cors_string(config["value"].split())
self.assertEquals(cors_string, open(cors_file).read())
def check_cfg_config(self, config_list):
cfg_output = os.path.join(self.config_dir, 'config.cfg')
config_cfg = "[public]\n"
for config in config_list:
if config['key']:
config_cfg += '%s = %s\n' % (config['key'], config['value'])
with open(cfg_output) as cfg:
self.assertEqual(cfg.read(), config_cfg)
def test_write_config_default(self):
self.writeContent(self.config_path, json.dumps(self.config))
self.writeContent(os.path.join(self.config_dir, 'config.parameters.json'), json.dumps(self.parameter_dict))
cfg_output = os.path.join(self.config_dir, 'config.cfg')
instance = MonitorConfigWrite(
self.config_path,
self.httpd_passwd_bin,
cfg_output)
result = instance.applyConfigChanges()
self.assertTrue(os.path.exists(cfg_output))
# Check result of non raw parameter edition
self.assertEquals(result, {'cors-domain': True, 'from-file': True, 'httpd-password': True})
self.check_config()
self.check_cfg_config(self.config)
def test_write_config_parts(self):
# remove cors config
for element in self.config:
if element['key'] == "cors-domain":
element['key'] = ""
self.parameter_dict.pop("cors-domain")
self.writeContent(self.config_path, json.dumps(self.config))
self.writeContent(os.path.join(self.config_dir, 'config.parameters.json'), json.dumps(self.parameter_dict))
cfg_output = os.path.join(self.config_dir, 'config.cfg')
instance = MonitorConfigWrite(
self.config_path,
self.httpd_passwd_bin,
cfg_output)
result = instance.applyConfigChanges()
self.assertTrue(os.path.exists(cfg_output))
# Check result of non raw parameter edition
self.assertEquals(result, {'from-file': True, 'httpd-password': True})
self.check_config()
self.check_cfg_config(self.config)
def test_write_config_edit_values(self):
self.writeContent(self.config_path, json.dumps(self.config))
self.writeContent(os.path.join(self.config_dir, 'config.parameters.json'), json.dumps(self.parameter_dict))
cfg_output = os.path.join(self.config_dir, 'config.cfg')
instance = MonitorConfigWrite(
self.config_path,
self.httpd_passwd_bin,
cfg_output)
result = instance.applyConfigChanges()
self.assertTrue(os.path.exists(cfg_output))
self.assertEquals(result, {'cors-domain': True, 'from-file': True, 'httpd-password': True})
self.check_config()
self.check_cfg_config(self.config)
for config in self.config:
if config["key"] != "":
config["value"] = "changed.value"
self.writeContent(self.config_path, json.dumps(self.config))
result = instance.applyConfigChanges()
self.assertEquals(result, {'cors-domain': True, 'from-file': True, 'httpd-password': True})
self.check_config()
self.check_cfg_config(self.config)
# Add new domain in cors domain
for config in self.config:
if config["key"] != "cors-domain":
config["value"] = "changed.value new.domain.com"
self.writeContent(self.config_path, json.dumps(self.config))
result = instance.applyConfigChanges()
self.assertEquals(result, {'cors-domain': True, 'from-file': True, 'httpd-password': True})
self.check_config()
self.check_cfg_config(self.config)
# -*- coding: utf-8 -*-
import os, time
import sys
import shutil
import tempfile
import unittest
import json
from datetime import datetime
from slapos.monitor.runpromise import *
class MonitorPromiseTest(unittest.TestCase):
  """Tests for slapos.monitor.runpromise.

  Each test installs a small shell script acting as a promise, runs it
  through ``runpromise`` and inspects the JSON result file it writes
  (plus, for "status" promises, the history file).
  """

  def setUp(self):
    # Scratch tree reproducing the monitor instance folder layout.
    self.base_dir = tempfile.mkdtemp()
    self.promise_dir = os.path.join(self.base_dir, 'promise')
    self.report_dir = os.path.join(self.base_dir, 'report')
    self.public_dir = os.path.join(self.base_dir, 'public')
    self.private_dir = os.path.join(self.base_dir, 'private')
    self.run_dir = os.path.join(self.base_dir, 'run')
    os.mkdir(self.promise_dir)
    os.mkdir(self.public_dir)
    os.mkdir(self.private_dir)
    os.mkdir(self.report_dir)
    os.mkdir(self.run_dir)

  def tearDown(self):
    # Remove the whole scratch tree created in setUp.
    if os.path.exists(self.base_dir):
      shutil.rmtree(self.base_dir)

  def writePromiseOK(self, name):
    """Install an always-succeeding promise script; return its path."""
    content = """#!/bin/sh
echo "success"
exit 0
"""
    promise_path = os.path.join(self.promise_dir, name)
    self.writeContent(promise_path, content)
    os.chmod(promise_path, 0755)
    return promise_path

  def writePromiseNOK(self, name):
    """Install an always-failing promise script (exit 2); return its path."""
    content = """#!/bin/sh
echo "failed"
exit 2
"""
    promise_path = os.path.join(self.promise_dir, name)
    self.writeContent(promise_path, content)
    os.chmod(promise_path, 0755)
    return promise_path

  def writeContent(self, file_path, config):
    # Write `config` verbatim into `file_path`.
    with open(file_path, 'w') as cfg:
      cfg.write(config)

  def getPromiseParser(self, name, promise_path, promise_type):
    """Build the parsed argument namespace runpromise expects.

    Reports are written to the private folder, plain status results to
    the public one; the history folder is always the public directory.
    """
    pid_path = os.path.join(self.run_dir, '%s.pid' % name)
    if promise_type == "report":
      output_path = os.path.join(self.private_dir, '%s.report.json' % name)
    else:
      output_path = os.path.join(self.public_dir, '%s.status.json' % name)
    promise_cmd = [
      '--pid_path',
      '%s' % pid_path, '--output', output_path,
      '--promise_script', promise_path,
      '--promise_name', name, '--promise_type', promise_type,
      '--monitor_url', 'https://monitor.test.com/share/jio_private/',
      '--history_folder', self.public_dir,
      '--instance_name', 'Monitor', '--hosting_name', 'Monitor ROOT']
    arg_parser = parseArguments()
    return arg_parser.parse_args(promise_cmd)

  def test_promise_OK(self):
    """A succeeding status promise yields OK and a single history entry."""
    promise = self.writePromiseOK('promise_1')
    parser = self.getPromiseParser('promise_1', promise, 'status')
    runpromise(parser)
    result_file = os.path.join(self.public_dir, 'promise_1.status.json')
    self.assertTrue(os.path.exists(result_file))
    result1 = json.loads(open(result_file).read())
    # Timestamps vary between runs: pop them before comparing the rest.
    change_time = result1.pop('change-time', 0)
    change_date = datetime.fromtimestamp(change_time)
    start_date = result1.pop('start-date')
    expected_result = {'status': 'OK', 'hosting_subscription': 'Monitor ROOT',
      'title': u'promise_1', 'instance': 'Monitor',
      '_links':
      {'monitor': {'href': 'https://monitor.test.com/share/jio_private/'}},
      'message': 'success\n', 'type': 'status'}
    self.assertEquals(expected_result, result1)
    # second run
    runpromise(parser)
    result2 = json.loads(open(result_file).read())
    change_time2 = result2.pop('change-time', 0)
    result2.pop('start-date', '2016-08-05 00:00:00')
    change_date2 = datetime.fromtimestamp(change_time2)
    self.assertEquals(expected_result, result2)
    # Status did not change, so change-time must be kept (second precision).
    self.assertEquals(change_date.strftime('%Y-%m-%d %H:%M:%S'),
      change_date2.strftime('%Y-%m-%d %H:%M:%S'))
    history_file = os.path.join(self.public_dir, 'promise_1.history.json')
    self.assertTrue(os.path.exists(history_file))
    history = json.load(open(history_file))
    self.assertTrue(history['date'] > change_time)
    # Two identical consecutive results produce a single history entry.
    self.assertTrue(len(history['data']) == 1)
    # The history entry is the first result minus its '_links' part.
    result1['change-time'] = change_time
    result1['start-date'] = start_date
    result1.pop('_links')
    self.assertEquals(history['data'][0], result1)

  def test_promise_NOK(self):
    """A failing status promise yields ERROR and a stable change-time."""
    promise = self.writePromiseNOK('promise_1')
    parser = self.getPromiseParser('promise_1', promise, 'status')
    runpromise(parser)
    result_file = os.path.join(self.public_dir, 'promise_1.status.json')
    self.assertTrue(os.path.exists(result_file))
    result1 = json.loads(open(result_file).read())
    change_time = result1.pop('change-time', 0)
    change_date = datetime.fromtimestamp(change_time)
    start_date = result1.pop('start-date')
    expected_result = {'status': 'ERROR', 'hosting_subscription': 'Monitor ROOT',
      'title': u'promise_1', 'instance': 'Monitor',
      '_links':
      {'monitor': {'href': 'https://monitor.test.com/share/jio_private/'}},
      'message': 'failed\n', 'type': 'status'}
    self.assertEquals(expected_result, result1)
    # second run
    runpromise(parser)
    result2 = json.loads(open(result_file).read())
    change_time2 = result2.pop('change-time', 0)
    result2.pop('start-date', '2016-08-05 00:00:00')
    change_date2 = datetime.fromtimestamp(change_time2)
    self.assertEquals(expected_result, result2)
    # Same failure twice: change-time must be preserved.
    self.assertEquals(change_date.strftime('%Y-%m-%d %H:%M:%S'),
      change_date2.strftime('%Y-%m-%d %H:%M:%S'))

  def test_promise_mixed(self):
    """OK then ERROR: the status flip must refresh change-time."""
    promise = self.writePromiseOK('promise_1')
    parser = self.getPromiseParser('promise_1', promise, 'status')
    runpromise(parser)
    result_file = os.path.join(self.public_dir, 'promise_1.status.json')
    self.assertTrue(os.path.exists(result_file))
    result1 = json.loads(open(result_file).read())
    change_time = result1.pop('change-time')
    change_date = datetime.fromtimestamp(change_time)
    start_date = result1.pop('start-date')
    expected_result = {'status': 'OK', 'hosting_subscription': 'Monitor ROOT',
      'title': u'promise_1', 'instance': 'Monitor',
      '_links':
      {'monitor': {'href': 'https://monitor.test.com/share/jio_private/'}},
      'message': 'success\n', 'type': 'status'}
    self.assertEquals(expected_result, result1)
    # second run with failure
    # Sleep past the second boundary so the two change dates can differ.
    time.sleep(2)
    promise = self.writePromiseNOK('promise_1')
    parser = self.getPromiseParser('promise_1', promise, 'status')
    expected_result['message'] = 'failed\n'
    expected_result['status'] = 'ERROR'
    runpromise(parser)
    result2 = json.loads(open(result_file).read())
    change_time2 = result2.pop('change-time')
    result2.pop('start-date')
    change_date2 = datetime.fromtimestamp(change_time2)
    self.assertEquals(expected_result, result2)
    self.assertNotEquals(change_date.strftime('%Y-%m-%d %H:%M:%S'),
      change_date2.strftime('%Y-%m-%d %H:%M:%S'))

  def test_report_OK(self):
    """A succeeding report promise writes into the private folder."""
    promise = self.writePromiseOK('sample_report')
    parser = self.getPromiseParser('sample_report', promise, 'report')
    runpromise(parser)
    result_file = os.path.join(self.private_dir, 'sample_report.report.json')
    self.assertTrue(os.path.exists(result_file))
    result1 = json.loads(open(result_file).read())
    change_time = result1.pop('change-time', 0)
    change_date = datetime.fromtimestamp(change_time)
    start_date = result1.pop('start-date')
    expected_result = {'status': 'OK', 'hosting_subscription': 'Monitor ROOT',
      'title': 'sample_report', 'instance': 'Monitor',
      '_links':
      {'monitor': {'href': 'https://monitor.test.com/share/jio_private/'}},
      'message': 'success\n', 'type': 'report'}
    self.assertEquals(expected_result, result1)
    # second run
    runpromise(parser)
    result2 = json.loads(open(result_file).read())
    change_time2 = result2.pop('change-time', 0)
    result2.pop('start-date', '2016-08-05 00:00:00')
    change_date2 = datetime.fromtimestamp(change_time2)
    self.assertEquals(expected_result, result2)
    self.assertEquals(change_date.strftime('%Y-%m-%d %H:%M:%S'),
      change_date2.strftime('%Y-%m-%d %H:%M:%S'))
    # Report history is a single JSON document (no 'data' list) and keeps
    # its '_links' part, unlike status history above.
    history_file = os.path.join(self.public_dir, 'sample_report.history.json')
    self.assertTrue(os.path.exists(history_file))
    history = json.load(open(history_file))
    result1['change-time'] = change_time
    result1['start-date'] = start_date
    #result1.pop('_links')
    self.assertEquals(history, result1)

  def test_report_mixed(self):
    """OK then ERROR for a report: the flip must refresh change-time."""
    promise = self.writePromiseOK('sample_report')
    parser = self.getPromiseParser('sample_report', promise, 'report')
    runpromise(parser)
    result_file = os.path.join(self.private_dir, 'sample_report.report.json')
    self.assertTrue(os.path.exists(result_file))
    result1 = json.loads(open(result_file).read())
    change_time = result1.pop('change-time', 0)
    change_date = datetime.fromtimestamp(change_time)
    start_date = result1.pop('start-date')
    expected_result = {'status': 'OK', 'hosting_subscription': 'Monitor ROOT',
      'title': 'sample_report', 'instance': 'Monitor',
      '_links':
      {'monitor': {'href': 'https://monitor.test.com/share/jio_private/'}},
      'message': 'success\n', 'type': 'report'}
    self.assertEquals(expected_result, result1)
    # second run with failure
    # Sleep past the second boundary so the two change dates can differ.
    time.sleep(2)
    promise = self.writePromiseNOK('sample_report')
    parser = self.getPromiseParser('sample_report', promise, 'report')
    expected_result['message'] = 'failed\n'
    expected_result['status'] = 'ERROR'
    runpromise(parser)
    result2 = json.loads(open(result_file).read())
    change_time2 = result2.pop('change-time')
    result2.pop('start-date')
    change_date2 = datetime.fromtimestamp(change_time2)
    self.assertEquals(expected_result, result2)
    self.assertNotEquals(change_date.strftime('%Y-%m-%d %H:%M:%S'),
      change_date2.strftime('%Y-%m-%d %H:%M:%S'))
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment