Commit 80a0aafe authored by Rafael Monnerat

Update Release Candidate

parents b77af1f2 7d279ba8
Changes
=======
1.0.121 (2019-09-12)
--------------------
- generic.mysql.wrap_update_mysql: prepare for MariaDB 10.4
- publish-early: process -init entries in specified order
- Partial support of Python 3
- Remove unused generic.mysql recipe
1.0.119 (2019-08-14)
--------------------
......
......@@ -33,5 +33,5 @@ environment =
[ghostscript-9]
<= ghostscript-common
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs925/ghostscript-9.25.tar.xz
md5sum = d5ac3f3d76cf82a549bafdf86d58395b
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs927/ghostscript-9.27.tar.xz
md5sum = dd531503dbbc524f73528359e2ea145c
......@@ -15,8 +15,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = false
url = https://packages.groonga.org/source/groonga/groonga-9.0.0.tar.gz
md5sum = 5475818c734dfc6414d209babea90921
url = https://packages.groonga.org/source/groonga/groonga-9.0.7.tar.gz
md5sum = a2697e46b7460fc81b418666c5abdcb4
# temporary patch to respect more tokens in natural language mode.
patches =
${:_profile_base_location_}/groonga.patch#9ed02fbe8400402d3eab47eee149978b
......@@ -46,8 +46,8 @@ environment =
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
shared = false
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.3.tar.gz
md5sum = ad30404cb9999a842e98f3902057b152
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.4.tar.gz
md5sum = effa67fb271d49810850a3b275d040f6
location = ${groonga:location}
configure-options =
--disable-static
......
......@@ -17,7 +17,7 @@ recipe = slapos.recipe.cmmi
url = http://ftp.de.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_3.81.orig.tar.gz
md5sum = 7c93b1ab4680eb21c2c13f4f47741e2d
patches =
${:_profile_base_location_}/make-dfsg_3.81-8.2.diff#fa77bb989a096fafbe7c78582e9415e3
${:_profile_base_location_}/make-dfsg_3.81-8.2.diff#320ce09344a8618b6c47ddb88c09cea9
patch-options = -p1
environment =
PATH=${patch:location}/bin:%(PATH)s
......@@ -1044,6 +1044,15 @@
# Autoconf setup
AC_CONFIG_AUX_DIR(config)
AC_CONFIG_SRCDIR(vpath.c)
@@ -354,7 +356,7 @@
#define GLOB_INTERFACE_VERSION 1
#if !defined _LIBC && defined __GNU_LIBRARY__ && __GNU_LIBRARY__ > 1
# include <gnu-versions.h>
-# if _GNU_GLOB_INTERFACE_VERSION == GLOB_INTERFACE_VERSION
+# if _GNU_GLOB_INTERFACE_VERSION >= GLOB_INTERFACE_VERSION
gnu glob
# endif
#endif
@@ -372,6 +374,24 @@
MAKE_HOST="$host"
AC_SUBST(MAKE_HOST)
......@@ -16744,7 +16753,15 @@
/* end confdefs.h. */
#include <features.h>
@@ -13629,13 +9246,13 @@
@@ -13622,20 +9239,20 @@
#define GLOB_INTERFACE_VERSION 1
#if !defined _LIBC && defined __GNU_LIBRARY__ && __GNU_LIBRARY__ > 1
# include <gnu-versions.h>
-# if _GNU_GLOB_INTERFACE_VERSION == GLOB_INTERFACE_VERSION
+# if _GNU_GLOB_INTERFACE_VERSION >= GLOB_INTERFACE_VERSION
gnu glob
# endif
#endif
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
......@@ -29,9 +29,13 @@ parts =
[mariadb]
recipe = slapos.recipe.cmmi
url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
version = 10.3.14
md5sum = b1b9628bbc3ff15e8f5cfc7896c73975
version = 10.3.18
md5sum = b3524c0825c3a1c255496daea38304a0
location = ${buildout:parts-directory}/${:_buildout_section_name_}
pre-configure =
set -e '\bSET(PLUGIN_AUTH_PAM YES)' cmake/build_configurations/mysql_release.cmake
grep -q "$@"
sed -i "/$1/d" "$2"
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
......@@ -80,8 +84,8 @@ post-install =
# mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users.
# http://mroonga.github.com/
recipe = slapos.recipe.cmmi
url = https://packages.groonga.org/source/mroonga/mroonga-9.00.tar.gz
md5sum = a1deff08a3649d8370436f1c903ed432
url = https://packages.groonga.org/source/mroonga/mroonga-9.05.tar.gz
md5sum = d81629fc6042a44ce56cc6990d563164
pre-configure = set -e
rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source
......
......@@ -60,6 +60,14 @@ repository = https://lab.nexedi.com/Daetalus/pyodide.git
location = ${buildout:parts-directory}/${:_buildout_section_name_}
branch = master
[get-pip]
recipe = slapos.recipe.build:download
# version 19.2.3
url = https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/${:filename}
md5sum = 6f489c199cb5be8a4b84c6bd7ad6e051
filename = get-pip.py
mode = 0755
[pyodide-script]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
......@@ -78,7 +86,7 @@ openssl_location = ${openssl:location}
gcc_bin_dir = ${gcc-8.2:location}/bin
python_bin_dir = ${python3.7:location}/bin
md5sum = 47ec6a091e503349d948760dc03bed51
pip_script = ${:_profile_base_location_}/get-pip.py
pip_script = ${get-pip:target}
script =
import subprocess, os, shutil
......
......@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '1.0.119'
version = '1.0.121'
name = 'slapos.cookbook'
long_description = open("README.rst").read() + "\n" + \
open("CHANGES.rst").read() + "\n"
......
......@@ -32,10 +32,10 @@ class WrapUpdateMySQL(GenericBaseRecipe):
self.createPythonScript(
self.options['output'],
__name__ + '.mysql.updateMysql',
[{
kw = {
'mysql_upgrade_binary': self.options['binary'],
'mysql_binary': self.options['mysql'],
'mysql_script_file': self.options['init-script'],
}]
}
),
]
......@@ -4,69 +4,45 @@ import time
import sys
import pytz
def updateMysql(conf):
def updateMysql(mysql_upgrade_binary, mysql_binary, mysql_script_file):
sleep = 30
is_succeed = False
try:
script_filename = conf.pop('mysql_script_file')
except KeyError:
pass
else:
assert 'mysql_script' not in conf
with open(script_filename) as script_file:
conf['mysql_script'] = script_file.read()
is_succeeded = False
with open(mysql_script_file) as script_file:
mysql_script = script_file.read()
mysql_list = mysql_binary, '-B'
mysql_tzinfo_to_sql_list = (
os.path.join(os.path.dirname(mysql_binary), 'mysql_tzinfo_to_sql'),
os.path.join(os.path.dirname(pytz.__file__), 'zoneinfo'),
)
while True:
while True:
mysql_upgrade_list = [conf['mysql_upgrade_binary'], '--user=root']
if 'socket' in conf:
mysql_upgrade_list.append('--socket=' + conf['socket'])
mysql_upgrade = subprocess.Popen(mysql_upgrade_list, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
mysql_upgrade = subprocess.Popen(mysql_upgrade_binary,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = mysql_upgrade.communicate()[0]
if mysql_upgrade.returncode is None:
mysql_upgrade.kill()
if mysql_upgrade.returncode == 0:
print "MySQL database upgraded with result:\n%s" % result
elif 'is already upgraded' in result:
print "No need to upgrade MySQL database"
else:
print "Command %r failed with result:\n%s" % (mysql_upgrade_list, result)
if mysql_upgrade.returncode:
print "Command %r failed with result:\n%s" % (mysql_upgrade_binary, result)
break
mysql_list = [conf['mysql_binary'].strip(), '-B', '--user=root']
if 'socket' in conf:
mysql_list.append('--socket=' + conf['socket'])
print "MySQL database upgraded with result:\n%s" % result
mysql = subprocess.Popen(mysql_list, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = mysql.communicate(conf['mysql_script'])[0]
if mysql.returncode is None:
mysql.kill()
if mysql.returncode != 0:
result = mysql.communicate(mysql_script)[0]
if mysql.returncode:
print 'Command %r failed with:\n%s' % (mysql_list, result)
break
# import timezone database
mysql_tzinfo_to_sql_binary = os.path.join(
os.path.dirname(conf['mysql_binary'].strip()), 'mysql_tzinfo_to_sql')
zoneinfo_directory = '%s/zoneinfo' % os.path.dirname(pytz.__file__)
mysql_tzinfo_to_sql_list = [mysql_tzinfo_to_sql_binary, zoneinfo_directory]
mysql_tzinfo_to_sql = subprocess.Popen(mysql_tzinfo_to_sql_list, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timezone_sql = mysql_tzinfo_to_sql.communicate()[0]
if mysql_tzinfo_to_sql.returncode != 0:
print 'Command %r failed with:\n%s' % (mysql_tzinfo_to_sql_list, result)
break
mysql = subprocess.Popen(mysql_list + ['mysql',], stdin=subprocess.PIPE,
mysql = subprocess.Popen(mysql_list + ('mysql',), stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = mysql.communicate(timezone_sql)[0]
if mysql.returncode is None:
mysql.kill()
if mysql.returncode != 0:
if mysql.returncode:
print 'Command %r failed with:\n%s' % (mysql_list, result)
break
is_succeeded = True
break
if is_succeeded:
print 'SlapOS initialisation script succesfully applied on database.'
break
return
print 'Sleeping for %ss and retrying' % sleep
sys.stdout.flush()
sys.stderr.flush()
......
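One detail in this hunk is easy to miss: mysql_list is now built as a tuple (mysql_binary, '-B'), so the timezone import concatenates with ('mysql',) instead of ['mysql'] — in Python, tuple + list raises a TypeError. A minimal sketch of that pattern, using a placeholder binary path that is not taken from this diff:

    # mysql_list is a tuple in the new code, so concatenation must use a tuple too.
    mysql_binary = '/opt/mariadb/bin/mysql'   # placeholder path, not from this diff
    mysql_list = mysql_binary, '-B'
    print(mysql_list + ('mysql',))            # ('/opt/mariadb/bin/mysql', '-B', 'mysql')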
......@@ -40,7 +40,7 @@ import json
# Use to do from slapos.recipe.librecipe import GenericBaseRecipe
from .generic import GenericBaseRecipe
from .genericslap import GenericSlapRecipe
from .filehash import filehash
from .filehash import filehash, generateHashFromFiles
# Utility functions to (de)serialise live python objects in order to send them
# to master.
......
......@@ -51,6 +51,15 @@ def filehash(filename, type_=DEFAULT_HASH):
shutil.copyfileobj(file_, digest)
return digest.read()
def generateHashFromFiles(file_list):
hasher = hashlib.md5()
for path in file_list:
with open(path, 'rb') as afile:
buf = afile.read()
hasher.update(b"%u\n" % len(buf))
hasher.update(buf)
return hasher.hexdigest()
# Home made hashdeep <http://md5deep.sourceforge.net/>
def dirhash(dirname, type_=DEFAULT_HASH):
"""Walk into a directory an return a unique hash for
......
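The helper added above is what the test suites and the wrapper recipe further down now import instead of their private copies. A standalone sketch, re-declaring the function as added above and using placeholder file names, shows how the digest is then used to suffix wrapper names (mirroring wrapper.py below):

    import hashlib

    def generateHashFromFiles(file_list):
        # Same logic as the helper added above in librecipe/filehash.py.
        hasher = hashlib.md5()
        for path in file_list:
            with open(path, 'rb') as afile:
                buf = afile.read()
            hasher.update(b"%u\n" % len(buf))
            hasher.update(buf)
        return hasher.hexdigest()

    # Placeholder input file, only so the sketch runs end to end.
    with open('/tmp/buildout.cfg', 'wb') as f:
        f.write(b'[buildout]\n')

    digest = generateHashFromFiles(['/tmp/buildout.cfg'])
    # wrapper.py builds the hashed wrapper name the same way:
    print('%s-%s' % ('/tmp/caddy-wrapper', digest))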
......@@ -27,7 +27,7 @@
import shlex
from slapos.recipe.librecipe import GenericBaseRecipe
from slapos.recipe.librecipe import GenericBaseRecipe, generateHashFromFiles
class Recipe(GenericBaseRecipe):
"""Recipe to create a script from given command and options.
......@@ -67,18 +67,8 @@ class Recipe(GenericBaseRecipe):
kw['reserve_cpu'] = True
if hash_files:
hash_file_list = hash_files.split()
hash = self.generateHashFromFiles(hash_file_list)
hash = generateHashFromFiles(hash_file_list)
wrapper_path = "%s-%s" % (wrapper_path, hash)
return self.createWrapper(wrapper_path, args, environment, **kw)
def generateHashFromFiles(self, file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'rb') as afile:
buf = afile.read()
hasher.update(b"%u\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
......@@ -38,6 +38,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'requests >= 2.20.0', # needed for recent SSL certificate fixes
......
......@@ -46,6 +46,7 @@ import StringIO
import gzip
import base64
import re
from slapos.recipe.librecipe import generateHashFromFiles
try:
......@@ -283,18 +284,6 @@ class TestDataMixin(object):
except Exception as e:
self.fail(e)
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def getTrimmedProcessInfo(self):
return '\n'.join(sorted([
'%(group)s:%(name)s %(statename)s' % q for q
......@@ -383,13 +372,13 @@ class TestDataMixin(object):
hash_file_list = [os.path.join(
self.computer_partition_root_path, 'software_release/buildout.cfg')]
hash_value_dict = {
'generic': self.generateHashFromFiles(hash_file_list),
'generic': generateHashFromFiles(hash_file_list),
}
for caddy_wrapper_path in glob.glob(os.path.join(
self.instance_path, '*', 'bin', 'caddy-wrapper')):
partition_id = caddy_wrapper_path.split('/')[-3]
hash_value_dict[
'caddy-%s' % (partition_id)] = self.generateHashFromFiles(
'caddy-%s' % (partition_id)] = generateHashFromFiles(
hash_file_list + [caddy_wrapper_path]
)
for rejected_slave_publish_path in glob.glob(os.path.join(
......@@ -399,7 +388,7 @@ class TestDataMixin(object):
self.instance_path, partition_id, 'etc', 'rejected-slave.pem')
hash_value_dict[
'rejected-slave-publish'
] = self.generateHashFromFiles(
] = generateHashFromFiles(
hash_file_list + [rejected_slave_publish_path, rejected_slave_pem_path]
)
......
......@@ -84,7 +84,7 @@ This software release assigns the following port ranges by default:
memcached-persistent 2000-2009
memcached-volatile 2010-2019
smtp 2025-2029
neo (admin & master) 2050-2051
neo (admin, master) 2050-2052
mariadb 2099
zeo 2100-2149
balancer 2150-2199
......
......@@ -114,7 +114,7 @@
"wsgi": {
"description": "If set to true, Zope is run as a WSGI application, instead of using the Medusa HTTP server.",
"type": "boolean",
"default": false
"default": true
},
"zope-partition-dict": {
"description": "Zope layout definition",
......
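Since the default flips to WSGI, deployments that still want the Medusa HTTP server now have to opt out explicitly; the TestMedusa change just below does exactly this. In request terms it is only the serialised parameter (illustrative):

    import json

    # 'wsgi': False keeps the Medusa HTTP server now that WSGI is the default.
    instance_parameter_dict = {'_': json.dumps({'wsgi': False})}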
......@@ -97,14 +97,14 @@ class TestDefaultParameters(ERP5TestCase, TestPublishedURLIsReachableMixin):
__partition_reference__ = 'defp'
class TestWSGI(ERP5TestCase, TestPublishedURLIsReachableMixin):
"""Test ERP5 WSGI server
class TestMedusa(ERP5TestCase, TestPublishedURLIsReachableMixin):
"""Test ERP5 Medusa server
"""
__partition_reference__ = 'wsgi'
__partition_reference__ = 'medusa'
@classmethod
def getInstanceParameterDict(cls):
return {'_': json.dumps({'wsgi': True})}
return {'_': json.dumps({'wsgi': False})}
class TestApacheBalancerPorts(ERP5TestCase):
......
......@@ -19,7 +19,7 @@ md5sum = 028b6a6456d744c11b1bb2c51ecd51b2
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = c79448a49b1b3dc6e757b90f497c7be1
md5sum = 12a99227828e834d97ee0c68246c467c
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
......
......@@ -498,16 +498,15 @@ ipv6-network-info =
recipe = plone.recipe.command
filename = netconfig.sh
path = ${directory:public}/${:filename}
ifconfig = ifconfig \$IFACE {{ slap_configuration.get('tap-ipv4-addr') }} netmask {{ slap_configuration.get('tap-ipv4-netmask') }}
route-iface = route add {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
route-network = route add -net {{ slap_configuration.get('tap-ipv4-network') }} netmask {{ slap_configuration.get('tap-ipv4-netmask') }} gw {{ slap_configuration.get('tap-ipv4-gateway') }}
ipv4-add-address = ip -4 address add {{ slap_configuration.get('tap-ipv4-addr') }}/{{ slap_configuration.get('tap-ipv4-netmask') }} dev \$IFACE
{% if nat_restrict == 'true' -%}
route-default = route add default gw {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
ipv4-add-default-route = ip route add default via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
{% elif global_ipv4_prefix -%}
route-default = ip route add {{ global_ipv4_prefix }} via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE src {{ slap_configuration.get('tap-ipv4-addr') }}
ipv4-add-default-route = ip route add {{ global_ipv4_prefix }} via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE src {{ slap_configuration.get('tap-ipv4-addr') }}
{% else -%}
route-default =
ipv4-add-default-route =
{% endif -%}
ipv4-set-link-up = ip link set dev \$IFACE up
command =
cat > ${:path} << EOF
#!/bin/sh
......@@ -515,10 +514,9 @@ command =
#try to be compatible with OS with old names
ip a | grep eth0: && [ \$IFACE = ens3 ] && IFACE=eth0
ip a | grep eth1: && [ \$IFACE = ens4 ] && IFACE=eth1
${:ifconfig}
${:route-iface}
${:route-network}
${:route-default}
${:ipv4-add-address}
${:ipv4-add-default-route}
${:ipv4-set-link-up}
EOF
update-command = ${:command}
{% endif -%}
......
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
import psutil
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -53,17 +52,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
return (os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg')), )
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_hashes(self):
hash_files = [
......@@ -87,7 +75,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
import psutil
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -54,17 +53,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_hashes(self):
hash_files = [
......@@ -83,7 +71,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
{%- if slapparameter_dict.get('admin') != 0 %}
{%- set monitor_dict = slapparameter_dict['monitor'] %}
{%- set bang_on_problem = monitor_dict.pop('bang-on-problem', None) %}
{%- set periodicity = monitor_dict.pop('periodicity', None) %}
[buildout]
extends =
{{ template_monitor }}
parts +=
neo-admin-promise
logrotate-admin
neoctl
monitor-neo-health
[neo-admin]
recipe = slapos.cookbook:neoppod.admin
......@@ -14,12 +20,38 @@ port = ${publish:port-admin}
ssl = {{ dumps(bool(slapparameter_dict['ssl'])) }}
cluster = {{ dumps(slapparameter_dict['cluster']) }}
masters = {{ dumps(slapparameter_dict['masters']) }}
extra-options =
{%- for k, v in monitor_dict.iteritems() %}
{%- if k == 'backup' %}
{%- set k = 'monitor-backup' %}
{%- endif %}
{%- if not isinstance(v, list) %}
{%- set v = [v] %}
{%- endif %}
{%- for v in v %}
--{{k}}={{v}}
{%- endfor %}
{%- endfor %}
[directory]
plugin = ${:etc}/plugin
[neo-admin-promise]
recipe = slapos.cookbook:check_port_listening
hostname = ${neo-admin:ip}
port = ${neo-admin:port}
path = ${directory:promises}/neo-admin-promise
[monitor-neo-health]
<= monitor-promise-base
module = check_neo_health
name = ${:_buildout_section_name_}.py
config-neoctl = ${neoctl:wrapper-path}
{%- if bang_on_problem != None %}
config-bang-on-problem = {{ dumps(bang_on_problem) }}
{%- endif %}
{%- if periodicity != None %}
config-periodicity = {{ dumps(periodicity) }}
{%- endif %}
[monitor-instance-parameter]
monitor-httpd-port = {{ slapparameter_dict.get('admin', 2050) + 1 }}
monitor-title = neo
password = {{ slapparameter_dict['monitor-passwd'] }}
[logrotate-admin]
< = logrotate-entry-base
......@@ -37,3 +69,4 @@ command-line =
--key ${directory:etc}/neo.key
{%- endif %}
wrapper-path = ${directory:bin}/neoctl
{%- endif %}
......@@ -7,7 +7,7 @@
],
"properties": {
"cluster": {
"description": "Cluster unique identifier. Your last line of defense against mixing up NEO clusters and corrupting your data. Choose a unique value for each of your cluster.",
"description": "Cluster unique identifier. Your last line of defense against mixing up NEO clusters and corrupting your data. Choose a unique value for each of your cluster. Space not allowed.",
"type": "string"
},
"partitions": {
......@@ -28,6 +28,25 @@
"description": "Master nodes in the cluster to backup.",
"type": "string"
},
"monitor": {
"description": "Parameters for monitoring.",
"properties": {
"backup": {
"description": "List of cluster names that are expected to backup this cluster.",
"type": "array",
"items": {
"type": "string"
},
"uniqueItems": true
},
"periodicity": {
"description": "Periodicity in minutes at which the cluster health is checked.",
"default": 10,
"type": "number"
}
},
"type": "object"
},
"sla-dict": {
"description": "[NEO SR only] Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references ('node-0', 'node-1', ...). The prefix 'node-' is mandatory and the number must start from 0. The total number of nodes here must be equal to the length of node-list.",
"additionalProperties": {
......@@ -58,6 +77,7 @@
"items": {
"description": "Dictionary containing parameters required to configure individual nodes.",
"default": {},
"additionalProperties": false,
"properties": {
"admin": {
"description": "Port of admin node. 0 to disable.",
......
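As a rough illustration of the new schema, a request could pass a monitor section like the one below; the cluster and backup names are made up for the example:

    import json

    # Hypothetical NEO request parameters exercising the new "monitor" object.
    parameter_dict = {
        'cluster': 'example-neo',               # unique identifier, no spaces
        'monitor': {
            'backup': ['example-neo-backup'],   # clusters expected to back this one up
            'periodicity': 10,                  # health-check interval, in minutes
        },
    }
    print(json.dumps(parameter_dict, indent=2))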
{%- if slapparameter_dict.get('master') != 0 %}
[buildout]
parts +=
neo-master-promise
logrotate-master
[neo-master]
......@@ -23,14 +23,9 @@ autostart = {{ slapparameter_dict['autostart'] }}
# this default value is required.
masters = {{ dumps(slapparameter_dict['masters']) }}
[neo-master-promise]
recipe = slapos.cookbook:check_port_listening
hostname = ${neo-master:ip}
port = ${neo-master:port}
path = ${directory:promises}/neo-master-promise
[logrotate-master]
< = logrotate-entry-base
name = neo-master
log = ${neo-master:logfile}
post = {{ bin_directory }}/slapos-kill -s RTMIN+1 -- ${neo-master:binary} -l ${:log}
{%- endif %}
......@@ -51,7 +51,7 @@ recipe = slapos.cookbook:publish.serialised
# TODO: make port a partition parameter
ip = {{ "[%s]" % list(ipv6_set)[0] if ipv6_set else list(ipv4_set)[0] }}
{% set admin = slapparameter_dict.get('admin', 2050) -%}
{% set master = slapparameter_dict.get('master', 2051) -%}
{% set master = slapparameter_dict.get('master', 2052) -%}
{% if master -%}
port-master = {{ master }}
master = ${:ip}:${:port-master}
......@@ -61,6 +61,7 @@ master =
{% if admin -%}
port-admin = {{ admin }}
admin = ${:ip}:${:port-admin}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
{% else -%}
admin =
{% endif -%}
......@@ -119,7 +120,6 @@ post = {{ bin_directory }}/slapos-kill -s RTMIN+1 -- {{ bin_directory }}/neostor
[directory]
recipe = slapos.cookbook:mkdirectory
promises = ${buildout:directory}/etc/promises
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
var = ${buildout:directory}/var
......@@ -162,11 +162,11 @@ context =
[buildout]
extends =
{{ logrotate_cfg }}
{%- if master %}
{{ master_cfg }}
{%- endif %}
{%- if admin %}
{{ admin_cfg }}
{%- endif %}
{%- if master %}
{{ master_cfg }}
{%- endif %}
parts +=
{{ '\n '.join(part_list) }}
......@@ -35,13 +35,10 @@ parts =
{{- assert(not sla_dict, sla_dict) }}
{% endmacro -%}
{% macro request_neo(parameter_dict, software_type, prefix='node-') -%}
{% macro request_neo(parameter_dict, software_type, prefix='node-', monitor_base_url_dict=None) -%}
{% set section_id_list = [] -%}
[{{ prefix }}request-common]
<= request-common-base
return =
master
admin
config-masters = {{ '${' ~ prefix ~ 'cluster:masters}' }}
config-cluster = {{ parameter_dict['cluster'] }}
{% set replicas = parameter_dict.get('replicas', 0) -%}
......@@ -61,19 +58,37 @@ software-type = {{ software_type }}
{% do storage_count.append(node.get('storage-count', 1)) -%}
{% endfor -%}
config-autostart = {{ dumps(sum(storage_count)) }}
{% do assert(replicas < len(node_list)) -%}
{% for i, node in enumerate(node_list) -%}
{% set section_id = prefix ~ i -%}
{% do section_id_list.append(section_id) -%}
{%- do assert(replicas < len(node_list)) %}
{%- set admin_list = [] %}
{%- for i, node in enumerate(node_list) %}
{%- set section_id = prefix ~ i %}
{%- do section_id_list.append(section_id) %}
{%- if admin_list %}
{%- do node.setdefault('admin', 0) %}
{%- endif %}
[{{section_id}}]
<= {{ prefix }}request-common
name = {{ section_id }}
{% for k, v in node.iteritems() -%}
return =
master
admin
{%- if node.get('admin') != 0 %}
monitor-base-url
{%- if monitor_base_url_dict != None %}
{%- do monitor_base_url_dict.__setitem__('neo',
'${' ~ section_id ~ ':connection-monitor-base-url}') %}
{%- endif %}
{%- do admin_list.append(section_id) %}
config-monitor-passwd = ${monitor-htpasswd:passwd}
config-monitor = {{ dumps(parameter_dict.get('monitor', {})) }}
{%- endif %}
{%- for k, v in node.iteritems() %}
config-{{ k }} = {{ dumps(v) }}
{% endfor -%}
{%- endfor %}
{{ sla(section_id) }}
{% endfor -%}
{%- endfor %}
{%- do assert(len(admin_list) == 1, admin_list) %}
[{{section(prefix ~ 'cluster')}}]
recipe = slapos.cookbook:neoppod.cluster
......
......@@ -112,19 +112,19 @@ adapter-context =
[root-common]
<= download-base-neo
md5sum = 15fa47a59cc3019f59612aaf33bd9ec5
md5sum = ccc6e33412259415ec6c3452d37b77cc
[instance-neo-admin]
<= download-base-neo
md5sum = ce0d9ff9e899bb706351a99df29238a9
md5sum = 87670ddc6b5d2007dac1b6d2ba86d168
[instance-neo-master]
<= download-base-neo
md5sum = 4faee020eaf7cd495cd6210dfa4eb0c1
md5sum = 9f27195d770b2f57461c60a82c851ab9
[instance-neo]
<= download-base-neo
md5sum = 5fc9fcaec3a5387625af34fe686097ae
md5sum = d18e049d580720e733502b774b0d6790
[template-neo-my-cnf]
<= download-base-neo
......
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -32,6 +32,7 @@ import glob
import re
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -166,18 +167,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHash(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_process_list(self):
hash_list = [
'software_release/buildout.cfg',
......@@ -204,7 +193,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_list]
for name in expected_process_names:
h = ServicesTestCase.generateHash(hash_file_list)
h = generateHashFromFiles(hash_file_list)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_name_list)
......
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -36,6 +36,7 @@ import requests
import plantuml
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -154,16 +155,6 @@ class TestSimpleDiagram(PlantUMLTestCase):
class ServicesTestCase(PlantUMLTestCase):
@staticmethod
def generateHashFromFiles(file_list):
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_hashes(self):
hash_files = [
......@@ -182,7 +173,7 @@ class ServicesTestCase(PlantUMLTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
import psutil
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -54,17 +53,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_hashes(self):
hash_files = [
......@@ -82,7 +70,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
import psutil
import utils
from slapos.recipe.librecipe import generateHashFromFiles
SLAPOS_TEST_IPV4 = os.environ['SLAPOS_TEST_IPV4']
SLAPOS_TEST_IPV6 = os.environ['SLAPOS_TEST_IPV6']
......@@ -83,17 +82,6 @@ class TestPortRedirection(Re6stnetTestCase):
}, portredir_config[0])
class ServicesTestCase(Re6stnetTestCase):
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
@classmethod
def getInstanceParameterDict(cls):
......@@ -115,7 +103,7 @@ class ServicesTestCase(Re6stnetTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
......@@ -14,7 +14,7 @@
# not need these here).
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 001affafc204b638615deea04c95cfdf
md5sum = 70c8d3e4414f6f9f969c9641e840b52f
[template-balancer]
filename = instance-balancer.cfg.in
......
......@@ -152,7 +152,7 @@ connection-url = smtp://127.0.0.2:0/
{% set ((name, server_dict),) = server_dict.items() -%}
{% do neo.append(server_dict.get('cluster')) -%}
{% do server_dict.update(cluster='${publish-early:neo-cluster}') -%}
{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-') }}
{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-', monitor_base_url_dict) }}
{% set client_dict = zodb_dict[name].setdefault('storage-dict', {}) -%}
{% for k in 'ssl', '_ca', '_cert', '_key' -%}
{% do k in server_dict and client_dict.setdefault(k, server_dict[k]) -%}
......@@ -256,7 +256,7 @@ config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longreques
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }}
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', False)) }}
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
{% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
{% endif -%}
......
......@@ -18,7 +18,7 @@ md5sum = c44a7481bb85e3258128afe3fcf23f44
[template-runner]
filename = instance-runner.cfg
md5sum = a5e2ae493b78a0f42d61cc79a0b2e686
md5sum = 48bff0b5f082f22d44966b1151f07133
[template-runner-import-script]
filename = template/runner-import.sh.jinja2
......
......@@ -794,7 +794,7 @@ ip = $${slaprunner:ipv4}
server = $${:ip}:$${:port}
port = 39986
slapgrid-cp = slapgrid-cp
slapgrid-cp-command = $${slaprunner:slapos} node instance --all --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-cp-log}
slapgrid-cp-command = $${slaprunner:slapos} node instance --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-cp-log}
slapgrid-cp-log = $${runnerdirectory:home}/instance.log
slapgrid-cp-startretries = 0
slapgrid-sr = slapgrid-sr
......
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -38,6 +38,7 @@ import json
import psutil
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -53,17 +54,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
return (os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg')), )
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHashFromFiles(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_hashes(self):
hash_files = [
......@@ -89,7 +79,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_files]
for name in expected_process_names:
h = ServicesTestCase.generateHashFromFiles(hash_files)
h = generateHashFromFiles(hash_files)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_names)
......@@ -43,6 +43,7 @@ setup(name=name,
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.cookbook',
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
......
......@@ -32,6 +32,7 @@ import glob
import ConfigParser
import utils
from slapos.recipe.librecipe import generateHashFromFiles
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -58,18 +59,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
@staticmethod
def generateHash(file_list):
import hashlib
hasher = hashlib.md5()
for path in file_list:
with open(path, 'r') as afile:
buf = afile.read()
hasher.update("%s\n" % len(buf))
hasher.update(buf)
hash = hasher.hexdigest()
return hash
def test_process_list(self):
hash_list = [
'software_release/buildout.cfg',
......@@ -91,7 +80,7 @@ class ServicesTestCase(InstanceTestCase):
for path in hash_list]
for name in expected_process_names:
h = ServicesTestCase.generateHash(hash_file_list)
h = generateHashFromFiles(hash_file_list)
expected_process_name = name.format(hash=h)
self.assertIn(expected_process_name, process_name_list)
......
......@@ -78,7 +78,7 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = af5d9aeac2bae695220465a4348ae592
md5sum = ff5e0d8d1ca167399fb438e890baf370
[template-zeo]
filename = instance-zeo.cfg.in
......
......@@ -152,7 +152,7 @@ connection-url = smtp://127.0.0.2:0/
{% set ((name, server_dict),) = server_dict.items() -%}
{% do neo.append(server_dict.get('cluster')) -%}
{% do server_dict.update(cluster='${publish-early:neo-cluster}') -%}
{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-') }}
{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-', monitor_base_url_dict) }}
{% set client_dict = zodb_dict[name].setdefault('storage-dict', {}) -%}
{% for k in 'ssl', '_ca', '_cert', '_key' -%}
{% do k in server_dict and client_dict.setdefault(k, server_dict[k]) -%}
......@@ -255,7 +255,7 @@ config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longreques
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }}
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', False)) }}
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
{% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
{% endif -%}
......
......@@ -78,7 +78,7 @@ part-list =
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/instance.cfg
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 7c47a85e310674e451db778d9e4383a6
md5sum = b4fbd7dc8d7fda7dbd1b80f3de273ecf
mode = 0644
context =
key application_location application:location
......@@ -110,6 +110,7 @@ context =
key template_lamp instance-lamp:output
key template_mariadb template-mariadb:target
key template_mariadb_initial_setup template-mariadb-initial-setup:target
key template_mysqld_wrapper template-mysqld-wrapper:rendered
key template_my_cnf template-my-cnf:target
key unixodbc_location unixodbc:location
key openssl_location openssl:location
......
......@@ -81,6 +81,7 @@ gzip-location = {{ gzip_location }}
mariadb-location = {{ mariadb_location }}
template-my-cnf = {{ template_my_cnf }}
template-mariadb-initial-setup = {{ template_mariadb_initial_setup }}
template-mysqld-wrapper = {{ template_mysqld_wrapper }}
link-binary = {{ dumps(mariadb_link_binary) }}
mariadb-resiliency-after-import-script = {{ mariadb_resiliency_after_import_script }}
mariadb-slow-query-report-script = {{ mariadb_slow_query_report_script }}
......
......@@ -136,15 +136,15 @@ pyparsing = 2.2.0
pytz = 2016.10
requests = 2.13.0
six = 1.12.0
slapos.cookbook = 1.0.119
slapos.core = 1.4.26
slapos.cookbook = 1.0.121
slapos.core = 1.4.27
slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.19
slapos.rebootstrap = 4.1
slapos.recipe.build = 0.41
slapos.recipe.cmmi = 0.10
slapos.toolbox = 0.94
slapos.toolbox = 0.95
stevedore = 1.21.0
subprocess32 = 3.5.3
unicodecsv = 0.14.1
......@@ -195,7 +195,7 @@ enum34 = 1.1.6
# Required by:
# slapos.toolbox==0.94
erp5.util = 0.4.60
erp5.util = 0.4.61
# Required by:
# slapos.toolbox==0.94
......@@ -218,8 +218,8 @@ pyrsistent = 0.14.5
ipaddress = 1.0.18
# Required by:
# slapos.cookbook==1.0.119
jsonschema = 3.0.0a3
# slapos.cookbook==1.0.121
jsonschema = 3.0.2
# Required by:
# slapos.toolbox==0.94
......