Commit 80a0aafe authored by Rafael Monnerat

Update Release Candidate

parents b77af1f2 7d279ba8
Changes
=======

+1.0.121 (2019-09-12)
+--------------------
+- generic.mysql.wrap_update_mysql: prepare for MariaDB 10.4
+- publish-early: process -init entries in specified order
+- Partial support of Python 3
+- Remove unused generic.mysql recipe

1.0.119 (2019-08-14)
--------------------
@@ -33,5 +33,5 @@ environment =
[ghostscript-9]
<= ghostscript-common
-url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs925/ghostscript-9.25.tar.xz
+url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs927/ghostscript-9.27.tar.xz
-md5sum = d5ac3f3d76cf82a549bafdf86d58395b
+md5sum = dd531503dbbc524f73528359e2ea145c

@@ -15,8 +15,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = false
-url = https://packages.groonga.org/source/groonga/groonga-9.0.0.tar.gz
+url = https://packages.groonga.org/source/groonga/groonga-9.0.7.tar.gz
-md5sum = 5475818c734dfc6414d209babea90921
+md5sum = a2697e46b7460fc81b418666c5abdcb4
# temporary patch to respect more tokens in natural language mode.
patches =
  ${:_profile_base_location_}/groonga.patch#9ed02fbe8400402d3eab47eee149978b

@@ -46,8 +46,8 @@ environment =
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
shared = false
-url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.3.tar.gz
+url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.4.tar.gz
-md5sum = ad30404cb9999a842e98f3902057b152
+md5sum = effa67fb271d49810850a3b275d040f6
location = ${groonga:location}
configure-options =
  --disable-static

@@ -17,7 +17,7 @@ recipe = slapos.recipe.cmmi
url = http://ftp.de.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_3.81.orig.tar.gz
md5sum = 7c93b1ab4680eb21c2c13f4f47741e2d
patches =
-  ${:_profile_base_location_}/make-dfsg_3.81-8.2.diff#fa77bb989a096fafbe7c78582e9415e3
+  ${:_profile_base_location_}/make-dfsg_3.81-8.2.diff#320ce09344a8618b6c47ddb88c09cea9
patch-options = -p1
environment =
  PATH=${patch:location}/bin:%(PATH)s
@@ -1044,6 +1044,15 @@
# Autoconf setup
AC_CONFIG_AUX_DIR(config)
AC_CONFIG_SRCDIR(vpath.c)
@@ -354,7 +356,7 @@
#define GLOB_INTERFACE_VERSION 1
#if !defined _LIBC && defined __GNU_LIBRARY__ && __GNU_LIBRARY__ > 1
# include <gnu-versions.h>
-# if _GNU_GLOB_INTERFACE_VERSION == GLOB_INTERFACE_VERSION
+# if _GNU_GLOB_INTERFACE_VERSION >= GLOB_INTERFACE_VERSION
gnu glob
# endif
#endif
@@ -372,6 +374,24 @@
MAKE_HOST="$host"
AC_SUBST(MAKE_HOST)

@@ -16744,7 +16753,15 @@
/* end confdefs.h. */
#include <features.h>
-@@ -13629,13 +9246,13 @@
+@@ -13622,20 +9239,20 @@
#define GLOB_INTERFACE_VERSION 1
#if !defined _LIBC && defined __GNU_LIBRARY__ && __GNU_LIBRARY__ > 1
# include <gnu-versions.h>
-# if _GNU_GLOB_INTERFACE_VERSION == GLOB_INTERFACE_VERSION
+# if _GNU_GLOB_INTERFACE_VERSION >= GLOB_INTERFACE_VERSION
gnu glob
# endif
#endif
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
@@ -29,9 +29,13 @@ parts =
[mariadb]
recipe = slapos.recipe.cmmi
url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
-version = 10.3.14
+version = 10.3.18
-md5sum = b1b9628bbc3ff15e8f5cfc7896c73975
+md5sum = b3524c0825c3a1c255496daea38304a0
location = ${buildout:parts-directory}/${:_buildout_section_name_}
+pre-configure =
+  set -e '\bSET(PLUGIN_AUTH_PAM YES)' cmake/build_configurations/mysql_release.cmake
+  grep -q "$@"
+  sed -i "/$1/d" "$2"
configure-command = ${cmake:location}/bin/cmake
configure-options =
  -DCMAKE_INSTALL_PREFIX=${:location}
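The new pre-configure hook above strips the SET(PLUGIN_AUTH_PAM YES) line from MariaDB's release configuration before cmake runs, presumably to keep the PAM auth plugin out of this build, and the grep -q guard makes the build fail if that line ever disappears upstream. A rough Python equivalent of that shell step, for illustration only (the recipe itself runs grep/sed; the path is the file named in the hook, relative to the unpacked MariaDB source):

    import re

    # Illustrative equivalent of the pre-configure step above; not part of the recipe.
    CMAKE_FILE = "cmake/build_configurations/mysql_release.cmake"
    PATTERN = re.compile(r"\bSET\(PLUGIN_AUTH_PAM YES\)")

    def drop_pam_plugin(path=CMAKE_FILE):
        with open(path) as f:
            lines = f.readlines()
        kept = [line for line in lines if not PATTERN.search(line)]
        if len(kept) == len(lines):
            # Mirrors `grep -q` failing: the expected line is gone, so the
            # workaround should be reviewed instead of silently skipped.
            raise RuntimeError("PLUGIN_AUTH_PAM line not found in %s" % path)
        with open(path, "w") as f:
            f.writelines(kept)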
@@ -80,8 +84,8 @@ post-install =
# mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users.
# http://mroonga.github.com/
recipe = slapos.recipe.cmmi
-url = https://packages.groonga.org/source/mroonga/mroonga-9.00.tar.gz
+url = https://packages.groonga.org/source/mroonga/mroonga-9.05.tar.gz
-md5sum = a1deff08a3649d8370436f1c903ed432
+md5sum = d81629fc6042a44ce56cc6990d563164
pre-configure = set -e
  rm -rf fake_mariadb_source
  mkdir -p fake_mariadb_source
@@ -60,6 +60,14 @@ repository = https://lab.nexedi.com/Daetalus/pyodide.git
location = ${buildout:parts-directory}/${:_buildout_section_name_}
branch = master
+
+[get-pip]
+recipe = slapos.recipe.build:download
+# version 19.2.3
+url = https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/${:filename}
+md5sum = 6f489c199cb5be8a4b84c6bd7ad6e051
+filename = get-pip.py
+mode = 0755

[pyodide-script]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}

@@ -78,7 +86,7 @@ openssl_location = ${openssl:location}
gcc_bin_dir = ${gcc-8.2:location}/bin
python_bin_dir = ${python3.7:location}/bin
md5sum = 47ec6a091e503349d948760dc03bed51
-pip_script = ${:_profile_base_location_}/get-pip.py
+pip_script = ${get-pip:target}
script =
  import subprocess, os, shutil
@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os

-version = '1.0.119'
+version = '1.0.121'
name = 'slapos.cookbook'
long_description = open("README.rst").read() + "\n" + \
    open("CHANGES.rst").read() + "\n"
@@ -32,10 +32,10 @@ class WrapUpdateMySQL(GenericBaseRecipe):
      self.createPythonScript(
        self.options['output'],
        __name__ + '.mysql.updateMysql',
-        [{
+        kw = {
          'mysql_upgrade_binary': self.options['binary'],
          'mysql_binary': self.options['mysql'],
          'mysql_script_file': self.options['init-script'],
-        }]
+        }
      ),
    ]
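The recipe now forwards its three options as keyword arguments instead of a single conf dictionary, matching the new updateMysql(mysql_upgrade_binary, mysql_binary, mysql_script_file) signature in the next hunk. A minimal sketch of the resulting call, with a stand-in function and made-up paths (neither is from this repository):

    # Sketch only: stand-in for the real function defined in the recipe's
    # mysql module (referenced above as __name__ + '.mysql.updateMysql').
    def updateMysql(mysql_upgrade_binary, mysql_binary, mysql_script_file):
        print(mysql_upgrade_binary, mysql_binary, mysql_script_file)

    kw = {
        'mysql_upgrade_binary': '/opt/mariadb/bin/mysql_upgrade',   # placeholder path
        'mysql_binary': '/opt/mariadb/bin/mysql',                   # placeholder path
        'mysql_script_file': '/srv/instance/etc/mysql-init.sql',    # placeholder path
    }
    updateMysql(**kw)  # keyword expansion replaces the old single `conf` dict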
@@ -4,69 +4,45 @@ import time
import sys
import pytz

-def updateMysql(conf):
+def updateMysql(mysql_upgrade_binary, mysql_binary, mysql_script_file):
  sleep = 30
-  is_succeed = False
-  try:
-    script_filename = conf.pop('mysql_script_file')
-  except KeyError:
-    pass
-  else:
-    assert 'mysql_script' not in conf
-    with open(script_filename) as script_file:
-      conf['mysql_script'] = script_file.read()
-  is_succeeded = False
+  with open(mysql_script_file) as script_file:
+    mysql_script = script_file.read()
+  mysql_list = mysql_binary, '-B'
+  mysql_tzinfo_to_sql_list = (
+    os.path.join(os.path.dirname(mysql_binary), 'mysql_tzinfo_to_sql'),
+    os.path.join(os.path.dirname(pytz.__file__), 'zoneinfo'),
+  )
  while True:
    while True:
-      mysql_upgrade_list = [conf['mysql_upgrade_binary'], '--user=root']
-      if 'socket' in conf:
-        mysql_upgrade_list.append('--socket=' + conf['socket'])
-      mysql_upgrade = subprocess.Popen(mysql_upgrade_list, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      mysql_upgrade = subprocess.Popen(mysql_upgrade_binary,
+        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
      result = mysql_upgrade.communicate()[0]
-      if mysql_upgrade.returncode is None:
-        mysql_upgrade.kill()
-      if mysql_upgrade.returncode == 0:
-        print "MySQL database upgraded with result:\n%s" % result
-      elif 'is already upgraded' in result:
-        print "No need to upgrade MySQL database"
-      else:
-        print "Command %r failed with result:\n%s" % (mysql_upgrade_list, result)
+      if mysql_upgrade.returncode:
+        print "Command %r failed with result:\n%s" % (mysql_upgrade_binary, result)
        break
-      mysql_list = [conf['mysql_binary'].strip(), '-B', '--user=root']
-      if 'socket' in conf:
-        mysql_list.append('--socket=' + conf['socket'])
+      print "MySQL database upgraded with result:\n%s" % result
      mysql = subprocess.Popen(mysql_list, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-      result = mysql.communicate(conf['mysql_script'])[0]
-      if mysql.returncode is None:
-        mysql.kill()
-      if mysql.returncode != 0:
+      result = mysql.communicate(mysql_script)[0]
+      if mysql.returncode:
        print 'Command %r failed with:\n%s' % (mysql_list, result)
        break
      # import timezone database
-      mysql_tzinfo_to_sql_binary = os.path.join(
-        os.path.dirname(conf['mysql_binary'].strip()), 'mysql_tzinfo_to_sql')
-      zoneinfo_directory = '%s/zoneinfo' % os.path.dirname(pytz.__file__)
-      mysql_tzinfo_to_sql_list = [mysql_tzinfo_to_sql_binary, zoneinfo_directory]
      mysql_tzinfo_to_sql = subprocess.Popen(mysql_tzinfo_to_sql_list, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      timezone_sql = mysql_tzinfo_to_sql.communicate()[0]
      if mysql_tzinfo_to_sql.returncode != 0:
        print 'Command %r failed with:\n%s' % (mysql_tzinfo_to_sql_list, result)
        break
-      mysql = subprocess.Popen(mysql_list + ['mysql',], stdin=subprocess.PIPE,
+      mysql = subprocess.Popen(mysql_list + ('mysql',), stdin=subprocess.PIPE,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
      result = mysql.communicate(timezone_sql)[0]
-      if mysql.returncode is None:
-        mysql.kill()
-      if mysql.returncode != 0:
+      if mysql.returncode:
        print 'Command %r failed with:\n%s' % (mysql_list, result)
        break
-      is_succeeded = True
-      break
-    if is_succeeded:
      print 'SlapOS initialisation script succesfully applied on database.'
-      break
+      return
    print 'Sleeping for %ss and retrying' % sleep
    sys.stdout.flush()
    sys.stderr.flush()
@@ -40,7 +40,7 @@ import json
# Use to do from slapos.recipe.librecipe import GenericBaseRecipe
from .generic import GenericBaseRecipe
from .genericslap import GenericSlapRecipe
-from .filehash import filehash
+from .filehash import filehash, generateHashFromFiles

# Utility functions to (de)serialise live python objects in order to send them
# to master.

@@ -51,6 +51,15 @@ def filehash(filename, type_=DEFAULT_HASH):
    shutil.copyfileobj(file_, digest)
  return digest.read()

+def generateHashFromFiles(file_list):
+  hasher = hashlib.md5()
+  for path in file_list:
+    with open(path, 'rb') as afile:
+      buf = afile.read()
+    hasher.update(b"%u\n" % len(buf))
+    hasher.update(buf)
+  return hasher.hexdigest()

# Home made hashdeep <http://md5deep.sourceforge.net/>
def dirhash(dirname, type_=DEFAULT_HASH):
  """Walk into a directory an return a unique hash for
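The helper now lives in slapos/recipe/librecipe/filehash.py and is re-exported from slapos.recipe.librecipe (previous hunk), so the wrapper recipe and the various software-release tests below share one implementation instead of keeping private copies. Typical usage in this changeset, sketched with example file names:

    # Usage sketch (file names are examples): the digest of the tracked files
    # is appended to a wrapper name, as execute.py and the tests below do.
    from slapos.recipe.librecipe import generateHashFromFiles

    hash_file_list = ['software_release/buildout.cfg', 'bin/caddy-wrapper']
    digest = generateHashFromFiles(hash_file_list)   # md5 over "<size>\n" + content per file
    wrapper_path = '%s-%s' % ('bin/my-service', digest)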
@@ -27,7 +27,7 @@
import shlex

-from slapos.recipe.librecipe import GenericBaseRecipe
+from slapos.recipe.librecipe import GenericBaseRecipe, generateHashFromFiles

class Recipe(GenericBaseRecipe):
  """Recipe to create a script from given command and options.

@@ -67,18 +67,8 @@ class Recipe(GenericBaseRecipe):
      kw['reserve_cpu'] = True

    if hash_files:
      hash_file_list = hash_files.split()
-      hash = self.generateHashFromFiles(hash_file_list)
+      hash = generateHashFromFiles(hash_file_list)
      wrapper_path = "%s-%s" % (wrapper_path, hash)

    return self.createWrapper(wrapper_path, args, environment, **kw)

-  def generateHashFromFiles(self, file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'rb') as afile:
-        buf = afile.read()
-      hasher.update(b"%u\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash
@@ -38,6 +38,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'requests >= 2.20.0', # needed for recent SSL certificate fixes
@@ -46,6 +46,7 @@ import StringIO
import gzip
import base64
import re
+from slapos.recipe.librecipe import generateHashFromFiles

try:

@@ -283,18 +284,6 @@ class TestDataMixin(object):
    except Exception as e:
      self.fail(e)

-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def getTrimmedProcessInfo(self):
    return '\n'.join(sorted([
      '%(group)s:%(name)s %(statename)s' % q for q

@@ -383,13 +372,13 @@ class TestDataMixin(object):
    hash_file_list = [os.path.join(
      self.computer_partition_root_path, 'software_release/buildout.cfg')]
    hash_value_dict = {
-      'generic': self.generateHashFromFiles(hash_file_list),
+      'generic': generateHashFromFiles(hash_file_list),
    }
    for caddy_wrapper_path in glob.glob(os.path.join(
        self.instance_path, '*', 'bin', 'caddy-wrapper')):
      partition_id = caddy_wrapper_path.split('/')[-3]
      hash_value_dict[
-        'caddy-%s' % (partition_id)] = self.generateHashFromFiles(
+        'caddy-%s' % (partition_id)] = generateHashFromFiles(
        hash_file_list + [caddy_wrapper_path]
      )
    for rejected_slave_publish_path in glob.glob(os.path.join(

@@ -399,7 +388,7 @@
        self.instance_path, partition_id, 'etc', 'rejected-slave.pem')
      hash_value_dict[
        'rejected-slave-publish'
-      ] = self.generateHashFromFiles(
+      ] = generateHashFromFiles(
        hash_file_list + [rejected_slave_publish_path, rejected_slave_pem_path]
      )
@@ -84,7 +84,7 @@ This software release assigns the following port ranges by default:
    memcached-persistent  2000-2009
    memcached-volatile    2010-2019
    smtp                  2025-2029
-   neo (admin & master)  2050-2051
+   neo (admin, master)   2050-2052
    mariadb               2099
    zeo                   2100-2149
    balancer              2150-2199
@@ -114,7 +114,7 @@
    "wsgi": {
      "description": "If set to true, Zope is run as a WSGI application, instead of using the Medusa HTTP server.",
      "type": "boolean",
-      "default": false
+      "default": true
    },
    "zope-partition-dict": {
      "description": "Zope layout definition",
@@ -97,14 +97,14 @@ class TestDefaultParameters(ERP5TestCase, TestPublishedURLIsReachableMixin):
  __partition_reference__ = 'defp'

-class TestWSGI(ERP5TestCase, TestPublishedURLIsReachableMixin):
-  """Test ERP5 WSGI server
+class TestMedusa(ERP5TestCase, TestPublishedURLIsReachableMixin):
+  """Test ERP5 Medusa server
  """
-  __partition_reference__ = 'wsgi'
+  __partition_reference__ = 'medusa'
  @classmethod
  def getInstanceParameterDict(cls):
-    return {'_': json.dumps({'wsgi': True})}
+    return {'_': json.dumps({'wsgi': False})}

class TestApacheBalancerPorts(ERP5TestCase):
@@ -19,7 +19,7 @@ md5sum = 028b6a6456d744c11b1bb2c51ecd51b2
[template-kvm]
filename = instance-kvm.cfg.jinja2
-md5sum = c79448a49b1b3dc6e757b90f497c7be1
+md5sum = 12a99227828e834d97ee0c68246c467c

[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
@@ -498,16 +498,15 @@ ipv6-network-info =
recipe = plone.recipe.command
filename = netconfig.sh
path = ${directory:public}/${:filename}
-ifconfig = ifconfig \$IFACE {{ slap_configuration.get('tap-ipv4-addr') }} netmask {{ slap_configuration.get('tap-ipv4-netmask') }}
+ipv4-add-address = ip -4 address add {{ slap_configuration.get('tap-ipv4-addr') }}/{{ slap_configuration.get('tap-ipv4-netmask') }} dev \$IFACE
-route-iface = route add {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
-route-network = route add -net {{ slap_configuration.get('tap-ipv4-network') }} netmask {{ slap_configuration.get('tap-ipv4-netmask') }} gw {{ slap_configuration.get('tap-ipv4-gateway') }}
{% if nat_restrict == 'true' -%}
-route-default = route add default gw {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
+ipv4-add-default-route = ip route add default via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE
{% elif global_ipv4_prefix -%}
-route-default = ip route add {{ global_ipv4_prefix }} via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE src {{ slap_configuration.get('tap-ipv4-addr') }}
+ipv4-add-default-route = ip route add {{ global_ipv4_prefix }} via {{ slap_configuration.get('tap-ipv4-gateway') }} dev \$IFACE src {{ slap_configuration.get('tap-ipv4-addr') }}
{% else -%}
-route-default =
+ipv4-add-default-route =
{% endif -%}
+ipv4-set-link-up = ip link set dev \$IFACE up
command =
  cat > ${:path} << EOF
  #!/bin/sh

@@ -515,10 +514,9 @@ command =
  #try to be compatible with OS with old names
  ip a | grep eth0: && [ \$IFACE = ens3 ] && IFACE=eth0
  ip a | grep eth1: && [ \$IFACE = ens4 ] && IFACE=eth1
-  ${:ifconfig}
+  ${:ipv4-add-address}
-  ${:route-iface}
+  ${:ipv4-add-default-route}
-  ${:route-network}
+  ${:ipv4-set-link-up}
-  ${:route-default}
  EOF
update-command = ${:command}
{% endif -%}
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
-import psutil
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -53,17 +52,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
    return (os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg')), )

class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_hashes(self):
    hash_files = [

@@ -87,7 +75,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
-import psutil
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -54,17 +53,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_hashes(self):
    hash_files = [

@@ -83,7 +71,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
+{%- if slapparameter_dict.get('admin') != 0 %}
+{%- set monitor_dict = slapparameter_dict['monitor'] %}
+{%- set bang_on_problem = monitor_dict.pop('bang-on-problem', None) %}
+{%- set periodicity = monitor_dict.pop('periodicity', None) %}
[buildout]
+extends =
+  {{ template_monitor }}
parts +=
-  neo-admin-promise
  logrotate-admin
-  neoctl
+  monitor-neo-health

[neo-admin]
recipe = slapos.cookbook:neoppod.admin

@@ -14,12 +20,38 @@ port = ${publish:port-admin}
ssl = {{ dumps(bool(slapparameter_dict['ssl'])) }}
cluster = {{ dumps(slapparameter_dict['cluster']) }}
masters = {{ dumps(slapparameter_dict['masters']) }}
+extra-options =
+{%- for k, v in monitor_dict.iteritems() %}
+{%- if k == 'backup' %}
+{%- set k = 'monitor-backup' %}
+{%- endif %}
+{%- if not isinstance(v, list) %}
+{%- set v = [v] %}
+{%- endif %}
+{%- for v in v %}
+  --{{k}}={{v}}
+{%- endfor %}
+{%- endfor %}
+
+[directory]
+plugin = ${:etc}/plugin
+
+[monitor-neo-health]
+<= monitor-promise-base
+module = check_neo_health
+name = ${:_buildout_section_name_}.py
+config-neoctl = ${neoctl:wrapper-path}
+{%- if bang_on_problem != None %}
+config-bang-on-problem = {{ dumps(bang_on_problem) }}
+{%- endif %}
+{%- if periodicity != None %}
+config-periodicity = {{ dumps(periodicity) }}
+{%- endif %}

-[neo-admin-promise]
+[monitor-instance-parameter]
-recipe = slapos.cookbook:check_port_listening
+monitor-httpd-port = {{ slapparameter_dict.get('admin', 2050) + 1 }}
-hostname = ${neo-admin:ip}
+monitor-title = neo
-port = ${neo-admin:port}
+password = {{ slapparameter_dict['monitor-passwd'] }}
-path = ${directory:promises}/neo-admin-promise

[logrotate-admin]
< = logrotate-entry-base

@@ -37,3 +69,4 @@ command-line =
  --key ${directory:etc}/neo.key
{%- endif %}
wrapper-path = ${directory:bin}/neoctl
+{%- endif %}
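The extra-options loop above turns the monitor request parameter into neoadmin command-line switches: the backup key is renamed to monitor-backup and list values are repeated, one switch per entry (bang-on-problem and periodicity are popped beforehand and fed to the monitor-neo-health promise instead). A rough Python rendering of that expansion, with an illustrative function name and input:

    # Rough Python rendering of the jinja loop above (names are illustrative,
    # this function is not part of the recipe).
    def monitor_extra_options(monitor_dict):
        options = []
        for k, v in monitor_dict.items():
            if k == 'backup':
                k = 'monitor-backup'
            if not isinstance(v, list):
                v = [v]
            for value in v:
                options.append('--%s=%s' % (k, value))
        return options

    print(monitor_extra_options({'backup': ['backup1', 'backup2']}))
    # -> ['--monitor-backup=backup1', '--monitor-backup=backup2']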
@@ -7,7 +7,7 @@
    ],
    "properties": {
      "cluster": {
-        "description": "Cluster unique identifier. Your last line of defense against mixing up NEO clusters and corrupting your data. Choose a unique value for each of your cluster.",
+        "description": "Cluster unique identifier. Your last line of defense against mixing up NEO clusters and corrupting your data. Choose a unique value for each of your cluster. Space not allowed.",
        "type": "string"
      },
      "partitions": {
@@ -28,6 +28,25 @@
        "description": "Master nodes in the cluster to backup.",
        "type": "string"
      },
+      "monitor": {
+        "description": "Parameters for monitoring.",
+        "properties": {
+          "backup": {
+            "description": "List of cluster names that are expected to backup this cluster.",
+            "type": "array",
+            "items": {
+              "type": "string"
+            },
+            "uniqueItems": true
+          },
+          "periodicity": {
+            "description": "Periodicity in minutes at which the cluster health is checked.",
+            "default": 10,
+            "type": "number"
+          }
+        },
+        "type": "object"
+      },
      "sla-dict": {
        "description": "[NEO SR only] Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references ('node-0', 'node-1', ...). The prefix 'node-' is mandatory and the number must start from 0. The total number of nodes here must be equal to the length of node-list.",
        "additionalProperties": {

@@ -58,6 +77,7 @@
      "items": {
        "description": "Dictionary containing parameters required to configure individual nodes.",
        "default": {},
+        "additionalProperties": false,
        "properties": {
          "admin": {
            "description": "Port of admin node. 0 to disable.",
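With the new monitor property, a NEO request can describe which clusters are expected to back it up and how often its health should be checked. An illustrative parameter dictionary exercising it (cluster and backup names are made up), serialised the way the ERP5 test in this changeset passes instance parameters:

    import json

    # Illustrative only: cluster/backup names are invented; 'periodicity' is in
    # minutes and defaults to 10 per the schema above.
    parameter_dict = {
        'cluster': 'neo-example',
        'monitor': {
            'backup': ['neo-example-backup'],
            'periodicity': 10,
        },
    }
    instance_parameter = {'_': json.dumps(parameter_dict)}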
+{%- if slapparameter_dict.get('master') != 0 %}
[buildout]
parts +=
-  neo-master-promise
  logrotate-master

[neo-master]

@@ -23,14 +23,9 @@ autostart = {{ slapparameter_dict['autostart'] }}
# this default value is required.
masters = {{ dumps(slapparameter_dict['masters']) }}

-[neo-master-promise]
-recipe = slapos.cookbook:check_port_listening
-hostname = ${neo-master:ip}
-port = ${neo-master:port}
-path = ${directory:promises}/neo-master-promise

[logrotate-master]
< = logrotate-entry-base
name = neo-master
log = ${neo-master:logfile}
post = {{ bin_directory }}/slapos-kill -s RTMIN+1 -- ${neo-master:binary} -l ${:log}
+{%- endif %}
@@ -51,7 +51,7 @@ recipe = slapos.cookbook:publish.serialised
# TODO: make port a partition parameter
ip = {{ "[%s]" % list(ipv6_set)[0] if ipv6_set else list(ipv4_set)[0] }}
{% set admin = slapparameter_dict.get('admin', 2050) -%}
-{% set master = slapparameter_dict.get('master', 2051) -%}
+{% set master = slapparameter_dict.get('master', 2052) -%}
{% if master -%}
port-master = {{ master }}
master = ${:ip}:${:port-master}

@@ -61,6 +61,7 @@ master =
{% if admin -%}
port-admin = {{ admin }}
admin = ${:ip}:${:port-admin}
+monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
{% else -%}
admin =
{% endif -%}

@@ -119,7 +120,6 @@ post = {{ bin_directory }}/slapos-kill -s RTMIN+1 -- {{ bin_directory }}/neostor
[directory]
recipe = slapos.cookbook:mkdirectory
-promises = ${buildout:directory}/etc/promises
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
var = ${buildout:directory}/var
@@ -162,11 +162,11 @@ context =
[buildout]
extends =
  {{ logrotate_cfg }}
-{%- if master %}
-  {{ master_cfg }}
-{%- endif %}
{%- if admin %}
  {{ admin_cfg }}
{%- endif %}
+{%- if master %}
+  {{ master_cfg }}
+{%- endif %}
parts +=
  {{ '\n '.join(part_list) }}
@@ -35,13 +35,10 @@ parts =
{{- assert(not sla_dict, sla_dict) }}
{% endmacro -%}

-{% macro request_neo(parameter_dict, software_type, prefix='node-') -%}
+{% macro request_neo(parameter_dict, software_type, prefix='node-', monitor_base_url_dict=None) -%}
{% set section_id_list = [] -%}
[{{ prefix }}request-common]
<= request-common-base
-return =
-  master
-  admin
config-masters = {{ '${' ~ prefix ~ 'cluster:masters}' }}
config-cluster = {{ parameter_dict['cluster'] }}
{% set replicas = parameter_dict.get('replicas', 0) -%}

@@ -56,24 +53,42 @@ config-upstream-cluster = {{ dumps(parameter_dict.get('upstream-cluster', '')) }
config-upstream-masters = {{ dumps(parameter_dict.get('upstream-masters', '')) }}
software-type = {{ software_type }}
{% set node_list = parameter_dict.get('node-list', ({},)) -%}
{% set storage_count = [] -%}
{% for node in node_list -%}
{% do storage_count.append(node.get('storage-count', 1)) -%}
{% endfor -%}
config-autostart = {{ dumps(sum(storage_count)) }}
-{% do assert(replicas < len(node_list)) -%}
+{%- do assert(replicas < len(node_list)) %}
-{% for i, node in enumerate(node_list) -%}
+{%- set admin_list = [] %}
-{% set section_id = prefix ~ i -%}
+{%- for i, node in enumerate(node_list) %}
-{% do section_id_list.append(section_id) -%}
+{%- set section_id = prefix ~ i %}
+{%- do section_id_list.append(section_id) %}
+{%- if admin_list %}
+{%- do node.setdefault('admin', 0) %}
+{%- endif %}

[{{section_id}}]
<= {{ prefix }}request-common
name = {{ section_id }}
-{% for k, v in node.iteritems() -%}
+return =
+  master
+  admin
+{%- if node.get('admin') != 0 %}
+  monitor-base-url
+{%- if monitor_base_url_dict != None %}
+{%- do monitor_base_url_dict.__setitem__('neo',
+  '${' ~ section_id ~ ':connection-monitor-base-url}') %}
+{%- endif %}
+{%- do admin_list.append(section_id) %}
+config-monitor-passwd = ${monitor-htpasswd:passwd}
+config-monitor = {{ dumps(parameter_dict.get('monitor', {})) }}
+{%- endif %}
+{%- for k, v in node.iteritems() %}
config-{{ k }} = {{ dumps(v) }}
-{% endfor -%}
+{%- endfor %}
{{ sla(section_id) }}
-{% endfor -%}
+{%- endfor %}
+{%- do assert(len(admin_list) == 1, admin_list) %}

[{{section(prefix ~ 'cluster')}}]
recipe = slapos.cookbook:neoppod.cluster
@@ -112,19 +112,19 @@ adapter-context =
[root-common]
<= download-base-neo
-md5sum = 15fa47a59cc3019f59612aaf33bd9ec5
+md5sum = ccc6e33412259415ec6c3452d37b77cc

[instance-neo-admin]
<= download-base-neo
-md5sum = ce0d9ff9e899bb706351a99df29238a9
+md5sum = 87670ddc6b5d2007dac1b6d2ba86d168

[instance-neo-master]
<= download-base-neo
-md5sum = 4faee020eaf7cd495cd6210dfa4eb0c1
+md5sum = 9f27195d770b2f57461c60a82c851ab9

[instance-neo]
<= download-base-neo
-md5sum = 5fc9fcaec3a5387625af34fe686097ae
+md5sum = d18e049d580720e733502b774b0d6790

[template-neo-my-cnf]
<= download-base-neo
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -32,6 +32,7 @@ import glob
import re
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -166,18 +167,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHash(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_process_list(self):
    hash_list = [
      'software_release/buildout.cfg',

@@ -204,7 +193,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_list]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHash(hash_file_list)
+      h = generateHashFromFiles(hash_file_list)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_name_list)
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -36,6 +36,7 @@ import requests
import plantuml
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -154,16 +155,6 @@ class TestSimpleDiagram(PlantUMLTestCase):
class ServicesTestCase(PlantUMLTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_hashes(self):
    hash_files = [

@@ -182,7 +173,7 @@ class ServicesTestCase(PlantUMLTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
-import psutil
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -54,17 +53,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_hashes(self):
    hash_files = [

@@ -82,7 +70,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -35,9 +35,8 @@ import StringIO
import subprocess
import json
-import psutil
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

SLAPOS_TEST_IPV4 = os.environ['SLAPOS_TEST_IPV4']
SLAPOS_TEST_IPV6 = os.environ['SLAPOS_TEST_IPV6']

@@ -83,17 +82,6 @@ class TestPortRedirection(Re6stnetTestCase):
    }, portredir_config[0])

class ServicesTestCase(Re6stnetTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  @classmethod
  def getInstanceParameterDict(cls):

@@ -115,7 +103,7 @@ class ServicesTestCase(Re6stnetTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
@@ -14,7 +14,7 @@
# not need these here).

[template-erp5]
filename = instance-erp5.cfg.in
-md5sum = 001affafc204b638615deea04c95cfdf
+md5sum = 70c8d3e4414f6f9f969c9641e840b52f

[template-balancer]
filename = instance-balancer.cfg.in
@@ -152,7 +152,7 @@ connection-url = smtp://127.0.0.2:0/
{% set ((name, server_dict),) = server_dict.items() -%}
{% do neo.append(server_dict.get('cluster')) -%}
{% do server_dict.update(cluster='${publish-early:neo-cluster}') -%}
-{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-') }}
+{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-', monitor_base_url_dict) }}
{% set client_dict = zodb_dict[name].setdefault('storage-dict', {}) -%}
{% for k in 'ssl', '_ca', '_cert', '_key' -%}
{% do k in server_dict and client_dict.setdefault(k, server_dict[k]) -%}

@@ -256,7 +256,7 @@ config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longreques
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }}
-config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', False)) }}
+config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
{% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
{% endif -%}
@@ -18,7 +18,7 @@ md5sum = c44a7481bb85e3258128afe3fcf23f44
[template-runner]
filename = instance-runner.cfg
-md5sum = a5e2ae493b78a0f42d61cc79a0b2e686
+md5sum = 48bff0b5f082f22d44966b1151f07133

[template-runner-import-script]
filename = template/runner-import.sh.jinja2
@@ -794,7 +794,7 @@ ip = $${slaprunner:ipv4}
server = $${:ip}:$${:port}
port = 39986
slapgrid-cp = slapgrid-cp
-slapgrid-cp-command = $${slaprunner:slapos} node instance --all --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-cp-log}
+slapgrid-cp-command = $${slaprunner:slapos} node instance --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-cp-log}
slapgrid-cp-log = $${runnerdirectory:home}/instance.log
slapgrid-cp-startretries = 0
slapgrid-sr = slapgrid-sr
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -38,6 +38,7 @@ import json
import psutil
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -53,17 +54,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
    return (os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg')), )

class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHashFromFiles(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_hashes(self):
    hash_files = [

@@ -89,7 +79,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_files]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHashFromFiles(hash_files)
+      h = generateHashFromFiles(hash_files)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_names)
@@ -43,6 +43,7 @@ setup(name=name,
    packages=find_packages(),
    install_requires=[
      'slapos.core',
+      'slapos.cookbook',
      'slapos.libnetworkcache',
      'erp5.util',
      'supervisor',
@@ -32,6 +32,7 @@ import glob
import ConfigParser
import utils
+from slapos.recipe.librecipe import generateHashFromFiles

# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):

@@ -58,18 +59,6 @@ class InstanceTestCase(utils.SlapOSInstanceTestCase):
class ServicesTestCase(InstanceTestCase):
-  @staticmethod
-  def generateHash(file_list):
-    import hashlib
-    hasher = hashlib.md5()
-    for path in file_list:
-      with open(path, 'r') as afile:
-        buf = afile.read()
-      hasher.update("%s\n" % len(buf))
-      hasher.update(buf)
-    hash = hasher.hexdigest()
-    return hash

  def test_process_list(self):
    hash_list = [
      'software_release/buildout.cfg',

@@ -91,7 +80,7 @@ class ServicesTestCase(InstanceTestCase):
      for path in hash_list]

    for name in expected_process_names:
-      h = ServicesTestCase.generateHash(hash_file_list)
+      h = generateHashFromFiles(hash_file_list)
      expected_process_name = name.format(hash=h)

      self.assertIn(expected_process_name, process_name_list)
@@ -78,7 +78,7 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5]
filename = instance-erp5.cfg.in
-md5sum = af5d9aeac2bae695220465a4348ae592
+md5sum = ff5e0d8d1ca167399fb438e890baf370

[template-zeo]
filename = instance-zeo.cfg.in
@@ -152,7 +152,7 @@ connection-url = smtp://127.0.0.2:0/
{% set ((name, server_dict),) = server_dict.items() -%}
{% do neo.append(server_dict.get('cluster')) -%}
{% do server_dict.update(cluster='${publish-early:neo-cluster}') -%}
-{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-') }}
+{{ root_common.request_neo(server_dict, 'zodb-neo', 'neo-', monitor_base_url_dict) }}
{% set client_dict = zodb_dict[name].setdefault('storage-dict', {}) -%}
{% for k in 'ssl', '_ca', '_cert', '_key' -%}
{% do k in server_dict and client_dict.setdefault(k, server_dict[k]) -%}

@@ -255,7 +255,7 @@ config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longreques
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }}
-config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', False)) }}
+config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
{% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
{% endif -%}
@@ -78,7 +78,7 @@ part-list =
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/instance.cfg
template = ${:_profile_base_location_}/instance.cfg.in
-md5sum = 7c47a85e310674e451db778d9e4383a6
+md5sum = b4fbd7dc8d7fda7dbd1b80f3de273ecf
mode = 0644
context =
  key application_location application:location

@@ -110,6 +110,7 @@ context =
  key template_lamp instance-lamp:output
  key template_mariadb template-mariadb:target
  key template_mariadb_initial_setup template-mariadb-initial-setup:target
+  key template_mysqld_wrapper template-mysqld-wrapper:rendered
  key template_my_cnf template-my-cnf:target
  key unixodbc_location unixodbc:location
  key openssl_location openssl:location
@@ -81,6 +81,7 @@ gzip-location = {{ gzip_location }}
mariadb-location = {{ mariadb_location }}
template-my-cnf = {{ template_my_cnf }}
template-mariadb-initial-setup = {{ template_mariadb_initial_setup }}
+template-mysqld-wrapper = {{ template_mysqld_wrapper }}
link-binary = {{ dumps(mariadb_link_binary) }}
mariadb-resiliency-after-import-script = {{ mariadb_resiliency_after_import_script }}
mariadb-slow-query-report-script = {{ mariadb_slow_query_report_script }}
@@ -136,15 +136,15 @@ pyparsing = 2.2.0
pytz = 2016.10
requests = 2.13.0
six = 1.12.0
-slapos.cookbook = 1.0.119
+slapos.cookbook = 1.0.121
-slapos.core = 1.4.26
+slapos.core = 1.4.27
slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.19
slapos.rebootstrap = 4.1
slapos.recipe.build = 0.41
slapos.recipe.cmmi = 0.10
-slapos.toolbox = 0.94
+slapos.toolbox = 0.95
stevedore = 1.21.0
subprocess32 = 3.5.3
unicodecsv = 0.14.1

@@ -195,7 +195,7 @@ enum34 = 1.1.6
# Required by:
# slapos.toolbox==0.94
-erp5.util = 0.4.60
+erp5.util = 0.4.61

# Required by:
# slapos.toolbox==0.94

@@ -218,8 +218,8 @@ pyrsistent = 0.14.5
ipaddress = 1.0.18

# Required by:
-# slapos.cookbook==1.0.119
+# slapos.cookbook==1.0.121
-jsonschema = 3.0.0a3
+jsonschema = 3.0.2

# Required by:
# slapos.toolbox==0.94