Commit 9f405777 authored by Jérome Perrin

Update Release Candidate

parents eba20faa a0611ba6
......@@ -5,6 +5,7 @@ parts =
[alsa]
# Contains libasound
recipe = slapos.recipe.cmmi
shared = true
url = ftp://ftp.alsa-project.org/pub/lib/alsa-lib-1.1.3.tar.bz2
md5sum = eefe5992567ba00d6110a540657aaf5c
configure-options =
......
[buildout]
extends =
../git/buildout.cfg
parts =
depot_tools
[depot_tools]
recipe = slapos.recipe.build:gitclone
repository = https://chromium.googlesource.com/chromium/tools/depot_tools.git
branch = master
revision = e023d4482012d89690f6a483e877eceb47c4501e
git-executable = ${git:location}/bin/git
......@@ -73,7 +73,7 @@ environment =
PATH=${pkgconfig:location}/bin:${gettext:location}/bin:${glib:location}/bin:${xz-utils:location}/bin:${flex:location}/bin:${bison:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${libffi:location}/lib -Wl,-rpath=${libffi:location}/lib -lffi
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${libffi:location}/lib -Wl,-rpath=${libffi:location}/lib -lffi -L${zlib:location}/lib/ -Wl,-rpath=${zlib:location}/lib/
GLIB_CFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include
GLIB_LIBS=-L${glib:location}/lib -lglib-2.0 -lintl -lgobject-2.0
FFI_CFLAGS=-I${libffi:location}/include
......
......@@ -11,20 +11,21 @@ url = https://rubygems.org/rubygems/rubygems-2.4.8.zip
ruby-executable = ${ruby:location}/bin/ruby
gems =
msgpack==1.3.3
cool.io==1.4.6
concurrent-ruby==1.1.9
cool.io==1.7.1
dig_rb==1.0.1
http_parser.rb==0.6.0
sigdump==0.2.4
serverengine==2.2.3
strptime==0.1.9
serverengine==2.2.4
strptime==0.2.5
thread_safe==0.3.6
tzinfo==1.2.9
tzinfo==2.0.4
tzinfo-data==1.2021.1
yajl-ruby==1.4.1
fluentd==0.14.14
fluentd==1.8.1
httpclient==2.8.3
json==2.5.1
td-client==0.8.85
fluent-plugin-td==0.10.29
td-client==1.0.8
fluent-plugin-td==1.1.0
gem-options =
--with-icu-lib=${icu:location}/lib/
--with-icu-dir=${icu:location}/
......
......@@ -18,8 +18,8 @@ parts =
[git]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.kernel.org/pub/software/scm/git/git-2.25.1.tar.xz
md5sum = 92bf65673b4fc08b64108d807f36f4d9
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.33.0.tar.xz
md5sum = 0990ff97af1511be0d9f0d3223dd4359
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
......
......@@ -46,8 +46,8 @@ environment-extra =
[golang1.16]
<= golang-common
url = https://golang.org/dl/go1.16.8.src.tar.gz
md5sum = 92e69a5e1bb6ea5e7498d12d03160032
url = https://golang.org/dl/go1.16.10.src.tar.gz
md5sum = 49f0a54f0bdcba297bac194d8dafe431
# go1.16 needs go1.4 to bootstrap
environment-extra =
......@@ -55,8 +55,8 @@ environment-extra =
[golang1.17]
<= golang-common
url = https://golang.org/dl/go1.17.1.src.tar.gz
md5sum = a78205838c2a7054522cb91c12982f26
url = https://golang.org/dl/go1.17.3.src.tar.gz
md5sum = 3ea82e5966275f405f0db4f52511bb6e
# go1.17 needs go1.4 to bootstrap
environment-extra =
......
[buildout]
extends =
../patchelf/buildout.cfg
../zlib/buildout.cfg
../alsa/buildout.cfg
../libpng/buildout.cfg
../freetype/buildout.cfg
../xorg/buildout.cfg
parts =
java-jdk
......@@ -24,8 +26,8 @@ configure-command = :
make-binary = :
post-install =
mv * %(location)s
for file in %(location)s/bin/* ; do
for file in %(location)s/bin/* %(location)s/lib/*.so ; do
echo appending rpath to $file
${patchelf:location}/bin/patchelf --set-rpath %(rpath)s $file
done
rpath = ${zlib:location}/lib:@@LOCATION@@/lib
rpath = ${zlib:location}/lib:${alsa:location}/lib:${freetype:location}/lib:${libpng:location}/lib:${libXrender:location}/lib:${libXtst:location}/lib:${libX11:location}/lib:${libXau:location}/lib:${libXext:location}/lib:${libXdmcp:location}/lib:${libXi:location}/lib:${libxcb:location}/lib:@@LOCATION@@/lib:@@LOCATION@@/lib/server
......@@ -8,12 +8,23 @@ parts =
[lua]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.lua.org/ftp/lua-5.3.1.tar.gz
md5sum = 797adacada8d85761c079390ff1d9961
url = http://www.lua.org/ftp/lua-5.4.3.tar.gz
md5sum = ef63ed2ecfb713646a7fcc583cf5f352
configure-command = true
make-options =
"$(uname -sr 2>/dev/null|grep -Eq '^Linux' && echo linux || echo posix)"
MYCFLAGS="-I${readline:location}/include"
MYCFLAGS="-I${readline:location}/include -fPIC"
MYLDFLAGS="-L${readline:location}/lib -Wl,-rpath=${readline:location}/lib"
make-targets =
install INSTALL_TOP=@@LOCATION@@
INSTALL_TOP=@@LOCATION@@
post-install =
mkdir -p %(location)s/lib/pkgconfig
{
make pc INSTALL_TOP=%(location)s
echo '%(pc)s'
} > %(location)s/lib/pkgconfig/lua.pc
pc =
Name: Lua
Description: Lua language engine
Version: $${version}
Libs: -L$${libdir} -llua
Cflags: -I$${includedir}
......@@ -27,6 +27,12 @@ version = v12.18.3
md5sum = 28bf6a4d98b238403fa58a0805f4a979
PATH = ${pkgconfig:location}/bin:${python2.7:location}/bin:%(PATH)s
[nodejs-8.9.4]
<= nodejs-base
version = v8.9.4
md5sum = 4ddc1daff327d7e6f63da57fdfc24f55
PATH = ${pkgconfig:location}/bin:${python2.7:location}/bin:%(PATH)s
[nodejs-8.6.0]
<= nodejs-base
version = v8.6.0
......
......@@ -20,6 +20,6 @@ egg = nxdtest
[nxdtest-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/nxdtest.git
revision = 0ad45a9c
revision = 9f413221
location = ${buildout:parts-directory}/nxdtest
git-executable = ${git:location}/bin/git
......@@ -7,5 +7,5 @@ parts =
[perl-Image-ExifTool]
<= perl-CPAN-package
module = Image/Image-ExifTool
version = 12.00
md5sum = 8c3f99a9a8d110ba340ee3f7063c940e
version = 12.30
md5sum = 1f5d66d62418c8b29eb0c0b7fd272b28
......@@ -4,6 +4,7 @@ extends =
../automake/buildout.cfg
../boost-lib/buildout.cfg
../libtool/buildout.cfg
../lua/buildout.cfg
../make/buildout.cfg
../openssl/buildout.cfg
../pkgconfig/buildout.cfg
......@@ -19,17 +20,16 @@ parts =
[powerdns]
recipe = slapos.recipe.cmmi
url = http://downloads.powerdns.com/releases/pdns-4.2.1.tar.bz2
md5sum = b5f3998a3bc438b905c72c0473408839
url = https://downloads.powerdns.com/releases/pdns-4.5.1.tar.bz2
md5sum = 5f0ba98ca59bc3d84cfd09097c8b9953
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-boost=${boost-lib:location}
--with-libcrypto=${openssl:location}
--with-modules="geoip"
--with-dynmodules=""
--without-lua
--disable-lua-records
pkg_config_depends = ${yaml-cpp:location}/lib/pkgconfig
pkg_config_depends = ${yaml-cpp:location}/lib/pkgconfig:${lua:location}/lib/pkgconfig
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:${libmaxminddb:location}/bin:${libtool:location}/bin:${make:location}/bin:${pkgconfig:location}/bin:%(PATH)s
LDFLAGS=-L${boost-lib:location}/lib -Wl,-rpath=${boost-lib:location}/lib -L${libmaxminddb:location}/lib -Wl,-rpath=${libmaxminddb:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${yaml-cpp:location}/lib -Wl,-rpath=${yaml-cpp:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -39,5 +39,3 @@ environment =
YAML_LIBS = -lyaml-cpp
make-options =
LIBTOOL=libtool
make-target =
install
......@@ -40,7 +40,7 @@ patch-options = -p1
patches =
${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
${:_profile_base_location_}/disabled_module_list.patch#71ad30d32bcdbc50c19cf48675b1246e
${:_profile_base_location_}/disabled_module_list.patch#e038a8016475574c810cbaaf0e42f4ac
${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
url =
http://www.python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tar.xz
......
......@@ -5,7 +5,7 @@
# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
+disabled_module_list = ['_bsddb', '_tkinter', 'bsddb185']
+disabled_module_list = ['_bsddb', '_tkinter', 'bsddb185', 'nis']
def add_dir_to_list(dirlist, dir):
"""Add the directory 'dir' to the list 'dirlist' (at the front) if
......@@ -89,13 +89,13 @@ md5sum = 6097fdb9cbab47c96471274b9044e983
[debian-amd64-buster-netinst.iso]
<= debian-amd64-netinst-base
version = 10.10.0
md5sum = c7d0e562e589e853b5d00563b4311720
version = 10.11.0
md5sum = 9d7b9cc850464d60ac174787c53e8f3f
[debian-amd64-bullseye-netinst.iso]
<= debian-amd64-netinst-base
version = 11.0.0
md5sum = 499953266841cae41612310e65659456
version = 11.1.0
md5sum = b710c178eb434d79ce40ce703d30a5f0
alternate-url = https://cdimage.debian.org/cdimage/release/current/${:arch}/iso-cd/${:filename}
[debian-amd64-netinst.iso]
......
......@@ -76,8 +76,6 @@ post-install =
[dbus-glib]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
environment +=
CPPFLAGS=-I${libexpat:location}/include -I${dbus:location}/include/dbus-1.0 -I${dbus:location}/lib/dbus-1.0/include
LDFLAGS=-L${libexpat:location}/lib -L${gettext:location}/lib -Wl,-rpath=${zlib:location}/lib -L${dbus:location}/lib -Wl,-rpath=${dbus:location}/lib
LD_LIBRARY_PATH=${dbus:location}/lib
DBUS_CFLAGS=-I${dbus:location}/include/dbus-1.0 -I${dbus:location}/lib/dbus-1.0/include
......@@ -86,9 +84,7 @@ environment +=
pre-configure =
sed -i 's#/opt/slapos/parts/dbus/lib/libdbus-1.la#${dbus:location}/lib/libdbus-1.la#' ${dbus-glib:location}/lib/libdbus-glib-1.la
environment +=
PATH=${pkgconfig:location}/bin:${python:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${dbus:location}/lib
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${dbus:location}/lib -Wl,-rpath=${dbus:location}/lib
PYTHON=${python:location}/bin/python${python:version}
PYTHON_INCLUDES=-I${python:location}/include/python${python:version}
PYTHON_LIBS=-L${python:location}/lib -lpython${python:version} -lpthread -ldl -lutil -lm
......@@ -114,34 +110,21 @@ make-options =
[gobject-introspection]
pre-configure =
ln -s ${python:location}/bin/python${python:version} ${python:location}/bin/python2.
sed -i 's#!/opt/slapos/parts/python2.7/bin/python2.7#!${python2.7:location}/bin/python2.7#' ${python2.7:location}/bin/python-config
libtoolize -c -f
aclocal -I${pkgconfig:location}/share/aclocal -I${gettext:location}/share/aclocal -I${libtool:location}/share/aclocal -I${glib:location}/share/aclocal
./autogen.sh
sed -i 's#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#!${python:location}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config
configure-options +=
--enable-shared
environment +=
PATH=${autoconf:location}/bin:${automake:location}/bin:${pkgconfig:location}/bin:${libtool:location}/bin:${intltool:location}/bin:${gettext:location}/bin:${glib:location}/bin:${flex:location}/bin:${bison-go:location}/bin:%(PATH)s
GIR_DIR=${buildout:parts-directory}/${:_buildout_section_name_}/share/gir-1.0
CPPFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include -I${python:location}/include/python${python:version}
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${libffi:location}/lib -Wl,-rpath=${libffi:location}/lib -lffi -L${python:location}/lib
ACLOCAL_PATH=${pkgconfig:location}/share/aclocal:${gettext:location}/share/aclocal:${libtool:location}/share/aclocal:${glib:location}/share/aclocal:${intltool:location}/share/aclocal
M4=${m4:location}/bin/m4
PERL5LIB=${perl:location}/lib/5.28.1/
post-install =
sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python-config
sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config
rm -rf ${bison-go:location}
[pygobject3]
pre-configure +=
sed -i 's#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#!${python:location}/bin/python${python:version}#' ${python:location}/bin/python-config
environment +=
CPPFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include -I${gettext:location}/include -I${libffi:location}/include -I${python:location}/include/python${python:version}
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${python:location}/lib
sed -i 's#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#!${python:location}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config
post-install =
sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python-config
sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config
[ncurses]
configure-options =
......@@ -189,3 +172,9 @@ configure-options +=
--prefix=${buildout:rootdir}/parts/${:_buildout_section_name_}
environment +=
DESTDIR=${buildout:destdir}
[swig]
configure-options +=
--prefix=${buildout:rootdir}/parts/${:_buildout_section_name_}
environment +=
DESTDIR=${buildout:destdir}
......@@ -14,6 +14,7 @@ extends =
parts =
# keep neoppod first and in parts so that ZODB is built correctly
neoppod-develop
neoppod
# for instance
......
......@@ -25,6 +25,7 @@ configure-options =
--without-python
--without-x
--without-jpg
--without-dbus
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:${gettext:location}/bin:${libtool:location}/bin:${m4:location}/bin:${bzip2:location}/bin:%(PATH)s
CFLAGS=
......@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '1.0.212'
version = '1.0.214'
name = 'slapos.cookbook'
long_description = open("README.rst").read()
......@@ -110,7 +110,6 @@ setup(name=name,
'generic.kumofs = slapos.recipe.generic_kumofs:Recipe',
'generic.memcached = slapos.recipe.generic_memcached:Recipe',
'generic.mysql.wrap_update_mysql = slapos.recipe.generic_mysql:WrapUpdateMySQL',
'generic.varnish = slapos.recipe.generic_varnish:Recipe',
'gitinit = slapos.recipe.gitinit:Recipe',
'haproxy = slapos.recipe.haproxy:Recipe',
'ipv4toipv6 = slapos.recipe.6tunnel:FourToSix',
......@@ -166,7 +165,6 @@ setup(name=name,
'squid = slapos.recipe.squid:Recipe',
'sshkeys_authority = slapos.recipe.sshkeys_authority:Recipe',
'sshkeys_authority.request = slapos.recipe.sshkeys_authority:Request',
'stunnel = slapos.recipe.stunnel:Recipe',
'switch-softwaretype = slapos.recipe.switch_softwaretype:Recipe',
'symbolic.link = slapos.recipe.symbolic_link:Recipe',
'tidstorage = slapos.recipe.tidstorage:Recipe',
......
generic_varnish
===============
This recipe creates a varnish instance dedicated to ERP5, with a web checker[1]
set up.
How to Use generic_varnish?
============================
On the slap console, you can instantiate varnish like this::
instance = request(
software_type='varnish',
partition_parameter_kw={
'backend-url':'https://[your_backend_address]:your_backend_port',
'web-checker-frontend-url':'http://www.example.com',
'web-checker-mail-address':'web-checker-result@example.com',
'web-checker-smtp-host':'mail.example.com',
}
)
backend-url is the backend URL that varnish will cache.
web-checker-frontend-url is the entry-point URL from which web checker will
check the HTTP headers of all the pages of the web site.
web-checker-mail-address is the email address to which web checker will send
the HTTP cache checking result.
web-checker-smtp-host is the SMTP server used to send the web checker
result.
[Note]
When web-checker-* parameters are not given, web_checker will be disabled.
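A quick way to check that caching works after instantiation is to look at the
X-Cache header set in vcl_deliver (a minimal sketch, not part of the recipe;
the address below is hypothetical and must be replaced by the instance's
varnish ip and server port)::

  import requests

  # hypothetical address: use the instance's varnish_ip / varnishd_server_port
  varnish_url = 'http://10.0.0.1:6081/'
  resp = requests.get(varnish_url)
  print(resp.headers.get('X-Cache'))  # "MISS" on a cold cache, the hit count once cached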
References
==========
[1] web_checker (part of erp5.util), a web site HTTP cache header checking tool:
http://pypi.python.org/pypi/erp5.util
##############################################################################
#
# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import re
from slapos.recipe.librecipe import GenericSlapRecipe
class Recipe(GenericSlapRecipe):
"""
Instantiate varnish daemon
TODO:
- use varnish3.x and replace .vcl for it
"""
def _install(self):
ip = self.options['ip']
backend_url = self.options.get('backend-url',
# BBB: Peeking in partition parameters directly. Eew.
self.parameter_dict.get('backend-url',
self.parameter_dict.get('tidstorage-url') # BBB
)
)
backend_server, backend_port = self._getBackendServer(backend_url)
path_list = []
if backend_url.startswith('https://'):
config = dict(
stunnel_binary=self.options['stunnel-binary'],
stunnel_server=ip,
stunnel_port=int(self.options['stunnel-port']),
stunnel_pid_file=self.options['stunnel-pid-file'],
stunnel_conf_file=self.options['stunnel-conf-file'],
shell_path=self.options['shell-path'],
backend_server=backend_server.replace('[', '').replace(']', ''),
backend_port=backend_port,
)
path_list.append(self.createExecutable(self.options['stunnel-wrapper'],
self.substituteTemplate(self.getTemplateFilename('stunnel.in'),
config)))
path_list.append(self.createFile(self.options['stunnel-conf-file'],
self.substituteTemplate(self.getTemplateFilename('stunnel.conf.in'),
config)))
backend_server = ip
backend_port = int(self.options['stunnel-port'])
varnishd_manager_port = int(self.options['manager-port'])
varnishd_server_port = int(self.options['server-port'])
config = dict(
varnishd_binary=self.options['varnishd-binary'],
varnish_ip=ip,
varnishlog_binary=self.options['varnishlog-binary'],
varnishd_manager_port=varnishd_manager_port,
varnishd_server_port=varnishd_server_port,
varnishd_pid_file=self.options['pid-file'],
varnish_instance_name=self.options['varnish-instance-name'],
varnish_data=self.options['varnish-data'],
gcc_location=self.options['gcc-location'],
shell_path=self.options['shell-path'],
vcl_file=self.options['vcl-file'],
backend_port=backend_port,
backend_server=backend_server,
)
path_list.append(self.createExecutable(self.options['varnishd-wrapper'],
self.substituteTemplate(self.getTemplateFilename('varnishd.in'),
config)))
path_list.append(self.createExecutable(self.options['varnishlog-wrapper'],
self.substituteTemplate(self.getTemplateFilename('varnishlog.in'),
config)))
path_list.append(self.createFile(self.options['vcl-file'],
self.substituteTemplate(self.getTemplateFilename('default.vcl.in'),
config)))
return path_list
def _getBackendServer(self, url):
r = re.compile('\/\/(\[.+\]|[\d.]+):(\d*)')
result = r.search(url)
ip = result.groups()[0]
port = result.groups()[1]
return (ip, port)
#This is a basic VCL configuration file for varnish. See the vcl(7)
#man page for details on VCL syntax and semantics.
#
#Default backend definition. Set this to point to your content
#server.
#
backend default {
.host = "%(backend_server)s";
.port = "%(backend_port)s";
.probe = {
.timeout = 30s;
.interval = 5s;
.window = 4;
.threshold = 3;
.request =
"OPTIONS /erp5/getId HTTP/1.1"
"Host: %(backend_server)s:%(backend_port)s"
"Accept-Encoding: identity"
"Connection: close"
"User-Agent: Varnish";
}
}
# Called at the beginning of a request, after the complete request has been received and parsed
sub vcl_recv {
# Pass any requests that Varnish does not understand straight to the backend.
if (req.request != "GET" &&
req.request != "HEAD" &&
req.request != "PUT" &&
req.request != "POST" &&
req.request != "TRACE" &&
req.request != "OPTIONS" &&
req.request != "PURGE" &&
req.request != "DELETE") {
/* Non-RFC2616 or CONNECT which is weird. */
return (pipe);
}
# Pass anything other than GET and HEAD directly.
if (req.request != "GET" && req.request != "HEAD") {
return (pass);
}
if (req.http.Authorization) {
/* Not cacheable by default */
return (pass);
}
# No need to have cookies for static resources
if (req.url ~ "\.(css|gif|ico|jpg|js|png)$") {
unset req.http.Cookie;
}
# Remove bogus cookies
if (req.http.Cookie) {
set req.http.Cookie = regsuball(req.http.Cookie, "(^|; ) *__utm.=[^;]+;? *", "\1");
set req.http.Cookie = regsuball(req.http.Cookie, "(^|; ) *__ac_name=\x22\x22;? *", "\1");
set req.http.Cookie = regsuball(req.http.Cookie, "(^|; ) *__ac=\x22Og.3D.3D\x22;? *", "\1");
}
if (req.http.Cookie == "") {
unset req.http.Cookie;
}
if (req.http.Cookie && req.http.Cookie ~ "(^|; ) *__ac=") {
return (pass);
}
# We do not care about Accept-Encoding, because we don't use varnish as the front most HTTP server.
unset req.http.Accept-Encoding;
if (req.backend.healthy) {
set req.grace = 1h;
} else {
set req.grace = 1w;
}
return (lookup);
}
# Run after a pass in vcl_recv OR after a lookup that returned a hitpass
sub vcl_pass {
# unset If-Modified-Since to avoid reusing anonymous's browser cache
# after login.
unset req.http.If-Modified-Since;
return (pass);
}
# Creates the varnish cache key by the url
sub vcl_hash {
# We use url only for hash.
hash_data(req.url);
return (hash);
}
# Called after a cache lookup if the requested document was found in the cache
sub vcl_hit {
return (deliver);
}
# Called after a cache lookup if the requested document was not found in the cache
sub vcl_miss {
return (fetch);
}
# Called after a document has been successfully retrieved from the backend
sub vcl_fetch {
# Unset Expires that is always overridden by Cache-Control.
unset beresp.http.Expires;
# Unset Pragma that is obsolete.
unset beresp.http.Pragma;
# We only cache 200 (OK) and 304 (Not Modified) responses.
if (beresp.status != 200 && beresp.status != 304) {
set beresp.ttl = 0s;
}
# If max-age is 0 or not set, we want no browser cache.
if (beresp.ttl <= 0s) {
set beresp.http.Cache-Control = "no-store";
# Mark as hit_for_pass for the next 2 minutes.
set beresp.ttl = 120s;
return (hit_for_pass);
}
# We don't care about haproxy's cookie.
if (beresp.http.Set-Cookie && beresp.http.Set-Cookie !~ "^SERVERID=[^;]+; path=/$") {
return (hit_for_pass);
} else {
unset beresp.http.Set-Cookie;
}
# We set long enough grace for cachable objects.
set beresp.grace = 1w;
return (deliver);
}
# Called before a cached object is delivered to the client
sub vcl_deliver {
if (obj.hits > 0) {
set resp.http.X-Cache = obj.hits;
} else {
set resp.http.X-Cache = "MISS";
}
return (deliver);
}
foreground = yes
pid = %(stunnel_pid_file)s
debug = 4
[remote]
accept = %(stunnel_server)s:%(stunnel_port)s
client = yes
connect = %(backend_server)s:%(backend_port)s
sni = %(stunnel_server)s
#!%(shell_path)s
DAEMON_OPTS="%(stunnel_conf_file)s"
exec %(stunnel_binary)s ${DAEMON_OPTS} 2>&1
#!%(shell_path)s
DAEMON_OPTS="-F \
-a %(varnish_ip)s:%(varnishd_server_port)s \
-T %(varnish_ip)s:%(varnishd_manager_port)s \
-t 0 \
-p nuke_limit=500 \
-n %(varnish_instance_name)s \
-f %(vcl_file)s \
-s file,%(varnish_data)s/varnish_storage.bin,1G"
PIDFILE=%(varnishd_pid_file)s
# exporting PATH here so that we will pass the PATH variable to the subprocess
export PATH="%(gcc_location)s:$PATH"
exec %(varnishd_binary)s -P ${PIDFILE} ${DAEMON_OPTS} 2>&1
#!%(shell_path)s
DAEMON_OPTS="-a -n %(varnish_instance_name)s"
exec %(varnishlog_binary)s ${DAEMON_OPTS} "$@"
#!%(shell_path)s
DAEMON_OPTS="-F \
-a %(varnish_ip)s:%(varnishd_server_port)s \
-T %(varnish_ip)s:%(varnishd_manager_port)s \
-n %(varnish_instance_name)s \
-f %(vcl_file)s \
-s file,%(varnish_data)s/varnish_storage.bin,1G"
PIDFILE=%(varnishd_pid_file)s
# exporting PATH here so that it will pass the PATH variable to the subprocess
export PATH
# If unset, or set to "0" or "no", exit
if [ -z "${VARNISHLOG_ENABLED}" ] || \
[ "${VARNISHLOG_ENABLED}" = "0" ] || \
[ "${VARNISHLOG_ENABLED}" = "no" ]; then
exit 0;
fi
exec %(varnishlog_binary)s ${DAEMON_OPTS} 2>&1
......@@ -104,6 +104,7 @@ class Recipe(GenericBaseRecipe):
else:
paths.extend(self.createConfig())
paths.extend(self.createRunScript())
self.updateSuperuser()
return paths
......@@ -205,9 +206,24 @@ class Recipe(GenericBaseRecipe):
# encrypt the password to avoid storing in the logs
enc_password = 'md5' + hashlib.md5((password + user).encode()).hexdigest()
change_password_query = """ALTER USER "%s" ENCRYPTED PASSWORD '%s'""" % (user, enc_password)
self.runPostgresCommand(cmd="""ALTER USER "%s" ENCRYPTED PASSWORD '%s'""" % (user, enc_password))
pgdata = self.options['pgdata-directory']
if os.path.exists(os.path.join(pgdata, 'postmaster.pid')):
psql_binary = os.path.join(self.options['bin'], 'psql')
# connect to a running postgres daemon
p = subprocess.Popen([
psql_binary,
'-h', pgdata,
'-U', user,
'-d', self.options['dbname'],
],
stdin=subprocess.PIPE)
p.communicate((change_password_query + '\n').encode())
if p.returncode:
raise UserError("Error updating password")
else:
self.runPostgresCommand(cmd=change_password_query)
def runPostgresCommand(self, cmd):
"""\
......
......@@ -24,15 +24,17 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import ast
import re
import logging, os
import zc.buildout.easy_install
from pprint import pformat
from slapos.recipe.librecipe import GenericBaseRecipe
from zc.buildout import UserError
script_template = '''# This script is auto generated by slapgrid, do not edit!
import sys
sys.path[0:0] = %(path)s
sys.path[0:0] = %(paths)s
extra_config_dict = %(config)s
......@@ -54,6 +56,24 @@ for module in list(sys.modules):
%(content)s
'''
execfile_template = """\
def _(path):
global _
del _
with open(path) as f:
return compile(f.read(), path, 'exec')
exec(_(%r))"""
def isPythonName(name):
try:
x, = ast.parse(name).body
if type(x) is ast.Expr:
x = x.value
return type(x) is ast.Name and x.id == name
except Exception:
pass
class Recipe(GenericBaseRecipe):
_WORKING_SET_CACHE_NAME = "slapos.cookbook_pplugin_ws_cache"
......@@ -64,6 +84,35 @@ class Recipe(GenericBaseRecipe):
options['develop-eggs-directory'] = buildout_section['develop-eggs-directory']
super(Recipe, self).__init__(buildout, name, options)
filepath = options.get('file')
module = options.get('module')
klass = options.get('class')
if klass == 'RunPromise':
klass = None
elif klass and not isPythonName(klass):
raise UserError("%r is not a valid class name" % klass)
if bool(module) == bool(filepath):
raise UserError("Either 'module' or 'file' is required but not both")
if module:
if not all(map(isPythonName, module.split('.'))):
raise UserError("%r is not a valid module name" % module)
if klass:
content = 'from %s import %s as RunPromise' % (module, klass)
else:
content = 'from %s import RunPromise' % module
else:
content = execfile_template % filepath
if klass:
content += '\n\nRunPromise = ' + klass
self.content = content
self.config_dict = {key[7:]: options[key]
for key in options
if key.startswith('config-')}
def _get_cache_storage(self):
"""Return a mapping where to store generated working sets.
from https://github.com/buildout/buildout/blob/master/zc.recipe.egg_/src/zc/recipe/egg/egg.py#L170
......@@ -111,26 +160,10 @@ class Recipe(GenericBaseRecipe):
else:
working_set = set()
regex = r"^[\w_\-\.\s]+$"
import_path = self.options.get('import', '').strip()
if import_path:
if not re.search(regex, import_path):
raise ValueError("Import path %r is not a valid" % import_path)
content_string = "from %s import RunPromise" % import_path
else:
# old parameter for compatibility
content_string = self.options['content'].strip()
if not re.search(regex, content_string):
raise ValueError("Promise content %r is not valid" % content_string)
config_dict = {key[7:]: self.options[key]
for key in self.options
if key.startswith('config-')}
return self.createFile(self.options['output'], script_template % {
'path': pformat([dist.location for dist in working_set], indent=2),
'content': content_string,
'config': pformat(config_dict, indent=2),
'paths': pformat(tuple(dist.location for dist in working_set), indent=2),
'config': pformat(self.config_dict, indent=2),
'content': self.content,
}, int(self.options.get('mode', '0644'), 8)),
update = install
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import signal
import errno
from slapos.recipe.librecipe import GenericBaseRecipe
def kill(pid_file, sig=signal.SIGUSR1):
if os.path.exists(pid_file):
with open(pid_file) as f:
pid = int(f.read().strip())
try:
os.kill(pid, sig)
except OSError as e:
if e.errno != errno.ESRCH: # No such process
raise e
os.unlink(pid_file)
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
self.isClient = self.optionIsTrue('client', default=False)
if self.isClient:
self.logger.info("Client mode")
else:
self.logger.info("Server mode")
conf = {}
for type_ in ['remote', 'local']:
for data in ['host', 'port']:
confkey, opt = ['%s%s%s' % (type_, i, data) for i in ['_', '-']]
conf[confkey] = self.options[opt]
pid_file = self.options['pid-file']
conf.update(pid_file=pid_file)
log_file = self.options['log-file']
conf.update(log=log_file)
if self.isClient:
template = self.getTemplateFilename('client.conf.in')
else:
template = self.getTemplateFilename('server.conf.in')
key = self.options['key-file']
cert = self.options['cert-file']
conf.update(key=key, cert=cert)
conf_file = self.createFile(
self.options['config-file'],
self.substituteTemplate(template, conf))
path_list.append(conf_file)
wrapper = self.createWrapper(
self.options['wrapper'],
(self.options['stunnel-binary'], conf_file),
)
path_list.append(wrapper)
# Reload configuration
kill(pid_file, signal.SIGHUP)
if 'post-rotate-script' in self.options:
path_list.append(self.createPythonScript(
self.options['post-rotate-script'],
__name__ + '.kill', (pid_file,)))
return path_list
foreground = yes
output = %(log)s
pid = %(pid_file)s
syslog = no
[service]
client = yes
accept = %(local_host)s:%(local_port)s
connect = %(remote_host)s:%(remote_port)s
foreground = yes
output = %(log)s
pid = %(pid_file)s
syslog = no
key = %(key)s
cert = %(cert)s
[service]
accept = %(remote_host)s:%(remote_port)s
connect = %(local_host)s:%(local_port)s
import os, shutil, tempfile, unittest
from slapos.recipe import promise_plugin
from slapos.test.utils import makeRecipe
from zc.buildout import UserError
from pprint import pformat
import stat, json
import six
......@@ -10,7 +11,7 @@ class TestPromisePlugin(unittest.TestCase):
def setUp(self):
self.tmp = tempfile.mkdtemp()
self.output = os.path.join(self.tmp, 'output.py')
self.options = options = {
self.options = {
'output': self.output,
'eggs': 'slapos.cookbook'
}
......@@ -18,9 +19,16 @@ class TestPromisePlugin(unittest.TestCase):
def tearDown(self):
shutil.rmtree(self.tmp)
def test_parameters(self):
self.options['mode'] = '0644'
self.options['import'] = 'slapos.promise.plugin.check_site_available'
def makeRecipe(self):
return makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
def installRecipe(self):
self.makeRecipe().install()
def setConfig(self):
self.options['config-param1'] = "YY^@12"
self.options['config-param2'] = "23'91'"
self.options['config-param3'] = None
......@@ -28,88 +36,102 @@ class TestPromisePlugin(unittest.TestCase):
in multi line
123444
"""
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
recipe.install()
def assertOutput(self, *expect):
self.assertTrue(os.path.exists(self.output))
with open(self.output, 'r') as f:
content = f.read()
self.assertIn("from slapos.promise.plugin.check_site_available import RunPromise", content)
self.assertEqual(stat.S_IMODE(os.stat(self.output).st_mode), int('644', 8))
expected_dict = dict(
param1=self.options['config-param1'],
param2=self.options['config-param2'],
param3=self.options['config-param3'],
param4=self.options['config-param4'],
)
self.assertIn('extra_config_dict = %s' % pformat(expected_dict, indent=2), content)
def test_no_module_set(self):
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
with self.assertRaises(KeyError):
recipe.install()
def test_default(self):
self.options['import'] = 'slapos.promise.plugin.check_site_available'
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
recipe.install()
self.assertTrue(os.path.exists(self.output))
for s in expect:
self.assertIn(s, content)
def assertConfig(self):
items = self.options.items()
expect = {k[7:] : v for k, v in items if k.startswith('config-')}
self.assertOutput("extra_config_dict = %s" % pformat(expect, indent=2))
def assertEmptyConfig(self):
self.assertOutput("extra_config_dict = %s" % ('{}' if six.PY3 else '{ }'))
def test_module(self):
self.options['module'] = 'slapos.promise.plugin.check_site_available'
self.installRecipe()
self.assertOutput("from %s import RunPromise" % self.options['module'])
self.assertEmptyConfig()
def test_file(self):
self.options['file'] = __file__
self.installRecipe()
self.assertOutput("exec(_(%r))" % self.options['file'])
self.assertEmptyConfig()
def test_module_and_parameters(self):
self.options['module'] = 'slapos.promise.plugin.check_site_available'
self.setConfig()
self.installRecipe()
self.assertOutput("from %s import RunPromise" % self.options['module'])
self.assertConfig()
def test_file_and_parameters(self):
self.options['file'] = __file__
self.setConfig()
self.installRecipe()
self.assertOutput("exec(_(%r))" % self.options['file'])
self.assertConfig()
def test_mode(self):
self.options['mode'] = '0644'
self.options['module'] = 'slapos.promise.plugin.check_site_available'
self.installRecipe()
self.assertEqual(stat.S_IMODE(os.stat(self.output).st_mode), int('644', 8))
with open(self.output) as f:
content = f.read()
self.assertIn("from slapos.promise.plugin.check_site_available import RunPromise", content)
self.assertIn('extra_config_dict = %s' % ('{}' if six.PY3 else '{ }'), content)
def test_module_and_class(self):
self.options['module'] = m = 'slapos.promise.plugin.check_site_available'
self.options['class'] = 'MyPromise'
self.installRecipe()
self.assertOutput("from %s import MyPromise as RunPromise" % m)
def test_file_and_class(self):
self.options['file'] = __file__
self.options['class'] = 'MyPromise'
self.installRecipe()
self.assertOutput("exec(_(%r))\n\nRunPromise = MyPromise" % __file__)
def test_no_module_or_file(self):
with self.assertRaises(UserError) as p:
self.makeRecipe()
msg = str(p.exception)
self.assertEqual(msg, "Either 'module' or 'file' is required but not both")
def test_module_and_file(self):
self.options['module'] = 'slapos.promise.plugin.check_site_available'
self.options['file'] = __file__
self.test_no_module_or_file()
def test_bad_parameters(self):
self.options['import'] = 'slapos.promise.plugin.check_site_available'
self.options['config-param1; print "toto"'] = """#xxxx"\nimport os; os.stat(f)"""
self.options['config-param2\n@domething'] = '"#$$*PPP\n\n p = 2*5; print "result is %s" % p'
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
recipe.install()
self.assertTrue(os.path.exists(self.output))
with open(self.output) as f:
content = f.read()
expected_param1 = r"""'param1; print "toto"': '#xxxx"\nimport os; os.stat(f)',"""
expected_param2 = r"""'param2\n@domething': '"#$$*PPP\n\n p = 2*5; print "result is %s" % p'"""
self.assertIn(expected_param1, content)
self.assertIn(expected_param2, content)
def test_bad_module_path(self):
self.options['import'] = 'slapos.promise.plugin.check_site_available; print "toto"'
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
with self.assertRaises(ValueError) as p:
recipe.install()
self.assertEqual(str(p.exception), "Import path %r is not a valid" % self.options['import'])
def test_bad_content(self):
self.options['content'] = 'from slapos.plugin.check_site_available import toto; print "toto"'
recipe = makeRecipe(
promise_plugin.Recipe,
options=self.options,
name='plugin')
with self.assertRaises(ValueError) as p:
recipe.install()
self.assertEqual(str(p.exception), "Promise content %r is not valid" % self.options['content'])
self.options['module'] = 'slapos.promise.plugin.check_site_available'
self.options.update((
('config-param1; print "toto"', '#xxxx"\nimport os; os.stat(f)'),
('config-param2\n@domething', '"#$$*PPP\np = 2*5; print "result=%s" % p')
))
self.installRecipe()
self.assertOutput(
r"""'param1; print "toto"': '#xxxx"\nimport os; os.stat(f)',""",
r"""'param2\n@domething': '"#$$*PPP\np = 2*5; print "result=%s" % p'"""
)
def test_bad_module(self):
self.options['module'] = 'slapos.promise.plugin.check_site_available; print "toto"'
with self.assertRaises(UserError) as p:
self.makeRecipe()
self.assertEqual(str(p.exception), "%r is not a valid module name" % self.options['module'])
def test_bad_file(self):
self.options['file'] = 'print "toto"'
self.installRecipe()
self.assertOutput(r"""exec(_('print "toto"'))""")
def test_bad_class(self):
self.options['class'] = 'MyPromise; print "toto"'
with self.assertRaises(UserError) as p:
self.makeRecipe()
self.assertEqual(str(p.exception), "%r is not a valid class name" % self.options['class'])
import unittest
import tempfile
import os
import shutil
import os.path
import tempfile
import textwrap
import unittest
import zc.buildout.testing
class PostgresTest(unittest.TestCase):
def setUp(self):
self.buildout = buildout = zc.buildout.testing.Buildout()
self.pgdata_directory = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.pgdata_directory)
self.services_directory = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.services_directory)
self.software_bin_dir = tempfile.mkdtemp()
# create fake programs
self.addCleanup(shutil.rmtree, self.software_bin_dir)
initdb = os.path.join(self.software_bin_dir, 'initdb')
with open(initdb, 'w') as f:
f.write(textwrap.dedent('''\
#!/bin/sh
if [ ! "$1" = -D ]
then
echo Wrong arguments, expecting -D datadir ... got: "$@"
exit 1
fi
mkdir "$2"
'''))
os.chmod(initdb, 0o775)
postgres = os.path.join(self.software_bin_dir, 'postgres')
with open(postgres, 'w') as f:
f.write(textwrap.dedent('''\
#!/bin/sh
exec cat > %s/postgres.sql
''') % os.path.join(self.pgdata_directory, 'pgdata'))
os.chmod(postgres, 0o775)
psql = os.path.join(self.software_bin_dir, 'psql')
with open(psql, 'w') as f:
f.write(textwrap.dedent('''\
#!/bin/sh -xe
exec cat > %s/psql.sql
''') % os.path.join(self.pgdata_directory, 'pgdata'))
os.chmod(psql, 0o775)
buildout['postgres'] = {
'bin': 'software/parts/postgres/bin/',
'bin': self.software_bin_dir,
'dbname': 'dbname',
'ipv4': '127.0.0.1',
'ipv6': '::1',
'port': '5443',
'pgdata-directory': self.pgdata_directory,
'pgdata-directory': os.path.join(self.pgdata_directory, 'pgdata'),
'services': self.services_directory,
'superuser': 'superuser',
'password': 'secret',
......@@ -37,13 +73,36 @@ class PostgresTest(unittest.TestCase):
def test_install(self):
installed = self.recipe.install()
self.assertIn('postgresql.conf', os.listdir(self.pgdata_directory))
self.assertIn('pg_hba.conf', os.listdir(self.pgdata_directory))
pgdata_directory = os.path.join(self.pgdata_directory, 'pgdata')
self.assertIn('postgresql.conf', os.listdir(pgdata_directory))
self.assertIn('pg_hba.conf', os.listdir(pgdata_directory))
self.assertIn('postgres-start', os.listdir(self.services_directory))
with open(os.path.join(pgdata_directory, 'postgres.sql')) as f:
self.assertEqual(
f.read(),
'ALTER USER "superuser" ENCRYPTED PASSWORD \'md53992d9240b8f81ebd7e1f9a9fafeb06b\'\n'
)
self.assertEqual(
sorted(installed),
sorted([
os.path.join(self.pgdata_directory, 'postgresql.conf'),
os.path.join(self.pgdata_directory, 'pg_hba.conf'),
os.path.join(pgdata_directory, 'postgresql.conf'),
os.path.join(pgdata_directory, 'pg_hba.conf'),
os.path.join(self.services_directory, 'postgres-start')]))
def test_update_password(self):
self.recipe.install()
# simulate a running server
pgdata_directory = os.path.join(self.pgdata_directory, 'pgdata')
open(os.path.join(pgdata_directory, 'postmaster.pid'), 'w').close()
self.recipe.options['password'] = 'new'
self.recipe.install()
with open(os.path.join(pgdata_directory, 'psql.sql')) as f:
self.assertEqual(
f.read(),
'ALTER USER "superuser" ENCRYPTED PASSWORD \'md5442311d398491b7f6b512757b51ae9d8\'\n'
)
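The statements expected above follow the recipe's md5 scheme shown earlier,
'md5' + md5(password + username). A minimal illustrative sketch; the helper
name below is ours, not part of the recipe:

  import hashlib

  def pg_md5_password(password, user):
      # PostgreSQL's legacy md5 format: 'md5' followed by md5(password + username)
      return 'md5' + hashlib.md5((password + user).encode()).hexdigest()

  # reproduces the statement the tests above expect in postgres.sql / psql.sql
  print('ALTER USER "%s" ENCRYPTED PASSWORD \'%s\'' % ('superuser', pg_md5_password('secret', 'superuser')))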
......@@ -46,7 +46,7 @@ md5sum = ae4a0043414336a521b524d9c95f1c68
[template-pullrdiffbackup]
filename = instance-pullrdiffbackup.cfg.in
md5sum = f2e6f30a0e8228cbfb93eaaae10fe884
md5sum = 45a4faa217ea5b83ecf271791e1632dd
[template]
filename = instance.cfg.in
......
......@@ -107,8 +107,7 @@ recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
output = $${directory:plugin}/{{ slave_reference }}_check_backup.py
content =
from slapos.promise.plugin.backupserver_check_backup import RunPromise
module = slapos.promise.plugin.backupserver_check_backup
config-status_dirbasename = $${variables:status_dirbasename}
config-status_name = {{ '$${' ~ slave_reference }}-backup-script:status_name}
config-status_fullpath = {{ '$${' ~ slave_reference }}-backup-script:status_log}
......@@ -117,7 +116,6 @@ config-cron_frequency = {{ frequency }}
config-monitor_url = $${monitor-publish:monitor-base-url}
config-statistic_dirbasename = $${variables:statistic_dirbasename}
config-statistic_name = {{ '$${' ~ slave_reference }}-backup-script:statistic_name}
mode = 600
{% do part_list.append("%s-promise-check-backup" % slave_reference) -%}
[{{ slave_reference }}-backup-script]
......@@ -194,7 +192,7 @@ virtual-depends =
[nginx-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx_listen.py
config-host = $${nginx-configuration:ip}
config-port = $${nginx-configuration:port}
......
......@@ -22,15 +22,15 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 385674eb3e79bdd773ef6cec1d1281e8
md5sum = 0950e09ad1f03f0789308f5f7a7eb1b8
[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 99741e618b1c249bd17c9e02778d74ee
md5sum = 7c2e52b76c42bed95702763c344e41dd
[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = 9bb51f663f69d66b5b3708bf892dd3e6
md5sum = 313671d343ceccfca5af1baa642132c5
[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
......@@ -102,7 +102,7 @@ md5sum = 38792c2dceae38ab411592ec36fff6a8
[profile-kedifa]
filename = instance-kedifa.cfg.in
md5sum = eab5ae579471ca86b40bd2da3b53fefa
md5sum = dfb4dabd1e4094de1276d171f998ef47
[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
......
......@@ -539,7 +539,7 @@ context =
[trafficserver-promise-listen-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = trafficserver-port-listening.py
config-host = ${trafficserver-variable:local-ip}
config-port = ${trafficserver-variable:input-port}
......@@ -552,7 +552,7 @@ environment = TS_ROOT=${buildout:directory}
[trafficserver-promise-cache-availability]
<= monitor-promise-base
module = trafficserver_cache_availability
promise = trafficserver_cache_availability
name = trafficserver-cache-availability.py
config-wrapper-path = ${trafficserver-ctl:wrapper-path}
......@@ -653,48 +653,48 @@ context =
[promise-frontend-caddy-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = frontend-caddy-configuration-promise.py
config-verification-script = ${promise-helper-last-configuration-state:rendered}
[promise-caddy-frontend-v4-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv4_https.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v4-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv4_http.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${configuration:plain_http_port}
[promise-caddy-frontend-v6-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv6_https.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v6-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv6_http.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${configuration:plain_http_port}
[promise-backend-haproxy-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = backend_haproxy_http.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${backend-haproxy-configuration:http-port}
[promise-backend-haproxy-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = backend_haproxy_https.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${backend-haproxy-configuration:https-port}
......@@ -798,7 +798,7 @@ extra-context =
[promise-backend-haproxy-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = backend-haproxy-configuration.py
config-verification-script = ${promise-backend-haproxy-configuration-helper:rendered}
......@@ -888,7 +888,7 @@ context =
[monitor-verify-re6st-connectivity]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = re6st-connectivity.py
config-url = ${configuration:re6st-verification-url}
......@@ -924,7 +924,7 @@ return = domain secure_access
[backend-haproxy-statistic-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = backend-haproxy-statistic-frontend.py
config-http-code = 401
config-url =
......@@ -976,7 +976,7 @@ extra-context =
[promise-slave-introspection-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = slave-introspection-configuration.py
config-verification-script = ${promise-slave-introspection-configuration-helper:rendered}
......@@ -993,7 +993,7 @@ context =
[promise-slave-introspection-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slave_introspection_https.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${frontend-configuration:slave-introspection-https-port}
......@@ -1008,7 +1008,7 @@ delaycompress =
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
......
......@@ -382,7 +382,7 @@ kedifa-csr_id-certificate = ${request-kedifa:connection-csr_id-certificate}
{% set section_part = '${request-' + frontend %}
[{{ part_name }}]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check-backend-haproxy-statistic-url-{{ frontend }}.py
config-url =
{{ section_part }}:connection-backend-haproxy-statistic-url}
......@@ -865,15 +865,15 @@ rendered = ${directory:etc}/nginx-rejected-slave.conf
[promise-rejected-slave-publish-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = rejected-slave-publish-ip-port-listening.py
config-host = ${rejected-slave-publish-configuration:ip}
config-port = ${rejected-slave-publish-configuration:port}
[rejected-slave-promise]
<= monitor-promise-base
module = check_socket_listening
module = check_file_state
promise = check_socket_listening
promise = check_file_state
name = rejected-slave.py
config-filename = ${rejected-slave-json:rendered}
config-state = empty
......
......@@ -193,7 +193,7 @@ template = inline:
[promise-expose-csr_id-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
......@@ -250,7 +250,7 @@ extra-context =
[promise-kedifa-http-reply]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = kedifa-http-reply.py
# Kedifa replies 400 on /, so use it to be sure that Kedifa replied
config-http-code = 400
......@@ -331,7 +331,7 @@ monitor-base-url = ${monitor-instance-parameter:monitor-base-url}
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
......
......@@ -20,19 +20,8 @@ parts +=
caucase-eggs
template
caddyprofiledeps
kedifa-develop
kedifa
[kedifa-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/kedifa.git
git-executable = ${git:location}/bin/git
revision = 3fccc2ec945c59b644a12fa40225795abd61e0b0
[kedifa-develop]
recipe = zc.recipe.egg:develop
setup = ${kedifa-repository:location}
[kedifa]
recipe = zc.recipe.egg
eggs =
......@@ -222,6 +211,7 @@ mode = 0644
<=download-template
[versions]
kedifa = 0.0.6
# Modern KeDiFa requires zc.lockfile
zc.lockfile = 1.4
......
......@@ -353,7 +353,7 @@ filename = {{ '%s.conf' % slave_reference }}
{%- do part_list.append(monitor_ipv6_section_title) %}
[{{ monitor_ipv6_section_title }}]
<= monitor-promise-base
module = check_icmp_packet_lost
promise = check_icmp_packet_lost
name = {{ monitor_ipv6_section_title }}.py
config-address = {{ dumps(monitor_ipv6_test) }}
# promise frequency in minutes (2 times/day)
......@@ -365,7 +365,7 @@ config-frequency = 720
{%- do part_list.append(monitor_ipv4_section_title) %}
[{{ monitor_ipv4_section_title }}]
<= monitor-promise-base
module = check_icmp_packet_lost
promise = check_icmp_packet_lost
name = {{ monitor_ipv4_section_title }}.py
config-address = {{ dumps(monitor_ipv4_test) }}
config-ipv4 = true
......@@ -632,7 +632,7 @@ template = inline:
[promise-expose-csr_id-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
......@@ -655,7 +655,7 @@ commands =
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
\ No newline at end of file
......@@ -971,12 +971,11 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
return parsed_parameter_dict
def getMasterPartitionPath(self):
# partition w/o etc/trafficserver, but with buildout.cfg
# partition with etc/nginx-rejected-slave.conf
return [
q for q in glob.glob(os.path.join(self.instance_path, '*',))
if not os.path.exists(
os.path.join(q, 'etc', 'trafficserver')) and os.path.exists(
os.path.join(q, 'buildout.cfg'))][0]
if os.path.exists(
os.path.join(q, 'etc', 'nginx-rejected-slave.conf'))][0]
def parseConnectionParameterDict(self):
return self.parseParameterDict(
......
......@@ -18,4 +18,4 @@ md5sum = e986de01a57161b32425f1cd3ccac924
[template-cloudooo-instance]
filename = instance-cloudooo.cfg.in
md5sum = 6e4bdb1df02aed5c96ccf7b9c3c71b89
md5sum = 3e6830c21c206b3ae1140375e5e63b46
......@@ -105,7 +105,7 @@ ssl-session-cache = ${directory:log}/apache-ssl-session-cache
[apache-promise]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = apache.py
config-url = https://{{ ipv4 }}:{{ apache_dict.values()[0][0] }}
# XXX cloudooo replies "400 Bad Request" for GET on / but what we want to check
......
[instance]
filename = instance.cfg
md5sum = 646e50cfa93681e8bd85767621c7a39d
md5sum = ddd17fab15afa5a27cdc0761fbc8f34c
......@@ -41,7 +41,7 @@ wrapper-path = $${directory:service}/dream_platform
[dream-platform-url-available]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${dream_platform_parameter:url}
......
......@@ -29,10 +29,16 @@ script = dream_testrunner
initialization =
${manpy:initialization}
[nodejs]
<= nodejs-8.9.4
[npm_install]
recipe = plone.recipe.command
stop-on-error = true
command = cd ${dream-repository.git:location} && PATH=${git:location}/bin/:${nodejs:location}/bin/:$PATH ${nodejs:location}/bin/npm install .
command =
cd ${dream-repository.git:location}
PATH=${git:location}/bin:${nodejs:location}/bin:$PATH
npm install .
update_command = ${:command}
[versions]
......
......@@ -24,6 +24,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import absolute_import
from setuptools import setup, find_packages
version = '0.0.1.dev0'
......@@ -52,7 +53,6 @@ setup(name=name,
'cryptography',
'pexpect',
'pyOpenSSL',
'typing; python_version<"3"',
],
test_suite='test',
)
......@@ -25,6 +25,7 @@
#
##############################################################################
from __future__ import absolute_import
import json
import os
......
from __future__ import absolute_import
import glob
import hashlib
import json
......@@ -8,10 +9,8 @@ import shutil
import subprocess
import tempfile
import time
import urllib
import urlparse
from BaseHTTPServer import BaseHTTPRequestHandler
from typing import Dict
import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error
from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
import mock
import OpenSSL.SSL
......@@ -28,6 +27,7 @@ from slapos.testing.utils import (CrontabMixin, ManagedHTTPServer,
findFreeTCPPort)
from . import ERP5InstanceTestCase, setUpModule
from six.moves import range
setUpModule # pyflakes
......@@ -44,10 +44,10 @@ class EchoHTTPServer(ManagedHTTPServer):
response = json.dumps(
{
'Path': self.path,
'Incoming Headers': self.headers.dict
'Incoming Headers': dict(self.headers.items()),
},
indent=2,
)
).encode('utf-8')
self.end_headers()
self.wfile.write(response)
......@@ -67,11 +67,11 @@ class EchoHTTP11Server(ManagedHTTPServer):
response = json.dumps(
{
'Path': self.path,
'Incoming Headers': self.headers.dict
'Incoming Headers': dict(self.headers.items()),
},
indent=2,
)
self.send_header("Content-Length", len(response))
).encode('utf-8')
self.send_header("Content-Length", str(len(response)))
self.end_headers()
self.wfile.write(response)
......@@ -110,6 +110,7 @@ class CaucaseService(ManagedResource):
'--netloc', backend_caucased_netloc,
'--service-auto-approve-count', '1',
],
      # capture subprocess output so it does not pollute the test's own stdout
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
......@@ -127,6 +128,7 @@ class CaucaseService(ManagedResource):
# type: () -> None
self._caucased_process.terminate()
self._caucased_process.wait()
self._caucased_process.stdout.close()
shutil.rmtree(self.directory)
......@@ -139,7 +141,7 @@ class BalancerTestCase(ERP5InstanceTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
return {
'tcpv4-port': 8000,
'computer-memory-percent-threshold': 100,
......@@ -174,10 +176,11 @@ class BalancerTestCase(ERP5InstanceTestCase):
@classmethod
def getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
return {'_': json.dumps(cls._getInstanceParameterDict())}
def setUp(self):
# type: () -> None
self.default_balancer_url = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])['default']
......@@ -192,7 +195,7 @@ class SlowHTTPServer(ManagedHTTPServer):
self.send_header("Content-Type", "text/plain")
time.sleep(2)
self.end_headers()
self.wfile.write("OK\n")
self.wfile.write(b"OK\n")
log_message = logging.getLogger(__name__ + '.SlowHandler').info
......@@ -203,7 +206,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
__partition_reference__ = 'l'
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(TestLog, cls)._getInstanceParameterDict()
# use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
......@@ -212,7 +215,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
def test_access_log_format(self):
# type: () -> None
requests.get(
urlparse.urljoin(self.default_balancer_url, '/url_path'),
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
verify=False,
)
time.sleep(.5) # wait a bit more until access is logged
......@@ -285,6 +288,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
self.assertFalse(os.path.exists(rotated_log_file))
def test_error_log(self):
# type: () -> None
# stop backend server
backend_server = self.getManagedResource("slow_web_server", SlowHTTPServer)
self.addCleanup(backend_server.open)
......@@ -326,7 +330,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer):
# The name of this cookie is SERVERID
assert self.headers['X-Balancer-Current-Cookie'] == 'SERVERID'
self.end_headers()
self.wfile.write(server._name)
self.wfile.write(server._name.encode('utf-8'))
log_message = logging.getLogger(__name__ + '.BalancerCookieHTTPServer').info
return RequestHandler
......@@ -338,7 +342,7 @@ class TestBalancer(BalancerTestCase):
__partition_reference__ = 'b'
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(TestBalancer, cls)._getInstanceParameterDict()
# use two backend servers
......@@ -349,6 +353,7 @@ class TestBalancer(BalancerTestCase):
return parameter_dict
def test_balancer_round_robin(self):
# type: () -> None
# requests are by default balanced to both servers
self.assertEqual(
{requests.get(self.default_balancer_url, verify=False).text for _ in range(10)},
......@@ -356,6 +361,7 @@ class TestBalancer(BalancerTestCase):
)
def test_balancer_server_down(self):
# type: () -> None
# if one backend is down, it is excluded from balancer
self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).close()
self.addCleanup(self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).open)
......@@ -365,14 +371,16 @@ class TestBalancer(BalancerTestCase):
)
def test_balancer_set_cookie(self):
# type: () -> None
    # if the backend provides a "SERVERID" cookie, the balancer will overwrite it with the
    # backend selected by the balancing algorithm
self.assertIn(
requests.get(urlparse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'),
)
def test_balancer_respects_sticky_cookie(self):
# type: () -> None
    # if the request is made with the sticky cookie, the client sticks to one backend
cookies = dict(SERVERID='default-1')
self.assertEqual(
......@@ -388,6 +396,7 @@ class TestBalancer(BalancerTestCase):
'backend_web_server1')
def test_balancer_stats_socket(self):
# type: () -> None
# real time statistics can be obtained by using the stats socket and there
# is a wrapper which makes this a bit easier.
socat_process = subprocess.Popen(
......@@ -397,14 +406,14 @@ class TestBalancer(BalancerTestCase):
stderr=subprocess.STDOUT
)
try:
output, _ = socat_process.communicate("show stat\n")
output, _ = socat_process.communicate(b"show stat\n")
except:
socat_process.kill()
socat_process.wait()
raise
self.assertEqual(socat_process.poll(), 0)
# output is a csv
self.assertIn('family_default,FRONTEND,', output)
self.assertIn(b'family_default,FRONTEND,', output)
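For reference, the stats socket speaks a simple line protocol: write "show stat\n" and read CSV until EOF. A minimal standalone sketch, assuming a hypothetical admin socket path (the real path comes from the generated haproxy configuration and is what the socat wrapper above points at):

import socket

def show_stat(admin_socket_path):
    # admin_socket_path is hypothetical; connect, send the command, read CSV until EOF
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(admin_socket_path)
    sock.sendall(b"show stat\n")
    chunks = []
    while True:
        chunk = sock.recv(4096)
        if not chunk:
            break
        chunks.append(chunk)
    sock.close()
    # one CSV line per frontend/backend/server, e.g. "family_default,FRONTEND,..."
    return b"".join(chunks).decode()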
class TestTestRunnerEntryPoints(BalancerTestCase):
......@@ -413,7 +422,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
__partition_reference__ = 't'
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(
TestTestRunnerEntryPoints,
cls,
......@@ -436,23 +445,24 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
return parameter_dict
def test_use_proper_backend(self):
# type: () -> None
# requests are directed to proper backend based on URL path
test_runner_url_list = self.getRootPartitionConnectionParameterDict(
)['default-test-runner-url-list']
url_0, url_1, url_2 = test_runner_url_list
self.assertEqual(
urlparse.urlparse(url_0).netloc,
urlparse.urlparse(url_1).netloc)
six.moves.urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_1).netloc)
self.assertEqual(
urlparse.urlparse(url_0).netloc,
urlparse.urlparse(url_2).netloc)
six.moves.urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_2).netloc)
path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
self.assertEqual(
{
......@@ -489,7 +499,7 @@ class TestHTTP(BalancerTestCase):
"""
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict()
# use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
......@@ -511,13 +521,13 @@ class TestHTTP(BalancerTestCase):
'%{http_version}',
self.default_balancer_url,
]),
'2',
b'2',
)
def test_keep_alive(self):
# type: () -> None
# when doing two requests, connection is established only once
session = requests.Session()
with requests.Session() as session:
session.verify = False
    # do a first request, which establishes a first connection
......@@ -530,7 +540,8 @@ class TestHTTP(BalancerTestCase):
session.get(self.default_balancer_url).raise_for_status()
new_conn.assert_not_called()
parsed_url = urlparse.urlparse(self.default_balancer_url)
parsed_url = six.moves.urllib.parse.urlparse(self.default_balancer_url)
# check that we have an open file for the ip connection
self.assertTrue([
c for c in psutil.Process(os.getpid()).connections()
......@@ -553,12 +564,12 @@ class ContentTypeHTTPServer(ManagedHTTPServer):
# type: () -> None
self.send_response(200)
if self.path == '/':
self.send_header("Content-Length", 0)
self.send_header("Content-Length", '0')
return self.end_headers()
content_type = self.path[1:]
body = "OK"
body = b"OK"
self.send_header("Content-Type", content_type)
self.send_header("Content-Length", len(body))
self.send_header("Content-Length", str(len(body)))
self.end_headers()
self.wfile.write(body)
......@@ -571,7 +582,7 @@ class TestContentEncoding(BalancerTestCase):
__partition_reference__ = 'ce'
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(TestContentEncoding, cls)._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
......@@ -599,7 +610,7 @@ class TestContentEncoding(BalancerTestCase):
'application/font-woff2',
'application/x-font-opentype',
'application/wasm',):
resp = requests.get(urlparse.urljoin(self.default_balancer_url, content_type), verify=False)
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False)
self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual(
resp.headers.get('Content-Encoding'),
......@@ -609,7 +620,7 @@ class TestContentEncoding(BalancerTestCase):
def test_no_gzip_encoding(self):
# type: () -> None
resp = requests.get(urlparse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK')
......@@ -692,7 +703,7 @@ class CaucaseCertificate(ManagedResource):
cas_args + [
'--send-csr', self.csr_file,
],
).split()[0]
).split()[0].decode()
assert csr_id
for _ in range(30):
......@@ -708,11 +719,11 @@ class CaucaseCertificate(ManagedResource):
time.sleep(1)
else:
raise RuntimeError('getting service certificate failed.')
with open(self.cert_file) as f:
assert 'BEGIN CERTIFICATE' in f.read()
with open(self.cert_file) as cert_file:
assert 'BEGIN CERTIFICATE' in cert_file.read()
def revoke(self, caucase):
# type: (str, CaucaseService) -> None
# type: (CaucaseService) -> None
"""Revoke the client certificate on this caucase instance.
"""
subprocess.check_call([
......@@ -729,7 +740,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService)
certificate = cls.getManagedResource('client_certificate', CaucaseCertificate)
certificate.request(u'shared frontend', frontend_caucase)
......@@ -784,10 +795,10 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(cls._ipv4_address.decode(), server_caucase)
server_certificate.request(six.ensure_text(cls._ipv4_address), server_caucase)
parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict()
with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read()
......@@ -806,7 +817,7 @@ class TestClientTLS(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService)
certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate)
certificate1.request(u'client_certificate1', frontend_caucase1)
......@@ -827,7 +838,6 @@ class TestClientTLS(BalancerTestCase):
def test_refresh_crl(self):
# type: () -> None
logger = self.logger
class DebugLogFile:
......@@ -846,6 +856,7 @@ class TestClientTLS(BalancerTestCase):
    # when the client certificate can be authenticated, the backend receives the CN of
    # the client certificate in the "remote-user" header
def _make_request():
# type: () -> dict
return requests.get(
self.default_balancer_url,
cert=(client_certificate.cert_file, client_certificate.key_file),
......@@ -897,6 +908,7 @@ class TestClientTLS(BalancerTestCase):
with self.assertRaisesRegexp(Exception, 'certificate revoked'):
_make_request()
class TestPathBasedRouting(BalancerTestCase):
"""Check path-based routing rewrites URLs as expected.
"""
......@@ -904,7 +916,7 @@ class TestPathBasedRouting(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
# type: () -> dict
parameter_dict = super(
TestPathBasedRouting,
cls,
......@@ -936,14 +948,15 @@ class TestPathBasedRouting(BalancerTestCase):
published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_'])
scheme = 'scheme'
netloc = 'example.com:8080'
prefix = '/VirtualHostBase/' + scheme + '//' + urllib.quote(
prefix = '/VirtualHostBase/' + scheme + '//' + six.moves.urllib.parse.quote(
netloc,
safe='',
)
# For easier reading of test data, visualy separating the virtual host
# For easier reading of test data, visually separating the virtual host
# base from the virtual host root
vhr = '/VirtualHostRoot'
def assertRoutingEqual(family, path, expected_path):
# type: (str, str, str) -> None
# sanity check: unlike the rules, this test is sensitive to outermost
# slashes, and paths must be absolute-ish for code simplicity.
assert path.startswith('/')
......@@ -959,7 +972,7 @@ class TestPathBasedRouting(BalancerTestCase):
# test will need to be updated accordingly.
self.assertEqual(
requests.get(
urlparse.urljoin(published_dict[family], prefix + vhr + path),
six.moves.urllib.parse.urljoin(published_dict[family], prefix + vhr + path),
verify=False,
).json()['Path'],
expected_path,
......@@ -978,7 +991,7 @@ class TestPathBasedRouting(BalancerTestCase):
# Rule precedence: family rules applied before general rules.
assertRoutingEqual('default', '/next', prefix + '/erp5/web_site_module/another_next_website' + vhr + '/_vh_next')
# Fallback on general rules when no family-specific rule matches
# Note: the root is special in that there is aways a trailing slash in the
# Note: the root is special in that there is always a trailing slash in the
# produced URL.
assertRoutingEqual('default', '/', prefix + '/erp5/web_site_module/123' + vhr + '/')
    # A rule-less family reaches the general rules.
......
......@@ -25,10 +25,11 @@
#
##############################################################################
from __future__ import absolute_import
import os
import json
import glob
import urlparse
import six.moves.urllib.parse
import socket
import time
......@@ -37,6 +38,9 @@ import requests
from . import ERP5InstanceTestCase
from . import setUpModule
import six
from six.moves import map
from six.moves import range
setUpModule # pyflakes
......@@ -48,7 +52,7 @@ class TestPublishedURLIsReachableMixin(object):
    # We access ERP5 through a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base.
virtual_host_url = urlparse.urljoin(
virtual_host_url = six.moves.urllib.parse.urljoin(
base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id))
......@@ -60,7 +64,7 @@ class TestPublishedURLIsReachableMixin(object):
# erp5 site is not created, with 500 when mysql is not yet reachable, so we
# configure this requests session to retry.
# XXX we should probably add a promise instead
session = requests.Session()
with requests.Session() as session:
session.mount(
base_url,
requests.adapters.HTTPAdapter(
......@@ -76,7 +80,7 @@ class TestPublishedURLIsReachableMixin(object):
# login page can be rendered and contain the text "ERP5"
r = session.get(
urlparse.urljoin(base_url, '{}/login_form'.format(site_id)),
six.moves.urllib.parse.urljoin(base_url, '{}/login_form'.format(site_id)),
verify=verify,
allow_redirects=False,
)
......@@ -119,6 +123,7 @@ class TestMedusa(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
def getInstanceParameterDict(cls):
return {'_': json.dumps({'wsgi': False})}
class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test ERP5 Jupyter notebook
"""
......@@ -143,6 +148,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
[result.status_code, result.is_redirect, result.headers['Location']]
)
class TestBalancerPorts(ERP5InstanceTestCase):
"""Instantiate with two zope families, this should create for each family:
- a balancer entry point with corresponding haproxy
......@@ -169,7 +175,7 @@ class TestBalancerPorts(ERP5InstanceTestCase):
}
def checkValidHTTPSURL(self, url):
parsed = urlparse.urlparse(url)
parsed = six.moves.urllib.parse.urlparse(url)
self.assertEqual(parsed.scheme, 'https')
self.assertTrue(parsed.hostname)
self.assertTrue(parsed.port)
......@@ -254,7 +260,7 @@ class TestSeleniumTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMi
with open(config_file.strip()) as f:
self.assertEqual(
f.read(),
json.dumps(json.loads(self.getInstanceParameterDict()['_'])['test-runner']))
json.dumps(json.loads(self.getInstanceParameterDict()['_'])['test-runner'], sort_keys=True))
class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
......@@ -270,8 +276,8 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
"""
# self.computer_partition_root_path is the path of root partition.
# we want to assert that no scripts exist in any partition.
bin_programs = map(os.path.basename,
glob.glob(self.computer_partition_root_path + "/../*/bin/*"))
bin_programs = list(map(os.path.basename,
glob.glob(self.computer_partition_root_path + "/../*/bin/*")))
self.assertTrue(bin_programs) # just to check the glob was correct.
self.assertNotIn('runUnitTest', bin_programs)
......@@ -352,7 +358,7 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
storage["storage"] = "root"
storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f:
conf = map(str.strip, f.readlines())
conf = list(map(str.strip, f.readlines()))
i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)])
for line in conf:
......@@ -361,23 +367,23 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
if line == '</zeoclient>':
break
checkParameter(line, storage)
for k, v in storage.iteritems():
for k, v in six.iteritems(storage):
self.assertIsNone(v, k)
del storage
else:
checkParameter(line, zodb)
for k, v in zodb.iteritems():
for k, v in six.iteritems(zodb):
self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a')
for zope in xrange(3):
for zope in range(3):
checkConf({
"cache-size-bytes": "20MB",
}, {
"cache-size": "50MB",
})
partition = self.getComputerPartitionPath('zope-bb')
for zope in xrange(5):
for zope in range(5):
checkConf({
"cache-size-bytes": "500MB" if zope else 1<<20,
}, {
......
......@@ -26,10 +26,11 @@
#
##############################################################################
from __future__ import absolute_import
import os
import json
import glob
import urlparse
import six.moves.urllib.parse
import socket
import sys
import time
......@@ -60,6 +61,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
return {
'tcpv4-port': 3306,
'max-connection-count': 5,
......@@ -75,12 +77,14 @@ class MariaDBTestCase(ERP5InstanceTestCase):
@classmethod
def getInstanceParameterDict(cls):
# type: () -> dict
return {'_': json.dumps(cls._getInstanceParameterDict())}
def getDatabaseConnection(self):
# type: () -> MySQLdb.connections.Connection
connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
db_url = urlparse.urlparse(connection_parameter_dict['database-list'][0])
db_url = six.moves.urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
self.assertEqual('mysql', db_url.scheme)
self.assertTrue(db_url.path.startswith('/'))
......@@ -91,12 +95,15 @@ class MariaDBTestCase(ERP5InstanceTestCase):
host=db_url.hostname,
port=db_url.port,
db=database_name,
use_unicode=True,
charset='utf8mb4'
)
class TestCrontabs(MariaDBTestCase, CrontabMixin):
def test_full_backup(self):
# type: () -> None
self._executeCrontabAtDate('mariadb-backup', '2050-01-01')
with gzip.open(
os.path.join(
......@@ -106,10 +113,11 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'mariadb-full',
'20500101000000.sql.gz',
),
'r') as dump:
'rt') as dump:
self.assertIn('CREATE TABLE', dump.read())
def test_logrotate_and_slow_query_digest(self):
# type: () -> None
# slow query digest needs to run after logrotate, since it operates on the rotated
# file, so this tests both logrotate and slow query digest.
......@@ -148,7 +156,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'slowquery_digest',
'slowquery_digest.txt-2050-01-01.xz',
)
with lzma.open(slow_query_report, 'r') as f:
with lzma.open(slow_query_report, 'rt') as f:
# this is the hash for our "select sleep(n)" slow query
self.assertIn("ID 0xF9A57DD5A41825CA", f.read())
......@@ -170,7 +178,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
subprocess.check_output('faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'], shell=True)
self.assertEqual(
error_context.exception.output,
"""\
b"""\
Threshold is lower than expected:
Expected total queries : 1.0 and current is: 2
Expected slowest query : 0.1 and current is: 3
......@@ -179,6 +187,7 @@ Expected slowest query : 0.1 and current is: 3
class TestMariaDB(MariaDBTestCase):
def test_utf8_collation(self):
# type: () -> None
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
cnx.query(
......@@ -199,11 +208,12 @@ class TestMariaDB(MariaDBTestCase):
"""
select * from test_utf8_collation where col1 = "a"
""")
self.assertEqual((('à',),), cnx.store_result().fetch_row(maxrows=2))
self.assertEqual(((u'à',),), cnx.store_result().fetch_row(maxrows=2))
class TestMroonga(MariaDBTestCase):
def test_mroonga_plugin_loaded(self):
# type: () -> None
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
cnx.query("show plugins")
......@@ -213,6 +223,7 @@ class TestMroonga(MariaDBTestCase):
plugins)
def test_mroonga_normalize_udf(self):
# type: () -> None
# example from https://mroonga.org/docs/reference/udf/mroonga_normalize.html#usage
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
......@@ -220,7 +231,8 @@ class TestMroonga(MariaDBTestCase):
"""
SELECT mroonga_normalize("ABCDあぃうぇ㍑")
""")
self.assertEqual((('abcdあぃうぇリットル',),),
# XXX this is returned as bytes by mroonga/mariadb (this might be a bug)
self.assertEqual(((u'abcdあぃうぇリットル'.encode('utf-8'),),),
cnx.store_result().fetch_row(maxrows=2))
if 0:
......@@ -233,10 +245,11 @@ class TestMroonga(MariaDBTestCase):
"""
SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark")
""")
self.assertEqual((('ABCDあぃうぇ㍑',),),
self.assertEqual(((u'ABCDあぃうぇ㍑'.encode('utf-8'),),),
cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self):
# type: () -> None
# example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-normalizer
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
......@@ -269,11 +282,12 @@ class TestMroonga(MariaDBTestCase):
WHERE MATCH (content) AGAINST ("+ブラック" IN BOOLEAN MODE)
""")
self.assertEqual(
((datetime.date(2013, 4, 23), 'ブラックコーヒーを飲んだ。'),),
((datetime.date(2013, 4, 23), u'ブラックコーヒーを飲んだ。'),),
cnx.store_result().fetch_row(maxrows=2),
)
def test_mroonga_full_text_normalizer_TokenBigramSplitSymbolAlphaDigit(self):
# type: () -> None
    # Similar to ERP5's testI18NSearch with erp5_full_text_mroonga_catalog
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
......@@ -317,11 +331,12 @@ class TestMroonga(MariaDBTestCase):
self.assertEqual(((1,),), cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_stem(self):
# type: () -> None
# example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-token-filters
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
cnx.query("SELECT mroonga_command('register token_filters/stem')")
self.assertEqual((('true',),), cnx.store_result().fetch_row(maxrows=2))
self.assertEqual(((b'true',),), cnx.store_result().fetch_row(maxrows=2))
cnx.query(
"""
CREATE TABLE memos (
......
......@@ -16,6 +16,7 @@
# See COPYING file for full licensing terms.
# See https://www.nexedi.com/licensing for rationale and options.
from __future__ import absolute_import
import json
import os.path
import unittest
......
......@@ -18,4 +18,4 @@ md5sum = 6dcbe21f99aa6675e8e3b74bc9cbb0e6
[template-default]
filename = instance-default.cfg.jinja.in
md5sum = ffb6b74b55e7ca01666254353ae1cebe
md5sum = 536a28ff250c691ca374f75aa5f0aa76
......@@ -261,7 +261,7 @@ instance-promises =
[shellinabox-frontend-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${shellinabox-frontend:hostname}
config-port = $${shellinabox-frontend:port}
......@@ -278,6 +278,6 @@ config-port = $${shellinabox-frontend:port}
[testnode-log-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${testnode-log-frontend:connection-secure_access}
......@@ -49,7 +49,7 @@ WebTest = 2.0.33
soupsieve = 1.8
#gitdb2 = 2.0.4
#smmap2 = 2.0.4
waitress = 1.2.1
waitress = 1.4.4
z3c.etestbrowser = 2.0.1
zope.testbrowser = 5.3.2
WSGIProxy2 = 0.4.6
......
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign which must fit in the following categorie.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[instance-profile]
filename = instance.cfg
md5sum = c265bf7ec199190ba9f77960cac5da38
[template-fluentd]
filename = instance-fluentd.cfg
md5sum = 35f9d95f6a75e28bfeafc3568ca16f05
[service-fluentd]
recipe = slapos.cookbook:wrapper
wrapper-path = {{ directory['service'] }}/fluentd-service
command-line = ${fluentd:location}/bin/fluentd -v -c {{ fluentd_agent_conf }}
environment =
GEM_PATH=${fluentd:location}/lib/ruby/gems/
{% set part_list = [] -%}
{% for port in port_list -%}
{% set promise_section_title = 'fluentd-port-' ~ port ~ '-listening' -%}
{% do part_list.append(promise_section_title) -%}
[{{ promise_section_title }}]
<= monitor-promise-base
promise = check_socket_listening
name = {{ promise_section_title }}.py
config-host = $${slap-configuration:ipv6-random}
config-port = {{ port }}
{% endfor %}
[buildout]
parts =
service-fluentd
{%- for part in part_list %}
{{ part }}
{%- endfor %}
extends = ${monitor-template:output}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype
default = dynamic-template-fluentd:rendered
RootSoftwareInstance = $${:default}
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc
var = $${:home}/var
service = $${:etc}/service
bin = $${:home}/bin
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
[dynamic-template-fluentd]
recipe = slapos.recipe.template:jinja2
template = ${template-fluentd:output}
rendered = $${buildout:directory}/instance-fluentd.cfg
extensions = jinja2.ext.do
context =
key fluentd_agent_conf fluentd-agent-conf:rendered
key port_list fluentd-conf:port-list
section directory directory
[fluentd-conf]
recipe = slapos.recipe.build
slapparameter-dict = $${slap-configuration:configuration}
init =
import re
options['text'] = options['slapparameter-dict'].get('conf_text') or ''
options['port-list'] = re.findall(r'<source>.*port (\d+).*<\/source>', options['text'], re.DOTALL)
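The port-list option above is filled by a plain regex scan over the raw configuration text. A minimal standalone sketch of the same idea, with a hypothetical <source> block (the real text comes from the conf_text instance parameter):

import re

conf_text = """
<source>
  @type forward
  port 24224
  bind ::
</source>
"""
# same pattern as in [fluentd-conf]: capture the port advertised in the <source> block
port_list = re.findall(r'<source>.*port (\d+).*<\/source>', conf_text, re.DOTALL)
print(port_list)  # ['24224']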
[fluentd-agent-conf]
recipe = slapos.recipe.template:jinja2
template = inline:{{ conf }}
rendered = $${directory:etc}/fluentd-agent.conf
mode = 0644
context =
key conf fluentd-conf:text
[buildout]
parts =
service-fluentd
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
[instance-parameter]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap_connection:computer_id}
partition = ${slap_connection:partition_id}
url = ${slap_connection:server_url}
key = ${slap_connection:key_file}
cert = ${slap_connection:cert_file}
[directory]
recipe = slapos.cookbook:mkdirectory
home = ${buildout:directory}
etc = ${:home}/etc
var = ${:home}/var
script = ${:etc}/run/
service = ${:etc}/service
promise = ${:etc}/promise/
log = ${:var}/log
bin = ${:home}/bin
[service-fluentd]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:service}/fluentd-service
command-line = {{ fluentd_location }}/bin/fluentd
-v
-c ${fluentd-agent-conf:rendered}
environment =
GEM_PATH={{ fluentd_location }}/lib/ruby/gems/1.8/
[fluentd-agent-conf]
recipe = slapos.recipe.template:jinja2
template = inline:{% raw -%}
{{ slapparameter_dict.get('conf_text', '') }}
{%- endraw %}
rendered = ${directory:etc}/fluentd-agent.conf
mode = 0644
context =
key slapparameter_dict instance-parameter:configuration
[buildout]
extends =
buildout.hash.cfg
../../component/fluentd/buildout.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
parts =
instance-profile
......@@ -10,17 +12,20 @@ parts =
[python]
part = python3
[instance-profile]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg.in
rendered = ${buildout:directory}/instance.cfg
[template-base]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
mode = 0644
extensions = jinja2.ext.do
context =
section buildout buildout
key fluentd_location fluentd:location
[instance-profile]
< = template-base
output = ${buildout:directory}/template.cfg
[template-fluentd]
< = template-base
output = ${buildout:directory}/template-fluentd.cfg
[fluentd]
gems +=
fluent-plugin-wendelin==0.4
fluent-plugin-bin==0.2
fluent-plugin-wendelin==0.5
fluent-plugin-bin==0.3
......@@ -41,6 +41,7 @@ setup(name=name,
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'msgpack',
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
......
......@@ -25,10 +25,25 @@
#
##############################################################################
import msgpack
import os
import random
import shutil
import socket
import struct
import subprocess
import tempfile
import time
import six
from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
from six.moves.socketserver import StreamRequestHandler, TCPServer
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort
FLUENTD_PORT = 24224
FLUSH_INTERVAL = 1
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
......@@ -50,3 +65,218 @@ class FluentdTestCase(SlapOSInstanceTestCase):
for expected_process_name in expected_process_name_list:
self.assertIn(expected_process_name, process_names)
class OneRequestServer(TCPServer):
address_family = socket.AF_INET6
timeout = 1
def get_first_data(self, flush_interval=1):
start = time.time()
while(not self.RequestHandlerClass.received_data
and time.time() - start < 10*flush_interval):
self.handle_request()
return self.RequestHandlerClass.received_data
class WendelinTutorialTestCase(FluentdTestCase):
@classmethod
def get_configuration(cls):
return ''
@classmethod
def getInstanceParameterDict(cls):
return {'conf_text': cls._conf,}
@classmethod
def measureDict(cls):
return {k: v.encode() for k, v in
zip((b'pressure', b'humidity', b'temperature'), cls._measurementList)}
@classmethod
def setUpClass(cls):
fluentd_dir = os.path.join(cls.computer_partition_root_path,
'software_release', 'parts', 'fluentd')
cls._fluentd_bin = os.path.join(fluentd_dir, 'bin', 'fluentd')
cls._gem_path = os.path.join(fluentd_dir, 'lib', 'ruby', 'gems')
cls._tmp_dir = tempfile.mkdtemp()
cls._measurementList = cls.sensor_value_list()
cls._conf = cls.get_configuration()
super(FluentdTestCase, cls).setUpClass()
@classmethod
def sensor_value_list(cls):
return [str(value) for value in (round(random.uniform(870, 1084), 2),
round(random.uniform(0, 100), 2),
round(random.uniform(-20, 50), 3))]
def serve(self, port, request_handler_class):
server_address = (self._ipv6_address, port)
server = OneRequestServer(server_address, request_handler_class)
data = server.get_first_data(FLUSH_INTERVAL)
server.server_close()
return data
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls._tmp_dir)
super(FluentdTestCase, cls).tearDownClass()
def read_fluentd_conf(self, configuration):
conf_path = os.path.join(self._tmp_dir, 'fluentd.conf')
with open(conf_path, "w") as conf_file:
conf_file.write(configuration)
return subprocess.check_output(
[self._fluentd_bin, '-c', conf_path, '--dry-run'],
env={'GEM_PATH': self._gem_path},
universal_newlines=True,
)
def _test_configuration(self, expected_str):
self.assertRegexpMatches(
self.read_fluentd_conf(self._conf),
expected_str,
)
class FluentdHTTPRequestHandler(StreamRequestHandler):
received_data = b''
def handle(self):
data = self.rfile.readline().strip()
# ignore heartbeats (https://docs.fluentd.org/output/forward#heartbeat_type)
if len(data) > 0:
FluentdHTTPRequestHandler.received_data = data
# see https://wendelin.nexedi.com/wendelin-Learning.Track/wendelin-Tutorial.Setup.Fluentd.on.Sensor
class SensorConfTestCase(WendelinTutorialTestCase):
@classmethod
def get_configuration(cls):
script_path = os.path.join(cls._tmp_dir, "custom_read_bme280.py")
with open(script_path, "w") as script:
script.write(cls.sensor_script(cls._measurementList))
return cls.sensor_conf(script_path)
@classmethod
def sensor_conf(cls, script_path):
return '''\
<source>
@type exec
tag tag.name
command python %s
run_interval %ss
<parse>
keys pressure, humidity, temperature
</parse>
</source>
<match tag.name>
@type forward
<server>
name myserver1
host %s
</server>
<buffer>
flush_mode immediate
</buffer>
</match>''' % (script_path, FLUSH_INTERVAL, cls._ipv6_address)
@classmethod
def sensor_script(cls, measurementList):
return '''\
#!/usr/bin/python
# -*- coding: utf-8 -*-
print("%s")''' % "\t".join(measurementList)
def test_configuration(self):
self._test_configuration(
r'adding forwarding server \'myserver1\' host="%s" port=%s weight=60'
% (self._ipv6_address, FLUENTD_PORT)
)
def test_send_data(self):
tag, data, header = msgpack.unpackb(
self.serve(FLUENTD_PORT, FluentdHTTPRequestHandler),
raw=True,
)
self.assertEqual(b'tag.name', tag)
self.assertEqual(self.measureDict(), msgpack.unpackb(data)[-1])
self.assertEqual({b'compressed': b'text', b'size': 1}, header)
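The payload unpacked above follows fluentd's forward protocol as exercised by this test: an outer msgpack array [tag, packed entry, options], where the entry is itself msgpack-encoded [time, record] and the options map carries "size" and "compressed". A minimal sketch with illustrative values (the record contents are hypothetical):

import time
import msgpack

# outer message: [tag, <msgpack-encoded [time, record] entry>, options]
record = {b'pressure': b'1000.0', b'humidity': b'50.0', b'temperature': b'20.0'}
entry = msgpack.packb([int(time.time()), record], use_bin_type=False)
message = msgpack.packb(
    [b'tag.name', entry, {b'size': 1, b'compressed': b'text'}],
    use_bin_type=False)

tag, data, options = msgpack.unpackb(message, raw=True)
assert tag == b'tag.name'
assert msgpack.unpackb(data, raw=True)[-1] == record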
class WendelinHTTPRequestHandler(SimpleHTTPRequestHandler):
received_data = b''
def do_POST(self):
WendelinHTTPRequestHandler.received_data = self.rfile.read(
int(self.headers['Content-Length']))
self.send_response(200)
self.end_headers()
# see https://wendelin.nexedi.com/wendelin-Learning.Track/wendelin-Tutorial.Setup.Fluentd.on.IOTGateway
class GatewayConfTestCase(WendelinTutorialTestCase):
@classmethod
def gateway_conf(cls, fluentd_port, wendelin_port):
return '''\
<source>
@type forward
port %s
bind %s
</source>
<match tag.name>
@type wendelin
streamtool_uri http://[%s]:%s/erp5/portal_ingestion_policies/default
user foo
password bar
<buffer>
flush_mode interval
@type file
path fluentd-buffer-file/
flush_interval %ss
</buffer>
</match>''' % (fluentd_port, cls._ipv6_address, cls._ipv6_address,
wendelin_port, FLUSH_INTERVAL)
@classmethod
def get_configuration(cls):
fluentd_port = findFreeTCPPort(cls._ipv6_address)
cls._fluentd_port = fluentd_port
wendelin_port = findFreeTCPPort(cls._ipv6_address)
cls._wendelin_port = wendelin_port
return cls.gateway_conf(fluentd_port, wendelin_port)
def test_configuration_file(self):
self._test_configuration('starting fluentd')
def test_wendelin_data_forwarding(self):
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.connect((self._ipv6_address, self._fluentd_port))
data = [
msgpack.ExtType(0, struct.pack('!Q', int(time.time()) << 32)),
self.measureDict(),
]
sock.sendall(
msgpack.packb([
b'tag.name',
msgpack.packb(data),
{b'size': 1, b'compressed': b'text'},
], use_bin_type=False),
)
sock.close()
self.assertEqual(
data,
msgpack.unpackb(
self.serve(self._wendelin_port, WendelinHTTPRequestHandler)),
)
\ No newline at end of file
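The timestamp packed in the forwarding test above uses fluentd's EventTime encoding: msgpack ext type 0, eight big-endian bytes with seconds in the upper 32 bits and nanoseconds in the lower 32. A small sketch of decoding it back (values are illustrative):

import struct
import msgpack

def decode_event_time(ext):
    # EventTime: ext type 0, '!Q' = seconds << 32 | nanoseconds
    assert ext.code == 0
    value, = struct.unpack('!Q', ext.data)
    return value >> 32, value & 0xffffffff

seconds, nanoseconds = decode_event_time(
    msgpack.ExtType(0, struct.pack('!Q', 1600000000 << 32)))
assert (seconds, nanoseconds) == (1600000000, 0)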
......@@ -10,6 +10,9 @@ parts =
eggs
instance-cfg
[python]
part = python3
# eggs for instance.cfg
[eggs]
recipe = zc.recipe.egg
......@@ -22,7 +25,7 @@ eggs =
<= go-git-package
go.importpath = lab.nexedi.com/nexedi/galene
repository = https://lab.nexedi.com/nexedi/galene.git
revision = galene-0.3.3
revision = galene-0.4
[gowork]
install =
......
......@@ -54,7 +54,7 @@ md5sum = 0f1ec4077dab586cc003ae13f689eda2
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = 6b34d4b96ae0067977fa509046d71231
md5sum = f099d01baefe41c8f0944c2437b30881
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
......
......@@ -430,7 +430,7 @@ tune-command =
# [promise-<something>] to check <something> by url
[promise-byurl]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-http-code = 200
......@@ -472,7 +472,7 @@ depend =
[promise-postgresql]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-postgresql.py
config-command =
{{ postgresql_location }}/bin/psql \
......@@ -515,7 +515,7 @@ depend =
[promise-redis]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-redis.py
config-command = ${service-redis:promise_wrapper}
......@@ -769,7 +769,7 @@ depend =
# XXX this depends on gitlab-workhorse being up
# (nginx is configured to proxy all requests to gitlab-workhorse)
config-url = ${backend-info:url}/users/sign_in
module = check_url_available
promise = check_url_available
[logrotate-entry-nginx]
<= logrotate-entry-base
......
......@@ -4,7 +4,7 @@ md5sum = 5dfeeb5eca125dcaa5f9e537f941dd41
[instance-headless-chromium]
_update_hash_filename_ = instance-headless-chromium.cfg.in
md5sum = fad685238b26ca20537c12ce7432e7e7
md5sum = 7392d20e48dbc599eb8e9d02e8095bbf
[template-nginx-conf]
_update_hash_filename_ = templates/nginx.conf.in
......
......@@ -174,7 +174,7 @@ monitor-httpd-port = {{ parameter_dict['monitor-httpd-port'] }}
# queried with the correct credentials.
[frontend-ok-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = headless-chromium-frontend-ok.py
url = ${remote-debugging-frontend:connection-secure_access}
config-url = ${:url}
......@@ -185,7 +185,7 @@ config-password = ${frontend-instance-password:passwd}
# when queried with no credentials.
[frontend-secure-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = headless-chromium-frontend-secure.py
url = ${remote-debugging-frontend:connection-secure_access}
config-url = ${:url}
......
......@@ -14,4 +14,4 @@
# not need these here).
[instance-profile]
filename = instance.cfg.in
md5sum = c771dee1ef9aedad7c6ebf9418afe08e
md5sum = 483b76d8e6bf72d72a38a3f7bf66fe08
......@@ -87,7 +87,7 @@ wrapper-path = ${directory:service}/helloweb-${:kind}
# promise that checks that the helloweb service is alive
[helloweb-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = helloweb-${:kind}.py
{# macro to instantiate service of `kind` to listen on `port` #}
......
......@@ -21,7 +21,7 @@ md5sum = 9e486efe4ab1aba8cb72b04f6c6da8ad
[instance_html5as]
_update_hash_filename_ = instance_html5as.cfg.in
md5sum = 283440057c659bde2ae7fcc2c4c5b781
md5sum = f86b2f37c0acd21ca1f41d90c5477d75
[template_nginx_conf]
_update_hash_filename_ = templates/nginx_conf.in
......
......@@ -185,7 +185,7 @@ context =
# Port Listening checking promise
[port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx-port-listening.py
config-host = ${html5as:ip}
config-port = ${html5as:port}
......@@ -235,7 +235,7 @@ return = domain secure_access
# Add a promise to make sure the cdn is properly configured
[html5as-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = html5as-http-frontend.py
url = ${html5as-frontend:connection-secure_access}
config-url = ${:url}
......@@ -22,7 +22,7 @@ md5sum = 87781e6bcb523bb8434888d5f984f36c
[template-validator]
filename = instance-validator.cfg.in
md5sum = 9d12472bb2e337d3cc18f2cc6f235425
md5sum = dc8b8d03b0af9cd32398d1fe86267bb7
[template]
filename = instance.cfg.in
......
......@@ -55,7 +55,7 @@ scheme = https
[tomcat-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = tomcat_listen.py
config-host = $${tomcat-configuration:ip}
config-port = $${tomcat-configuration:port}
......
......@@ -15,7 +15,7 @@
[template-cfg]
filename = instance.cfg.in
md5sum = 9653104b2217dc26b23f9c1b997124ca
md5sum = 6f5acc546a7e9ad502d5fe586a3c3072
[template_nginx_conf]
_update_hash_filename_ = templates/nginx_conf.in
......
......@@ -113,6 +113,7 @@ template =
. ${hugo:go-environment}
cd ${basedirectory:data}/${slap-configuration:configuration.site}
if [ -d "public" ]; then rm -Rf public; fi
export TMPDIR=${hugo:path-tmp}
hugo && hugo server --bind=${hugo:ip} --port=${hugo:hugo-port} --baseURL=${hugo-frontend:connection-secure_access} --appendPort=false
[hugo-server-service]
......@@ -124,7 +125,7 @@ hash-files =
[hugo-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = hugo-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:hugo-port}
......@@ -144,7 +145,7 @@ return = domain secure_access
[hugo-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = hugo-http-frontend.py
url = ${hugo-frontend:connection-secure_access}
config-url = ${:url}
......@@ -212,7 +213,7 @@ template =
[nginx-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:nginx-port}
......@@ -238,7 +239,7 @@ return = domain secure_access
[nginx-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = nginx-http-frontend.py
url = ${nginx-frontend:connection-secure_access}
config-url = ${:url}
......
......@@ -19,7 +19,7 @@ md5sum = 6c17361a49cfc47564063b867aab6e8c
[template-jscrawler]
filename = instance-jscrawler.cfg.jinja2.in
md5sum = f61e0507717447e47c76a2b2712f17f4
md5sum = 33bfddbc23fa794ab97770ef7776b390
[template-jscrawler-builder]
filename = template-jscrawler.builder.sh.in
......
......@@ -47,7 +47,7 @@ return = secure_access domain
[jscrawler-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = jscrawler_frontend.py
config-url = ${request-jscrawler-frontend:connection-secure_access}
......@@ -59,7 +59,7 @@ log = ${httpd-wrapper:log-file}
[httpd-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = httpd-listen.py
config-host = ${httpd-wrapper:host}
config-port = ${httpd-wrapper:port}
......
......@@ -30,7 +30,7 @@ parts =
instance
[nodejs]
<= nodejs-8.6.0
<= nodejs-14.16.0
[eggs]
recipe = zc.recipe.egg
......
......@@ -19,7 +19,7 @@ md5sum = de37ec3d4adb0be4c67bcc7397f27c91
[instance-jupyter]
filename = instance-jupyter.cfg.in
md5sum = cbc90e517ae3680ab8bef04c6f503af5
md5sum = 95e3da48abdd257fb9d5dbdf14ea87b9
[jupyter-notebook-config]
filename = jupyter_notebook_config.py.jinja
......
......@@ -146,7 +146,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid}
[frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-parameter:url}
......@@ -160,7 +160,7 @@ sla-instance_guid = ${slap-parameter:frontend-additional-instance-guid}
[frontend-additional-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_additional_promise.py
config-url = ${publish-connection-parameter:url-additional}
{% endif %}
......
......@@ -43,12 +43,12 @@ KVM instance parameters:
- nbd2-port (default: 1024)
- nbd2-host
- ram-size (default: 1024)
- disk-size = (default: 10)
- ram-size (default: 4096)
- disk-size = (default: 40)
- disk-type (default: virtio)
Disk size and Disk type are used if no virtual hard drive is specified.
- cpu-count (default: 1)
- cpu-count (default: 2)
- cpu-options
cpu-options is a string: [cores=cores][,threads=threads][,sockets=sockets][,maxcpus=maxcpus]
- numa
......
......@@ -15,23 +15,23 @@
[template]
filename = instance.cfg.in
md5sum = b129c9b2a614563d3f7c3f9e906d59f2
md5sum = f2b0f1ed27148504f220e06eaceff935
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = d0f96be4e80b96e6ac33f6d474767b13
md5sum = 17c58f74d1ed4cb7dce11bf9af71dd33
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
md5sum = 59b92e1300aad4e9b116c532caf7d042
md5sum = fcb35c32ef985e3d69a7914711675dcc
[template-kvm-resilient]
filename = instance-kvm-resilient.cfg.jinja2
md5sum = 7de5756f59ef7d823cd8ed33e6d15230
md5sum = a0fd4911401cbbda74323e8d1c7b18ad
[template-kvm-import]
filename = instance-kvm-import.cfg.jinja2.in
md5sum = 7b15fdc6f19b1f44ff5a56586102ffe2
md5sum = 0415353c961ece593dd5d6457dab5200
[template-kvm-import-script]
filename = template/kvm-import.sh.jinja2
......@@ -47,7 +47,7 @@ md5sum = b617d64de73de1eed518185f310bbc82
[template-nbd]
filename = instance-nbd.cfg.jinja2
md5sum = 259e06f289f68297e0609e4ab1af8e86
md5sum = 4bcb07c1a9223e2d956651aa25d23654
[template-ansible-promise]
filename = template/ansible-promise.in
......@@ -55,7 +55,7 @@ md5sum = b7e87479a289f472b634a046b44b5257
[template-kvm-run]
filename = template/template-kvm-run.in
md5sum = a502782244d1be536b732ebb40725f47
md5sum = f840e8b7af83982525f66d7ec12b7085
[template-kvm-controller]
filename = template/kvm-controller-run.in
......@@ -75,7 +75,7 @@ md5sum = fb330a796fadb6cd5c85217f80a42af3
[template-httpd]
filename = instance-kvm-http.cfg.in
md5sum = f4bcde62e008c2da9c65617ba7f73f08
md5sum = d57764bb7135037b4d21543b2f56ce1d
[image-download-controller]
_update_hash_filename_ = template/image-download-controller.py
......
......@@ -225,7 +225,7 @@
"title": "RAM size",
"description": "RAM size, in MB.",
"type": "integer",
"default": 1024,
"default": 4096,
"minimum": 1024,
"multipleOf": 512
},
......@@ -233,6 +233,7 @@
"title": "Maximum RAM size, in MB",
"description": "Define the maximum size of the memory. The size is in MB and should be a multiple of 512. Defaults to ram-size + 512",
"type": "integer",
"default": 4608,
"minimum": 1024,
"multipleOf": 512
},
......@@ -254,7 +255,7 @@
"title": "Disk size",
"description": "Disk size, in GB.",
"type": "integer",
"default": 10,
"default": 40,
"minimum": 1
},
"disk-format": {
......@@ -313,6 +314,7 @@
"title": "CPU count",
"description": "Number of CPU cores.",
"type": "integer",
"default": 2,
"minimum": 1
},
"cpu-max-count": {
......@@ -474,9 +476,15 @@
"oneOf": [
{
"const": [
"https://shacache.nxdcdn.com/0a6aee1d9aafc1ed095105c052f9fdd65ed00ea9274188c9cd0072c8e6838ab40e246d45a1e6956d74ef1b04a1fc042151762f25412e9ff0cbf49418eef7992e#a3ebc76aec372808ad80000108a2593a"
"https://shacache.nxdcdn.com/02257c3ec27e45d9f022c181a69b59da67e5c72871cdb4f9a69db323a1fad58093f2e69702d29aa98f5f65e920e0b970d816475a5a936e1f3bf33832257b7e92#b710c178eb434d79ce40ce703d30a5f0"
],
"title": "Debian Bullseye 11.1 netinst x86_64"
},
{
"const": [
"https://shacache.nxdcdn.com/d82b0510fd919c2a851ee93ea0f7ad6779bfa597297a5c7463b63746799f001321ec4c9b8ba6cfe20248dd2da28100ad3b78e74489a8c0c573238f226a509a9d#9d7b9cc850464d60ac174787c53e8f3f"
],
"title": "Debian Buster 10.5 netinst x86_64"
"title": "Debian Buster 10.11 netinst x86_64"
},
{
"const": [
......
......@@ -77,14 +77,14 @@ config-authorized-key = {{ dumps(slapparameter_dict.get('authorized-keys') | joi
{% endif -%}
config-nbd-port = {{ dumps(kvm_parameter_dict.get('nbd-port', 1024)) }}
config-nbd2-port = {{ dumps(kvm_parameter_dict.get('nbd-port2', 1024)) }}
config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 1024)) }}
config-ram-max-size = {{ dumps(kvm_parameter_dict.get('ram-max-size', int(kvm_parameter_dict.get('ram-size', 1024)) + 512)) }}
config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 4096)) }}
config-ram-max-size = {{ dumps(kvm_parameter_dict.get('ram-max-size', int(kvm_parameter_dict.get('ram-size', 4096)) + 512)) }}
config-enable-device-hotplug = {{ dumps(kvm_parameter_dict.get('enable-device-hotplug', False)) }}
config-ram-hotplug-slot-size = {{ dumps(kvm_parameter_dict.get('ram-hotplug-slot-size', 512)) }}
config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 10)) }}
config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 40)) }}
config-disk-type = {{ dumps(kvm_parameter_dict.get('disk-type', 'virtio')) }}
config-disk-format = {{ dumps(kvm_parameter_dict.get('disk-format', 'qcow2')) }}
config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 1)) }}
config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 2)) }}
config-cpu-max-count = {{ dumps(kvm_parameter_dict.get('cpu-max-count', int(kvm_parameter_dict.get('cpu-count', 1)) + 1)) }}
{{ setconfig('numa', kvm_parameter_dict.get('numa', '')) }}
{{ setconfig('machine-options', kvm_parameter_dict.get('machine-options', '')) }}
......
......@@ -65,7 +65,7 @@ stop-on-error = true
[httpd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache-httpd.py
config-host = ${apache-conf:ip}
config-port = ${apache-conf:port}
......@@ -91,6 +91,6 @@ mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
......@@ -13,7 +13,7 @@
"title": "RAM size",
"description": "RAM size, in MB.",
"type": "integer",
"default": 1024,
"default": 4096,
"minimum": 1024,
"multipleOf": 512
},
......@@ -21,6 +21,7 @@
"title": "Maximum RAM size, in MB",
"description": "Define the maximum size of the memory. The size is in MB and should be a multiple of 512. Defaults to ram-size + 512",
"type": "integer",
"default": 4608,
"minimum": 1024,
"multipleOf": 512
},
......@@ -42,7 +43,7 @@
"title": "Disk size",
"description": "Disk size, in GB.",
"type": "integer",
"default": 10,
"default": 40,
"minimum": 1
},
"disk-type": {
......@@ -101,7 +102,8 @@
"title": "CPU count",
"description": "Number of CPU cores.",
"type": "integer",
"minimum": 1
"minimum": 1,
"default": 2
},
"cpu-max-count": {
"title": "Maximum CPU amount",
......
......@@ -102,13 +102,13 @@ mode = 700
[kvm-frontend-url-promise]
# Check that url parameter is complete
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-frontend-url.py
config-command = ${kvm-frontend-url-bin:output}
[kvm-backend-url-promise]
# Check that backend url is reachable
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
......@@ -22,8 +22,8 @@
{% set boot_image_url_list_enabled = 'boot-image-url-list' in slapparameter_dict %}
{% set boot_image_url_select_enabled = 'boot-image-url-select' in slapparameter_dict %}
{% set bootstrap_script_url = slapparameter_dict.get('bootstrap-script-url') -%}
{% set cpu_max_count = dumps(slapparameter_dict.get('cpu-max-count', int(slapparameter_dict.get('cpu-count', 1)) + 1)) %}
{% set ram_max_size = dumps(slapparameter_dict.get('ram-max-size', int(slapparameter_dict.get('ram-size', 1024)) + 512)) %}
{% set cpu_max_count = dumps(slapparameter_dict.get('cpu-max-count', int(slapparameter_dict.get('cpu-count', 2)) + 1)) %}
{% set ram_max_size = dumps(slapparameter_dict.get('ram-max-size', int(slapparameter_dict.get('ram-size', 4096)) + 512)) %}
{% set extends_list = [] -%}
{% set part_list = [] -%}
......@@ -91,7 +91,7 @@ bytes = 8
## boot-image-url-select support BEGIN
[empty-file-state-base-select-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -190,7 +190,7 @@ config-filename = ${boot-image-url-select-download-wrapper:error-state-file}
## boot-image-url-list support BEGIN
[empty-file-state-base-list-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -289,7 +289,7 @@ config-filename = ${boot-image-url-list-download-wrapper:error-state-file}
## virtual-hard-drive-url support BEGIN
[empty-file-state-base-virtual-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -441,8 +441,8 @@ socket-path = ${kvm-controller-parameter-dict:socket-path}
smp-max-count = {{ cpu_max_count }}
ram-max-size = {{ ram_max_size }}
{%- if enable_device_hotplug %}
init-ram-size = 1024
init-smp-count = 1
init-ram-size = 4096
init-smp-count = 2
{%- else %}
init-ram-size = ${kvm-controller-parameter-dict:ram-size}
init-smp-count = ${kvm-controller-parameter-dict:cpu-count}
......@@ -586,7 +586,7 @@ command-line = ${kvm-controller:rendered}
[kvm-vnc-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = vnc_promise.py
config-host = ${kvm-parameter-dict:vnc-ip}
config-port = ${kvm-parameter-dict:vnc-port}
......@@ -614,7 +614,7 @@ mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
......@@ -643,7 +643,7 @@ context =
[kvm-started-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = qemu-virtual-machine-is-ready.py
config-command = ${kvm-started-bin:rendered}
......@@ -708,7 +708,7 @@ wrapper = ${directory:bin}/websockify
[novnc-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = novnc_promise.py
config-host = ${novnc-instance:ip}
config-port = ${novnc-instance:port}
......@@ -764,7 +764,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid}
[frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
......@@ -778,7 +778,7 @@ sla-instance_guid = ${slap-parameter:frontend-additional-instance-guid}
[frontend-additional-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_additional_promise.py
config-url = ${publish-connection-information:url-additional}
{% endif %}
......@@ -801,7 +801,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[httpd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = httpd.py
config-host = ${httpd:host}
config-port = ${httpd:port}
......@@ -1029,7 +1029,7 @@ context =
[ansible-vm-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ansible_{{ name }}.py
config-command = ${ansible-vm-bin:rendered}
......@@ -1068,14 +1068,14 @@ nbd2-host =
boot-image-url-list =
enable-device-hotplug = False
ram-size = 1024
ram-size = 4096
ram-hotplug-slot-size = 512
disk-size = 10
disk-size = 40
disk-type = virtio
disk-format = qcow2
disk-device-path =
cpu-count = 1
cpu-count = 2
disk-cache =
disk-aio =
auto-ballooning = True
......
......@@ -43,7 +43,7 @@ rendered = ${rootdirectory:bin}/check-nbd-running.sh
[nbd-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = nbd_promise.py
config-command = ${nbd-checker-bin:rendered}
......@@ -65,7 +65,7 @@ key = ${gen-passwd:passwd}
[onetimeupload-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = onetimeupload_promise.py
config-host = ${onetimeupload-instance:ip}
config-port = ${onetimeupload-instance:port}
......
......@@ -82,7 +82,7 @@ extra-context =
raw dash_executable_location ${dash:location}/bin/dash
raw dnsresolver_executable ${buildout:bin-directory}/dnsresolver
raw dcron_executable_location ${dcron:location}/sbin/crond
raw debian_amd64_netinst_location ${debian-amd64-buster-netinst.iso:target}
raw debian_amd64_netinst_location ${debian-amd64-bullseye-netinst.iso:target}
raw whitelist_domains_default ${whitelist-domains-default:location}/${whitelist-domains-default:filename}
raw whitelist_firewall_download_controller ${whitelist-firewall-download-controller:target}
raw image_download_controller ${image-download-controller:target}
......
......@@ -277,6 +277,8 @@ kvm_argument_list = [qemu_path,
'-pidfile', pid_file_path, '-msg', 'timestamp=on',
'-D', logfile,
'-nodefaults',
# switch to tablet mode for the mouse to have it synced with a client, see https://wiki.gentoo.org/wiki/QEMU/Options#USB
'-usbdevice', 'tablet',
]
for disk_info in disk_info_list:
kvm_argument_list += (
......
......@@ -230,7 +230,7 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
def test(self):
kvm_pid_1, info_list = self.getKvmProcessInfo(['-smp', '-m'])
self.assertEqual(
['1,maxcpus=2', '1024M,slots=128,maxmem=1536M'],
['2,maxcpus=3', '4096M,slots=128,maxmem=4608M'],
info_list
)
self.rerequestInstance({
......@@ -264,7 +264,7 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
kvm_pid_1, info_list = self.getKvmProcessInfo(['-smp', '-m'])
self.assertEqual(
['1,maxcpus=2', '1024M,slots=128,maxmem=1536M'],
['2,maxcpus=3', '4096M,slots=128,maxmem=4608M'],
info_list
)
self.assertEqual(
......@@ -275,15 +275,15 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
parameter_dict = {
'enable-device-hotplug': 'true',
# to avoid restarts the max RAM and CPU has to be static
'ram-max-size': '2048',
'cpu-max-count': '4',
'ram-max-size': '8192',
'cpu-max-count': '6',
}
self.rerequestInstance(parameter_dict)
self.slap.waitForInstance(max_retry=2)
kvm_pid_2, info_list = self.getKvmProcessInfo(['-smp', '-m'])
self.assertEqual(
['1,maxcpus=4', '1024M,slots=128,maxmem=2048M'],
['2,maxcpus=6', '4096M,slots=128,maxmem=8192M'],
info_list
)
self.assertEqual(
......@@ -292,21 +292,21 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
)
self.assertNotEqual(kvm_pid_1, kvm_pid_2, "Unexpected: KVM not restarted")
parameter_dict.update(**{
'ram-size': '1536',
'cpu-count': '2'
'ram-size': '5120',
'cpu-count': '4'
})
self.rerequestInstance(parameter_dict)
self.slap.waitForInstance(max_retry=10)
kvm_pid_3, info_list = self.getKvmProcessInfo(['-smp', '-m'])
self.assertEqual(
['1,maxcpus=4', '1024M,slots=128,maxmem=2048M'],
['2,maxcpus=6', '4096M,slots=128,maxmem=8192M'],
info_list
)
self.assertEqual(kvm_pid_2, kvm_pid_3, "Unexpected: KVM restarted")
self.assertEqual(
getHotpluggedCpuRamValue(),
{'cpu_count': 1, 'ram_mb': 512}
{'cpu_count': 2, 'ram_mb': 1024}
)
......
[instance-profile]
filename = instance.cfg.in
md5sum = 8e48fa7c66a59b3d5faf0216922a574f
md5sum = 143f46b125389f39905226ec9482ce2a
......@@ -45,7 +45,7 @@ promises =
[metabase-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-instance:url}/api/session/properties
......@@ -91,7 +91,7 @@ command-line =
[postgresql-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-postgresql.py
config-command = $${postgresql-psql:wrapper-path} -c '\q'
......
......@@ -18,7 +18,7 @@ md5sum = b6c2df0d4a62473d6dae26b10c0a4adc
[template-monitor]
_update_hash_filename_ = instance-monitor.cfg.jinja2
md5sum = 165a15672fc85981f68b9af2d6253254
md5sum = f23c007d6d6aed137cfd54aaa7ba52ab
[json-test-template]
_update_hash_filename_ = json-test-template.json.in.jinja2
......@@ -26,7 +26,7 @@ md5sum = 2eb5596544d9c341acf653d4f7ce2680
[template-monitor-edgetest-basic]
_update_hash_filename_ = instance-monitor-edgetest-basic.cfg.jinja2
md5sum = 61309a48f7b0135cba21b09247a2d8fd
md5sum = 05c00ac393b50cfdef5d3bc5af93fe98
[template-monitor-edgetest]
_update_hash_filename_ = instance-monitor-edgetest.cfg.jinja2
......@@ -34,7 +34,7 @@ md5sum = 3c8ab4e78f66c974eb95afc595a13514
[template-monitor-edgebot]
_update_hash_filename_ = instance-monitor-edgebot.cfg.jinja2
md5sum = 2ac74559d6108ca0dbabb872f1071e44
md5sum = 436bb5251c8f1cd1e64bd5d3987d699c
[network-bench-cfg]
filename = network_bench.cfg.in
......
......@@ -36,7 +36,7 @@
{%- set safe_name = part_id.replace('_', '').replace('.', '-').replace(' ', '-') %}
[{{part_id}}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = {{ safe_name }}.py
config-report = http_query
config-url = {{ slave['url'] }}
......@@ -51,7 +51,7 @@ config-json-file = ${surykatka-config-{{ class }}:json}
[surykatka-bot-promise-{{ class }}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = surykatka-bot-promise-{{ class }}.py
config-report = bot_status
config-json-file = ${surykatka-config-{{ class }}:json}
......
......@@ -58,7 +58,7 @@
{%- do PART_LIST.append(part_name) %}
[{{part_name}}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = {{ promise_name }}.py
config-report = http_query
config-url = {{ url }}
......@@ -75,7 +75,7 @@ config-json-file = ${surykatka-config-{{ class }}:json}
{%- do PART_LIST.append('surykatka-bot-%i-promise' % (class,)) %}
[surykatka-bot-{{ class }}-promise]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = surykatka-bot-{{ class }}.py
config-report = bot_status
config-json-file = ${surykatka-config-{{ class }}:json}
......@@ -110,7 +110,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
{%- do PART_LIST.append('surykatka-json-%i-promise'% (class,)) %}
[surykatka-json-{{ class }}-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = surykatka-json-{{ class }}.py
config-filename = ${surykatka-config-{{ class }}:json}
config-state = not-empty
......
......@@ -82,7 +82,7 @@ wrapper-path = ${monitor-directory:bin}/monitor-collect-csv-dump
[monitor-check-memory-usage]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory-usage.py
config-command = {{ buildout_bin}}/check-computer-memory -db ${monitor-instance-parameter:collector-db} --threshold ${slap-parameter:memory-percent-threshold} --unit percent
......@@ -90,11 +90,8 @@ config-command = {{ buildout_bin}}/check-computer-memory -db ${monitor-instance-
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
file = ${monitor-conf-parameters:promise-output-file}
content =
from slapos.promise.plugin.check_server_cpu_load import RunPromise
module = slapos.promise.plugin.check_server_cpu_load
output = ${directory:plugins}/system-CPU-load-check.py
mode = 600
config-cpu-load-threshold = ${slap-parameter:cpu-load-threshold}
[publish-connection-information]
......
......@@ -14,4 +14,4 @@
# not need these here).
[template-instance]
filename = instance.cfg
md5sum = 0974248c0b0ad5da45670386a5301e47
md5sum = 14132bba14a1e66e7abb1a7c58b333e5
......@@ -76,7 +76,7 @@ command-line = sudo -V
[promise-sudo-on-host]
# assert sudo is installed, as it is required to enter the chroot 'cros_sdk'
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${promise-sudo-on-host-bin:wrapper-path}
......
......@@ -22,7 +22,7 @@ md5sum = c13b4f1a5aa526a8d3f8e02bf6baf785
[instance-neo-admin]
filename = instance-neo-admin.cfg.in
md5sum = dabc1e50475055b3ee9184dcace5e8d2
md5sum = b6e1ccb1d90160110202e5111eec2afa
[instance-neo-master]
filename = instance-neo-master.cfg.in
......
......@@ -18,7 +18,7 @@ plugin = ${:etc}/plugin
[monitor-neo-health]
<= monitor-promise-base
module = check_neo_health
promise = check_neo_health
name = ${:_buildout_section_name_}.py
config-neoctl = ${neoctl:wrapper-path}
{%- if bang_on_problem != None %}
......
......@@ -26,4 +26,4 @@ md5sum = 6f42f0a8c5e5c0c657541a65c4d9ee57
[template-nextcloud-instance]
filename = nextcloud-instance.cfg.in
md5sum = 86a92f542e516ac92802908b85354073
md5sum = 05f946a6523677e5dcf80e9fad230d1c
......@@ -28,7 +28,7 @@ depend =
[redis-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = redis.py
config-command = ${service-redis:promise_wrapper}
......@@ -106,7 +106,7 @@ depends =
[nextcloud-install-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-nextcloud-install.py
config-command = ${nc-install-wrapper:output}
......
[template]
filename = instance.cfg.in
md5sum = f9b6d01e29f2edddd9d6f99591976c33
md5sum = 56e986c74ef236f261834c57f5861ce0
[template-nginx-configuration]
filename = template-nginx.cfg.in
......
......@@ -75,7 +75,7 @@ promises =
[nginx-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${nginx-configuration:base-url}/status
......
......@@ -15,7 +15,7 @@
[instance]
filename = instance.cfg.in
md5sum = c962079a88a6ce97d8ce20fa4e8edfd1
md5sum = de38ed0348a9d50e01dbf383a661d53e
[tomcat-server-xml]
filename = server.xml.in
......
......@@ -87,7 +87,7 @@ instance-promises =
[tomcat-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${tomcat-instance:ip}
config-port = $${tomcat-instance:port}
......
......@@ -18,15 +18,15 @@ md5sum = fddea033e1aa9d6147a1a47bd7cc4b62
[template-powerdns]
filename = instance-powerdns.cfg
md5sum = c04c3b490e7f9f35af3d204a9df51f35
md5sum = a6fcfcef942cd9b57c2b0c69e318362c
[template-pdns-configuration]
_update_hash_filename_ = template/pdns.conf.jinja2
md5sum = 20c37ea06a8fa405bc02470d5115fd11
md5sum = 851353e1d4dd562ace58b3345c2da515
[template-dns-replicate]
_update_hash_filename_ = instance-powerdns-replicate.cfg.jinja2
md5sum = 4ff993a39da03d9d66d7c0f98efeb1e0
md5sum = 5b4b46136c6547c27508c4789ac5d0ee
[iso-list]
_update_hash_filename_ = template/zz.countries.nexedi.dk.rbldnsd
......@@ -34,4 +34,4 @@ md5sum = c4dc8c141d81b92d92cdb82ca67a13ee
[template-zones-file]
_update_hash_filename_ = template/zones-file.yml.jinja2
md5sum = 612de569ac3d1e8cc10b830683ff92ae
md5sum = 1fab79102f296a1259ce4ac9d054be9f
......@@ -66,14 +66,14 @@ name = {{dns_name}}
state = {{ slapparameter_dict.pop(state_key) }}
{% endif%}
config-supported-zone-list = {{ ' '.join(supported_zone_list) }}
config-soa = {{ "%s,%s" % (dns_domain, server_admin) }}
config-soa = {{ "%s,%s,0,10800,3600,604800,3600" % (dns_domain, server_admin) }}
{% for parameter in sla_parameters -%}
sla-{{ parameter }} = {{ slapparameter_dict.pop( sla_key + parameter ) }}
{% endfor -%}
[{{promise_section_title}}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = {{promise_section_title}}.py
config-host = {{ '${' ~ request_section_title ~ ':connection-powerdns-ipv6}' }}
config-port = {{ '${' ~ request_section_title ~ ':connection-powerdns-port}' }}
......
......@@ -137,7 +137,7 @@ extra-context =
# Promises
[pdns-promise-listen-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = pdns-port-listening.py
config-host = $${pdns:ipv4}
config-port = $${pdns:port}
......
# -------------------------------------------------------------------------
# Configure ip/port binding
local-address={{ pdns.get('ipv4') }}
local-ipv6={{ pdns.get('ipv6') }}
local-address={{ pdns.get('ipv4') }}, {{ pdns.get('ipv6') }}
local-port={{ pdns.get('port') }}
......
......@@ -22,7 +22,7 @@ domains:
# Note: For each domain, one record of the domain name MUST exist with a soa record.
records:
{{ zone }}:
- soa: {{ slapparameter_dict.get('soa', 'ns0.example.com,admin@example.com').replace(',', ' ') }}
- soa: {{ slapparameter_dict.get('soa', 'ns0.example.com,admin@example.com,0,10800,3600,604800,3600').replace(',', ' ') }}
{%- for ns in slapparameter_dict.get('ns-record', 'ns0.example.com,ns1.example.com').split(',') %}
- ns: {{ ns }}
{%- endfor %}
......
......@@ -19,7 +19,7 @@ md5sum = efb4238229681447aa7fe73898dffad4
[instance-default]
filename = instance-default.cfg.in
md5sum = c6dce31a36e4e13de62687e9888aeb77
md5sum = f6c583d24940a3a6838bd421dbb84a20
[proftpd-config-file]
filename = proftpd-config-file.cfg.in
......
......@@ -86,7 +86,7 @@ template = inline:{{ slapparameter_dict['ssh-key'] | indent }}
[proftpd-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = ${:_buildout_section_name_}.py
config-host = ${proftpd:ipv6}
config-port = ${proftpd:sftp-port}
......
......@@ -15,4 +15,4 @@
[instance-profile]
filename = instance.cfg.in
md5sum = 500b773d1a63a6a895f9b8038a582b05
md5sum = 9c4336f1f5143d3281c6706ff14abdd3
......@@ -33,7 +33,7 @@ pureftpd-dir = ${:srv}/pureftpd/
[check-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = check_nginx_port.py
[pureftpd-listen-promise]
......
......@@ -18,7 +18,7 @@ md5sum = 71531ed9c9b79fa769ab367e7ea2d2a5
[template-re6stnet]
filename = instance-re6stnet.cfg.in
md5sum = 870c34cf58acaaee21c71182dd3cb0cf
md5sum = 98f86d2a10d909215ae88ba6a602da27
[template-apache-conf]
filename = apache.conf.in
......
......@@ -170,14 +170,14 @@ context =
[re6st-registry-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = re6st-registry.py
config-host = ${re6st-registry:ipv4}
config-port = ${re6st-registry:port}
[apache-registry-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache-re6st-registry.py
config-host = ${apache-conf:ipv6}
config-port = ${apache-conf:port}
......
......@@ -18,7 +18,7 @@ md5sum = 8a08be95a04f1a47098c4fdef80bdfed
[instance-repman.cfg]
_update_hash_filename_ = instance-repman.cfg.jinja2.in
md5sum = 839642d7a56447b3f08fa69729faca61
md5sum = 697a1b546c883da45c14dbcd2d73b2b9
[config-toml.in]
_update_hash_filename_ = templates/config.toml.in
......@@ -34,7 +34,7 @@ md5sum = 0eeb24c6aa0760f0d33c4cc2828ddf30
[template-mariadb.cfg]
_update_hash_filename_ = instance-mariadb.cfg.jinja2.in
md5sum = 21a29a41768b2370d671d3086b3ef2bb
md5sum = a5c204cac552754520aee0570d379723
[template-my-cnf]
_update_hash_filename_ = templates/my.cnf.in
......
......@@ -327,13 +327,13 @@ dash = {{ dumps(dash) }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[{{ section('promise') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = mariadb.py
config-command = "{{ parameter_dict['bin-directory'] }}/is-local-tcp-port-opened" "{{ ip }}" "{{ port }}"
......
......@@ -216,21 +216,21 @@ depends =
[proxysql-{{ name }}-admin-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-admin-port-listening.py
config-host = {{ ipv4 }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-admin-port}' }}
[proxysql-{{ name }}-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-port-listening.py
config-host = {{ ipv4 }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-port}' }}
[proxysql-{{ name }}-ipv6-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-ipv6-port-listening.py
config-host = {{ ip }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-port}' }}
......@@ -403,14 +403,14 @@ context =
[repman-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = repman_service_listen.py
config-host = ${repman-parameter:ipv4}
config-port = ${repman-parameter:port}
[repman-listen-ssl-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = repman_service_ssl_listen.py
config-host = ${repman-parameter:ipv4}
config-port = ${repman-parameter:secure-port}
......@@ -508,13 +508,13 @@ return = domain secure_access
[repman-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check_repman_frontend.py
config-url = https://${repman-frontend:connection-domain}
[repman-backend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check_repman_backend.py
config-url = ${nginx-parameter:backend-ssl-url}
......
......@@ -19,4 +19,4 @@ md5sum = 0084214fae4ee1aad2c878aa393757af
[template-selenium]
filename = instance-selenium.cfg.in
md5sum = 884196ea35de35fa9159517912441ce6
md5sum = 35ba19f7cb4fe7fc9469611f2446c94e
......@@ -283,7 +283,7 @@ instance-promises =
[check-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
[sshd-listen-promise]
......@@ -304,7 +304,7 @@ config-port = $${selenium-server-hub-instance:port}
# Promise waiting for all nodes to be registered
[selenium-server-hub-nodes-registered-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = $${:_buildout_section_name_}.py
config-command =
$${selenium-server-check-nodes-registered:rendered} $${selenium-server-hub-instance:api-url} $${:expected-node-count}
......
......@@ -337,8 +337,8 @@ class TestFrontend(WebServerMixin, SeleniumServerTestCase):
class TestSSHServer(SeleniumServerTestCase):
@classmethod
def getInstanceParameterDict(cls):
cls.ssh_key = paramiko.RSAKey.generate(1024)
return {'ssh-authorized-key': 'ssh-rsa {}'.format(cls.ssh_key.get_base64())}
cls.ssh_key = paramiko.ECDSAKey.generate(bits=384)
return {'ssh-authorized-key': 'ecdsa-sha2-nistp384 {}'.format(cls.ssh_key.get_base64())}
def test_connect(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
......
......@@ -18,7 +18,7 @@ md5sum = 84f099cc9852c4f53a075dccbb3880f0
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = c7c0bb9abbd0f8cc6c7956d83a61c4b3
md5sum = f565956476c31881b6e51ae1c27793ad
[template-apache-backend-conf]
filename = apache-backend.conf.in
......
......@@ -203,7 +203,7 @@ input = inline:
[{{ section('apache-promise') }}]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache.py
config-host = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
......@@ -297,13 +297,13 @@ promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -2,7 +2,7 @@
This software release is used to run integration test of slapos softwares.
The approach is to use setuptools' integrated test runner, `python setup.py test`, to run tests.
The approach is to use the python unittest runner to run tests.
The `python` used in this command will be a `zc.recipe.egg` interpreter with
all eggs pre-installed by this software release.
......@@ -17,6 +17,7 @@ changes to the code, run tests and publish changes.
```bash
# install this software release and request an instance
# use software-py3.cfg instead of software.cfg if the SR you want to test is written in Python 3
SR=https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-sr-testing/software.cfg
COMP=slaprunner
INSTANCE_NAME=$COMP
......@@ -32,7 +33,7 @@ slapos request --node=computer_guid=$COMP $INSTANCE_NAME $SR
source ( environment-script from step above )
# The source code is a git clone working copy on the instance
cd ~/srv/runner/instance/slappartXXX/parts/slapos/
cd ~/srv/runner/instance/slappartXXX/software_release/parts/slapos-repository/
# change directory to the directory containing test for this software
cd ./software/helloworld/test/
......@@ -44,22 +45,25 @@ SLAPOS_TEST_DEBUG=1 python_for_test -m unittest discover -v
## Environment variables
The `environment-script` set all variabels except `SLAPOS_TEST_DEBUG` and `SLAPOS_TEST_VERBOSE` for you, but for reference, here is the list of variables which control the test runner:
The `environment-script` sets all variables except `SLAPOS_TEST_DEBUG`, `SLAPOS_TEST_VERBOSE`, `SLAPOS_TEST_SKIP_SOFTWARE_CHECK` and `SLAPOS_TEST_SKIP_SOFTWARE_REBUILD` for you, but for reference, here is the list of variables that control the test runner:
| Variable | Description |
| --- | --- |
| `SLAPOS_TEST_VERBOSE` | If set to 1, debugging output will be printed on console. This also adjust the log level of python process running tests. When running on test nodes, this is not set, so keep this difference in mind if test rely on python logger |
| `SLAPOS_TEST_DEBUG` | If set to 1, `slapos node instance` and `slapos node software` will run with `--buildout-debug` flag, which will invoke python debugger on error. |
| `SLAPOS_TEST_SKIP_SOFTWARE_CHECK` | If set to 1, software checks will not be performed. This can be used to speed up running tests locally, as checking software is an expensive operation. |
| `SLAPOS_TEST_SKIP_SOFTWARE_REBUILD` | If set to 1, the software will not be rebuilt before running the test: `slapos node software` is used, which installs the software only if it is not yet available. The default is to use `slapos node software --all`, which unconditionally rebuilds all softwares. |
| `SLAPOS_TEST_IPV6` | ipv6 used by this instance. Usually you want to use a global address here to be able to connect to this instance. |
| `SLAPOS_TEST_IPV4` | ipv4 used by this instance. |
| `SLAPOS_TEST_WORKING_DIR` | Path to use as a working directory to hold the standalone SlapOS. |
| `SLAPOS_TEST_SHARED_PART_LIST` | A `:`-separated list of paths to look for already installed shared parts. The SlapOS used in the test will not write in these, but will use a dedicated directory in `$SLAPOS_TEST_WORKING_DIR` |
| `SLAPOS_TEST_VERBOSE` | If set to 1, debugging output will be printed on console. This also adjust the log level of python process running tests. When running on test nodes, this is not set, so keep this difference in mind if test rely on python logger |
| `SLAPOS_TEST_DEBUG` | If set to 1, `slapos node instance` and `slapos node software` will run with `--buildout-debug` flag, which will invoke python debugger on error. |
## Frequently Asked Questions
### Where to find docs about the testing framework ?
Please refere to the docstrings from `slapos.testing` module, from `slapos.core` package.
Please refer to the docstrings from `slapos.testing` module, from `slapos.core` package.
This uses python unittest module from standard library, especially the setup hooks:
- `setUpModule` installs the software and performs some static checks
......@@ -69,7 +73,7 @@ This uses python unittest module from standard library, especially the setup hoo
### Can I run slapos commands to debug ?
The standalone slapos is created in `$SLAPOS_TEST_WORKING_DIR`. In this directory you will have a `bin/slapos` that you can run to start or stop services.
It's fine to use this command during development, but to programatically interract with the environment within the test, the recommended approach is to use supervisor XML-RPC API.
It's fine to use this command during development, but to programmatically interact with the environment within the test, the recommended approach is to use the supervisor XML-RPC API.
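For example, listing the processes of the standalone supervisord could look like the sketch below (this is not part of the framework itself; the socket location is an assumption about the `$SLAPOS_TEST_WORKING_DIR` layout, adjust it to your setup):

```python
import os
import xmlrpc.client

from supervisor.xmlrpc import SupervisorTransport

# supervisord of the standalone SlapOS listens on a unix socket inside the
# working directory; the exact relative path below is an assumption.
socket_path = os.path.join(
    os.environ['SLAPOS_TEST_WORKING_DIR'], 'inst', 'supervisord.socket')

server = xmlrpc.client.ServerProxy(
    'http://127.0.0.1',  # placeholder, the transport talks to the unix socket
    transport=SupervisorTransport(None, None, 'unix://' + socket_path))

# print one line per service managed by the standalone SlapOS
for process in server.supervisor.getAllProcessInfo():
    print(process['group'], process['name'], process['statename'])
```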
### How to use a development version of `slapos.cookbook` ?
......@@ -82,3 +86,7 @@ At the end of the test, a snapshot of the slapos instances is created. Sometimes
Most of the time, problems occur because paths on test nodes are very long. One advanced technique to reproduce the problem in your development environment is to set the `SLAPOS_TEST_WORKING_DIR` environment variable to a path of the same length as the ones used on test nodes.
One way to make instances use a slightly shorter path is to define the `__partition_reference__` class attribute, so that instances use it as a prefix instead of the class name (as in the sketch at the end of this section).
### Can I run only a specific test ?
Yes, please refer to the python unittest documentation. For example, you can use `python_for_test -m unittest -v test_module.TestClass.test_function` to run only that test function.
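For reference, a minimal test module that `discover` (or the command above) would pick up could look like this sketch; it assumes the `makeModuleSetUpAndTestCaseClass` helper from `slapos.testing.testcase` (see its docstrings in `slapos.core`), and the relative path to `software.cfg` is only an assumption about the usual layout:

```python
import os

from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass

# build the module-level setUpModule hook (installs the software and runs the
# static checks) and the base test case class for this software release.
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))


class TestMySoftware(InstanceTestCase):
    # keep instance paths short on test nodes: partitions are named t0, t1, ...
    __partition_reference__ = 't'

    def test_connection_parameters(self):
        # the requested instance is available as self.computer_partition
        self.assertTrue(self.computer_partition.getConnectionParameterDict())
```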
......@@ -337,6 +337,7 @@ tests =
theia ${slapos.test.theia-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
###
${:extra}
......@@ -345,7 +346,6 @@ extra =
# You should not add more lines here.
backupserver ${slapos.test.backupserver-setup:setup}
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
upgrade_erp5 ${slapos.test.upgrade_erp5-setup:setup}
htmlvalidatorserver ${slapos.test.htmlvalidatorserver-setup:setup}
slapos-master ${slapos.test.slapos-master-setup:setup}
......
......@@ -15,4 +15,4 @@
[template]
filename = instance.cfg
md5sum = 2a82b7d5163d042e85b14a645707a093
md5sum = 84bd6729e9b299c457cea2d1be6d05a4
......@@ -32,6 +32,16 @@ repository = ${kedifa-repository:location}
<= download-source
repository = ${caucase-repository:location}
[caucase-test-runner]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
export HOSTS="$(mktemp)"
trap 'rm "$HOSTS"' EXIT
printf '%s testhost\n%s testhost\n' "$SLAPOS_TEST_IPV4" "$SLAPOS_TEST_IPV6" > "$HOSTS"
export CAUCASE_NETLOC=testhost:8000 LD_PRELOAD=${userhosts:location}/lib/userhosts.so:$LD_PRELOAD
exec python -m unittest discover -v
rendered = $${caucase:location}/host_setting.sh
[slapos.libnetworkcache]
<= download-source
repository = ${slapos.libnetworkcache-repository:location}
......@@ -77,7 +87,7 @@ repository = ${rubygemsrecipe-repository:location}
recipe = slapos.recipe.template:jinja2
rendered = $${create-directory:etc}/$${:_buildout_section_name_}
template = inline:
export PATH=${coreutils:location}/bin:${curl:location}/bin:${openssl:location}/bin:${git:location}/bin:${libxslt:location}/bin:${socat:location}/bin:${lmsensors:location}/bin:${rsync:location}/bin/:${buildout:bin-directory}:$PATH
export PATH=${coreutils:location}/bin:${curl:location}/bin:${openssl:location}/bin:${jq:location}/bin:${sed:location}/bin:${grep:location}/bin:${git:location}/bin:${libxslt:location}/bin:${socat:location}/bin:${lmsensors:location}/bin:${rsync:location}/bin/:${buildout:bin-directory}:$PATH
export SLAPOS_TEST_IPV4=$${slap-configuration:ipv4-random}
export SLAPOS_TEST_IPV6=$${slap-configuration:ipv6-random}
export SLAPOS_TEST_EGGS_DIRECTORY=$${buildout:eggs-directory}
......@@ -98,9 +108,7 @@ template = inline:
)
TestCase(
"caucase",
# XXX caucase uses 2to3 dynamically in setup.py, so it only supports
# runnning tests with python setup.py test
['python', 'setup.py', 'test'],
['$${caucase-test-runner:rendered}'],
cwd="""$${caucase:location}""",
summaryf=UnitTest.summary,
)
......
......@@ -13,6 +13,10 @@ extends =
../../component/socat/buildout.cfg
../../component/lmsensors/buildout.cfg
../../component/rsync/buildout.cfg
../../component/jq/buildout.cfg
../../component/sed/buildout.cfg
../../component/grep/buildout.cfg
../../component/userhosts/buildout.cfg
../../stack/slapos.cfg
../../stack/caucase/buildout.cfg
../../stack/nxdtest.cfg
......
......@@ -18,7 +18,7 @@ md5sum = 8d6878ff1d2e75010c50a1a2b0c13b24
[template-runner]
filename = instance-runner.cfg
md5sum = 2a09b11c7dbade65d50e66287bf4c7b9
md5sum = 384285ab789396b6e674a8125ce2d030
[template-runner-import-script]
filename = template/runner-import.sh.jinja2
......@@ -26,7 +26,7 @@ md5sum = f2e2493bc5da90a53f86e5bcf64d2d57
[instance-runner-import]
filename = instance-runner-import.cfg.in
md5sum = ea7667f9af952bc4bdf43aad4520759f
md5sum = a4ebf6918a2c68c02898b2142357f490
[instance-runner-export]
filename = instance-runner-export.cfg.in
......
......@@ -134,7 +134,7 @@ mode = 755
[importer-consistency-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = importer-consistency-promise.py
config-command = ${importer-consistency-promise-bin:output}
......@@ -158,7 +158,7 @@ mode = 755
[software-release-deployment-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = software-release-deployment-promise.py
config-command =${software-release-deployment-bin:output}
......
......@@ -87,7 +87,7 @@ return = site_url domain
[custom-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = custom_frontend_promise.py
config-url = https://$${request-custom-frontend:connection-domain}
{% if slapparameter_dict.get('custom-frontend-basic-auth') -%}
......@@ -111,7 +111,7 @@ template = inline:
[custom-frontend-url-ready-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = custom_frontend_ready_promise.py
config-command = $${custom-frontend-url-ready-promise-bin:rendered}
......@@ -436,7 +436,7 @@ mode = 700
[apache-httpd-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:filename}.py
filename = apache-httpd-listening-on-tcp
config-url = $${apache-httpd:access-url}
......@@ -537,7 +537,7 @@ return = site_url domain
[slaprunner-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = slaprunner_frontend.py
config-url = https://$${request-frontend:connection-domain}/login
......@@ -556,7 +556,7 @@ return = secure_access domain
[httpd-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = slaprunner-apache-http-frontend.py
config-url = $${request-httpd-frontend:connection-secure_access}
......@@ -619,14 +619,14 @@ monitor-password = $${monitor-publish-parameters:monitor-password}
[slaprunner-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slaprunner.py
config-host = $${slaprunner:ipv6}
config-port = $${slaprunner:runner_port}
[runner-sshd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = runner-sshd.py
config-host = $${slap-network-information:global-ipv6}
config-port = $${runner-sshd-port:port}
......@@ -863,20 +863,20 @@ log = $${runnerdirectory:home}/instance/*/.slapgrid/log/instance.log $${runnerdi
[supervisord-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = supervisord.py
config-host = $${slaprunner:ipv4}
config-port = $${supervisord:port}
[slapos-supervisord-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = instance_supervisord.py
config-command = ${buildout:bin-directory}/slapos node supervisorctl --cfg=$${slaprunner:slapos.cfg} pid
[slapos-proxy-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slaproxy.py
config-host = $${slaprunner:ipv4}
config-port = $${slaprunner:proxy_port}
......
......@@ -339,9 +339,9 @@ class TestWeb(SlaprunnerTestCase):
class TestSSH(SlaprunnerTestCase):
@classmethod
def getInstanceParameterDict(cls):
cls.ssh_key_list = [paramiko.RSAKey.generate(1024) for i in range(2)]
cls.ssh_key_list = [paramiko.ECDSAKey.generate(bits=384) for i in range(2)]
return {
'user-authorized-key': 'ssh-rsa {}\nssh-rsa {}'.format(
'user-authorized-key': 'ecdsa-sha2-nistp384 {}\necdsa-sha2-nistp384 {}'.format(
*[key.get_base64() for key in cls.ssh_key_list]
)
}
......@@ -363,7 +363,7 @@ class TestSSH(SlaprunnerTestCase):
username, fingerprint_from_url = ssh_info.split(';fingerprint=')
client = paramiko.SSHClient()
self.assertTrue(fingerprint_from_url.startswith('ssh-rsa-'), '')
self.assertTrue(fingerprint_from_url.startswith('ssh-rsa-'), fingerprint_from_url)
fingerprint_from_url = fingerprint_from_url[len('ssh-rsa-'):]
class KeyPolicy(object):
......
......@@ -15,7 +15,7 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = e39925b69a8bc17d17be54c075ae2f88
md5sum = f396d9a0780f4fb17016dbd32b56d7b8
[instance]
_update_hash_filename_ = instance.cfg.in
......@@ -23,11 +23,11 @@ md5sum = a7d78b4002266c69ece05a476df82791
[instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in
md5sum = 861ef130f27175c2978a9b946b138dd5
md5sum = 57b707cf0ed83be1959d26a88c131906
[instance-export]
_update_hash_filename_ = instance-export.cfg.jinja.in
md5sum = b3cedaa1603ca8ed83fdd94ef4b35cc8
md5sum = 190a736471f0e0cffcb2838968e01d84
[instance-resilient]
_update_hash_filename_ = instance-resilient.cfg.jinja
......@@ -47,7 +47,7 @@ md5sum = 9e8c17a4b2d802695caf0c2c052f0d11
[yarn.lock]
_update_hash_filename_ = yarn.lock
md5sum = c37c2e9578794967a404d08859c09813
md5sum = 6faa52754c46e505912a478bc8ba3300
[python-language-server-requirements.txt]
_update_hash_filename_ = python-language-server-requirements.txt
......@@ -59,7 +59,7 @@ md5sum = 8157c22134200bd862a07c6521ebf799
[slapos.css.in]
_update_hash_filename_ = slapos.css.in
md5sum = d2930ec3ef973b7908f0fa896033fd64
md5sum = 841141fc699b8d8918ed0669e6e61995
[logo.png]
_update_hash_filename_ = logo.png
......
......@@ -69,7 +69,7 @@ export-promises =
[export-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resiliency-export-promise.py
config-command = $${export-promise-script:rendered}
......
......@@ -127,7 +127,7 @@ import-promises =
[import-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resiliency-import-promise.py
config-command = $${import-promise-script:rendered}
......
......@@ -82,21 +82,21 @@ instance-promises =
[theia-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${theia-instance:ip}
config-port = $${theia-instance:port}
[frontend-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${frontend-instance:ip}
config-port = $${frontend-instance:port}
[frontend-authentification-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
username = $${frontend-instance-password:username}
password = $${frontend-instance-password:passwd}
......@@ -106,7 +106,7 @@ config-url = https://$${:username}:$${:password}@[$${:ip}]:$${:port}
[remote-frontend-url-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${remote-frontend:connection-secure_access}
config-http-code = 401
......@@ -114,7 +114,7 @@ config-http-code = 401
{% if additional_frontend %}
[remote-additional-frontend-url-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${remote-additional-frontend:connection-secure_access}
config-http-code = 401
......@@ -122,7 +122,7 @@ config-http-code = 401
[slapos-standalone-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
# XXX promise plugins can not contain "slapos" in their names
name = standalone-listen-promise.py
config-host = $${slapos-standalone-instance:hostname}
......@@ -130,13 +130,13 @@ config-port = $${slapos-standalone-instance:port}
[slapos-standalone-ready-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = standalone-ready-promise.py
config-abstract = $${directory:runner}/standalone_ready
[slapos-autorun-promise]
<= monitor-promise-base
module = check_service_state
promise = check_service_state
name = autorun-state-promise.py
config-service = $${slapos-autorun:service-name}
config-expect = $${slapos-autorun:autorun}
......
/* backported fixes */
/* https://github.com/eclipse-theia/theia/commit/616c34e1c446a706f4cb02182b2d9195ef3ea854 */
.monaco-editor .monaco-list .monaco-list-row.focused,
.monaco-editor .monaco-list .monaco-list-row.focused,
.monaco-editor .monaco-list .monaco-list-row.focused .suggest-icon {
color: var(--theia-list-activeSelectionForeground) !important;
background-color: var(--theia-list-activeSelectionBackground) !important;
}
/* logo */
.theia-icon {
background-image: url('/{{ logo_image }}');
......
......@@ -19,8 +19,8 @@ md5sum = b43d5e8d1fc2d0eeb54f91cefe6a5bae
[template-turnserver]
filename = instance-turnserver.cfg.jinja2.in
md5sum = 7af3318d7249e9afe22436d9fe200159
md5sum = 932c4d82faa8e28b62bfbfc3dfe31c02
[template-insecure-turnserver]
filename = instance-insecure-turnserver.cfg.jinja2.in
md5sum = 3db65c3a16eb76ab438ac3817d1a5fea
md5sum = 504f0f5ead8600b80ba43f828a0f82b6
......@@ -57,7 +57,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[promise-check-turnserver-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_port }}
......
......@@ -123,14 +123,14 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[promise-check-turnserver-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_port }}
[promise-check-turnserver-tls-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-tls-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_tls_port }}
......
......@@ -3,7 +3,7 @@ request = portal.REQUEST
reference = request['reference']
data_chunk = request['data_chunk']
module = portal.data_stream_module
promise = portal.data_stream_module
try:
data_stream = module[reference]
except KeyError:
......
......@@ -36,6 +36,6 @@ mode = 0644
depends = ${caucase-jinja2-library-eggs:eggs}
[versions]
caucase = 0.9.10
caucase = 0.9.12
pem = 21.1.0
PyJWT = 1.7.1
......@@ -15,4 +15,4 @@
[caucase-jinja2-library]
filename = caucase.jinja2.library
md5sum = a5c7a46c6fb85aa22a371d9d2cd9e57e
md5sum = 1e3607e514320441ddccdb6d1a21f705
......@@ -37,7 +37,7 @@ command-line = '{{ buildout_bin_directory }}/caucased'
{% if promise -%}
[{{ prefix }}-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = {{ prefix }}.py
config-command = '{{ buildout_bin_directory }}/caucase-probe' 'http://{{ netloc }}'
{%- endif %}
......@@ -119,7 +119,7 @@ command-line = '{{ buildout_bin_directory }}/caucase-updater'
{% if promise -%}
[{{ prefix }}-promise]
<= monitor-promise-base
module = check_certificate
promise = check_certificate
name = {{ prefix }}.py
config-certificate = {{ crt_path }}
config-key = {{ key_path }}
......
......@@ -26,11 +26,11 @@ md5sum = d10b8e35b02b5391cf46bf0c7dbb1196
[template-mariadb]
filename = instance-mariadb.cfg.in
md5sum = 7d064777c1c4e7b275b255db4f4b1da9
md5sum = c82ea00c4514b72fb97a6fa7ac36ec52
[template-kumofs]
filename = instance-kumofs.cfg.in
md5sum = fed6dd2bdc389b4fc7e7b7ca32c5d4b6
md5sum = cfe4696a67bf4886a5d8252a5274a941
[template-zope-conf]
filename = zope.conf.in
......@@ -50,7 +50,7 @@ md5sum = 1102c3e37a5a2e8aa2d8a2607ab633c8
[template-postfix]
filename = instance-postfix.cfg.in
md5sum = 2a68a3e7c5c509cbd4cfa9e670ac91c7
md5sum = 0f666e5e7e52afda433feb9f02452717
[template-postfix-master-cf]
filename = postfix_master.cf.in
......@@ -78,7 +78,7 @@ md5sum = fcc8470824c448a56e2282c43b870cb5
[template-zeo]
filename = instance-zeo.cfg.in
md5sum = 79b6b422df512b5a075eba54a6895a01
md5sum = 1f33f3b93da32b34e2fd11471648835d
[template-zodb-base]
filename = instance-zodb-base.cfg.in
......@@ -86,15 +86,15 @@ md5sum = bc821f9f9696953b10a03ad7b59a1936
[template-zope]
filename = instance-zope.cfg.in
md5sum = 58ca95f6e0c067702a03fc3be66d50c1
md5sum = 769e81946c346530cebfce6ad7553165
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = c6c1b3e4b2f3c6f256153dcfe9fbecad
md5sum = d6166515fda7b09df754672536b131be
[template-haproxy-cfg]
filename = haproxy.cfg.in
md5sum = 3f4f7e49c504cbf610fc5dc462713dfc
md5sum = 9d61e05c8578e0f17e349603ccaaf52c
[template-rsyslogd-cfg]
filename = rsyslogd.cfg.in
......@@ -102,4 +102,4 @@ md5sum = 5cf0316fdd17a940031e4083bbededd8
[instance-wcfs.cfg.in]
filename = instance-wcfs.cfg.in
md5sum = 945e8e4552a6bdf228b9609567b09399
md5sum = eb4be2669a9a56187cc4366272e11d18
......@@ -172,7 +172,7 @@ listen family_{{ name }}
# logs
capture request header Referer len 512
capture request header User-Agent len 512
log-format "%{+Q}o %{-Q}ci - - [%trg] %r %ST %B %{+Q}[capture.req.hdr(0)] %{+Q}[capture.req.hdr(1)] %Tt"
log-format "%{+Q}o %{-Q}ci - - [%trg] %r %ST %B %{+Q}[capture.req.hdr(0)] %{+Q}[capture.req.hdr(1)] %Ta"
{% for outer_prefix, inner_prefix in family_path_routing_dict.get(name, []) + path_routing_list %}
{% set outer_prefix = outer_prefix.strip('/') -%}
......
......@@ -272,7 +272,7 @@ hash-files = ${rsyslogd-cfg:rendered}
[{{ section ('rsyslogd-listen-promise') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = rsyslogd_listen_promise.py
config-command = test -S ${rsyslogd-cfg-parameter-dict:log-socket}
......@@ -303,7 +303,7 @@ certificate-and-key = ${directory:etc}/certificate-and-key-generated.pem
[{{ section('haproxy-promise') }}]
<= monitor-promise-base
# Check any haproxy port in ipv4, expect other ports and ipv6 to behave consistently
module = check_socket_listening
promise = check_socket_listening
name = haproxy.py
config-host = {{ ipv4 }}
config-port = {{ haproxy_dict.values()[0][0] }}
......@@ -382,13 +382,13 @@ promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -86,7 +86,7 @@ rendered = ${directory:srv}/exporter.exclude
# Deploy zope promises scripts
[promise-template]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
config-host = ${kumofs-instance:ip}
config-port = ${kumofs-instance:server-listen-port}
......@@ -112,7 +112,7 @@ config-port = ${kumofs-instance:manager-port}
[promise-check-computer-memory]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -332,7 +332,7 @@ context =
{%if slapparameter_dict.get('max-slowqueries-threshold') and slapparameter_dict.get('slowest-query-threshold') %}
[{{ section('monitor-promise-slowquery-result') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-slow-query-pt-digest-result.py
config-command = "{{ parameter_dict['promise-check-slow-queries-digest-result'] }}" --ptdigest_path "${directory:slowquery}" --status_file ${monitor-directory:private}/mariadb_slow_query.report.json --max_queries_threshold "${:max_queries_threshold}" --slowest_query_threshold "${:slowest_queries_threshold}"
max_queries_threshold = {{ slapparameter_dict['max-slowqueries-threshold'] }}
......@@ -341,13 +341,13 @@ slowest_queries_threshold = {{ slapparameter_dict['slowest-query-threshold'] }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[{{ section('promise') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = mariadb.py
config-command = "${binary-wrap-mysql:wrapper-path}" --execute ';' {% if database_list and database_list[0].get('user') %} --host="${my-cnf-parameters:ip}" --port="${my-cnf-parameters:port}" --user="{{ database_list[0]['user'] }}" --password="{{ database_list[0]['password'] }}" {% endif %}
......
......@@ -80,7 +80,7 @@ wrapper-path = ${directory:run}/munnel
[{{ section('munnel-promise') }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = munnel.py
config-host = {{ ip }}
config-port = {{ milter_port }}
......@@ -262,14 +262,14 @@ wrapper-path = ${directory:run}/postfix-master
[{{ section('postfix-promise') }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = postfix.py
config-host = {{ ip }}
config-port = {{ tcpv4_port }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -52,7 +52,7 @@ wrapper-path = ${directory:service-on-watch}/wcfs
[wcfs-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ bin_directory }}/wcfs status {{ zurl }}
......
......@@ -55,7 +55,7 @@ post = test ! -s {{ "${" ~ zeo_section_name ~":pid-path}" }} || {{ bin_directory
[{{ section(zeo_section_name ~ "-promise") }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = zeo-{{ family }}.py
config-host = {{ "${" ~ zeo_section_name ~ ":ip}" }}
config-port = {{ "${" ~ zeo_section_name ~ ":port}" }}
......@@ -89,7 +89,7 @@ tidstorage-wrapper = ${directory:services}/tidstoraged
[{{ section("promise-tidstorage") }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = tidstorage.py
config-host = ${tidstorage:ip}
config-port = ${tidstorage:port}
......@@ -177,7 +177,7 @@ mode = 755
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -358,7 +358,7 @@ hash-existing-files =
[{{ section("promise-" ~ name) }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = {{ name }}.py
config-host = {{ ipv4 }}
config-port = {{ port }}
......@@ -373,7 +373,7 @@ ipv4-port = {{ port }}
[{{ section("promise-tunnel-" ~ name) }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = {{ zope_tunnel_base_name }}.py
config-host = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6}' }}
config-port = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6-port}' }}
......@@ -384,7 +384,7 @@ config-port = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6-port}' }}
{% if longrequest_logger_interval > 0 -%}
[{{ section('promise-check-' ~name ~ '-longrequest-error-log') }}]
<= monitor-promise-base
module = check_error_on_zope_longrequest_log
promise = check_error_on_zope_longrequest_log
name = {{'check-' ~ name ~ '-longrequest-error-log.py'}}
config-log-file = {{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }}
config-error-threshold = {{ slapparameter_dict["zope-longrequest-logger-error-threshold"] }}
......@@ -528,7 +528,7 @@ expected-value =
[{{ section("promise-test-runner-apache-url") }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${promise-test-runner-apache-url-executable:path}
......@@ -537,7 +537,7 @@ config-command = ${promise-test-runner-apache-url-executable:path}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -23,7 +23,6 @@ extends =
../../component/mariadb/buildout.cfg
../../component/perl/buildout.cfg
../../component/sqlite3/buildout.cfg
../../component/stunnel/buildout.cfg
../../component/xz-utils/buildout.cfg
../../component/zlib/buildout.cfg
../erp5/buildout.cfg
......@@ -83,7 +82,6 @@ context =
key logrotate_cfg template-logrotate-base:rendered
key gzip_location gzip:location
key xz_utils_location xz-utils:location
key stunnel_location stunnel:location
key template_monitor monitor2-template:rendered
key mariadb_link_binary template-mariadb:link-binary
key mariadb_location mariadb:location
......
......@@ -18,11 +18,11 @@ md5sum = e4e070f93adaf917f9427ae9f35573d9
[instance-apache-php]
filename = instance-apache-php.cfg.in
md5sum = 4afee4377fa9cbc1e4ff80647b2f279c
md5sum = e7a14c01e6314e2bffebd7d80cf1c488
[instance-lamp]
filename = instance-lamp.cfg.jinja2.in
md5sum = 79f562260895df2665a85df5cb442193
md5sum = e0e2e88b6deeb011b998b78e4e468555
[template-apache.conf]
filename = apache.conf.in
......
......@@ -207,7 +207,7 @@ backend-url = ${apache-php-configuration:url}
[promise]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache-httpd-port-listening.py
config-host = ${apache-php-configuration:ip}
config-port = ${apache-php-configuration:port}
......
......@@ -77,7 +77,7 @@ return = domain secure_access
[lamp-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = lamp-http-frontend.py
url = ${request-frontend:connection-secure_access}
config-url = ${:url}
......
......@@ -14,7 +14,7 @@
# not need these here).
[monitor2-template]
filename = instance-monitor.cfg.jinja2.in
md5sum = d4185c191e8b9df20e1f98cd8c556b1d
md5sum = 3cba541a8b0b22c2648848ed1d259174
[monitor-httpd-conf]
_update_hash_filename_ = templates/monitor-httpd.conf.in
......
......@@ -302,14 +302,12 @@ pre = {{ monitor_statistic }} --history_folder ${monitor-directory:public}
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
content =
from slapos.promise.plugin.${:module} import RunPromise
mode = 600
module = slapos.promise.plugin.${:promise}
output = ${directory:plugins}/${:name}
[monitor-httpd-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = monitor-httpd-listening-on-tcp.py
config-url = ${monitor-httpd-conf-parameter:url}
config-http-code = 401
......@@ -360,7 +358,7 @@ return = domain secure_access
# credentials.
[check-monitor-password-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check-monitor-frontend-password.py
url = ${monitor-frontend:connection-secure_access}
config-url = ${:url}
......@@ -371,7 +369,7 @@ config-password = ${monitor-instance-parameter:password}
# supplied.
[monitor-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = monitor-http-frontend.py
url = ${monitor-frontend:connection-secure_access}
config-url = ${:url}
......@@ -379,7 +377,7 @@ config-http-code = 401
[monitor-bootstrap-promise]
<= monitor-promise-base
module = monitor_bootstrap_status
promise = monitor_bootstrap_status
name = monitor-bootstrap-status.py
config-process-pid-file = ${monitor-conf-parameters:pid-file}
config-process-name = ${start-monitor:name}
......@@ -387,13 +385,13 @@ config-status-file = ${monitor-conf-parameters:promise-output-file}
[promise-check-slapgrid]
<= monitor-promise-base
module = check_partition_deployment_state
promise = check_partition_deployment_state
name = buildout-${slap-connection:partition-id}-status.py
config-monitor-url = ${monitor-instance-parameter:monitor-base-url}
[promise-check-free-disk-space]
<= monitor-promise-base
module = check_free_disk_space
promise = check_free_disk_space
name = check-free-disk-space.py
config-collectordb = ${monitor-instance-parameter:collector-db}
config-threshold-file = ${directory:etc}/min-free-disk-size
......
......@@ -14,23 +14,23 @@
# not need these here).
[pbsready]
filename = pbsready.cfg.in
md5sum = 005125621d157b3ae04c428ea6060e37
md5sum = 1d3aba1ba770ad3fcc2ab6c0b9266409
[pbsready-import]
filename = pbsready-import.cfg.in
md5sum = dd8f0728e53b49822eed5d613839558f
md5sum = a8c9821951425bedbdea30a870fb5138
[pbsready-export]
filename = pbsready-export.cfg.in
md5sum = 2b0c71b085cfe8017f28098c160b1f49
md5sum = 8f15263c4a27ec315eb3a12dbf7a7b34
[template-pull-backup]
filename = instance-pull-backup.cfg.in
md5sum = b240dc76a663190304d8bcb9cabcda8f
md5sum = 4425db50d551fb8a974e547308990bac
[template-replicated]
filename = template-replicated.cfg.in
md5sum = 41aee09e9f9abbae59b0442e1e76387f
md5sum = c4012ccc2c473ae5c7cad9dcac61e0f1
[template-parts]
filename = template-parts.cfg.in
......
......@@ -217,7 +217,7 @@ wrapper-path = $${rootdirectory:bin}/stalled-pull-push
[pull-push-stalled-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = stalled-pull-push.py
config-command = $${pull-push-stalled-promise-bin:wrapper-path}
......@@ -233,7 +233,7 @@ context =
[notifier-feed-status-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = notifier-feed-check-malformed-or-failure.py
config-command = $${notifier-feed-status-promise-bin:rendered}
......
......@@ -60,7 +60,7 @@ rendered = ${rootdirectory:bin}/exporter-status
[notifier-exporter-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = exporter-status.py
config-command = ${notifier-exporter-promise-bin:rendered}
......
......@@ -81,7 +81,7 @@ mode = 700
[backup-checksum-integrity-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = backup-checksum-integrity.py
config-command = $${backup-checksum-integrity-promise-bin:rendered}
......@@ -159,7 +159,7 @@ wrapper-path = $${basedirectory:services}/resilient-web-takeover-httpd
[resilient-web-takeover-httpd-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = resilient-web-takeover-httpd.py
config-url = http://[$${resilient-web-takeover-httpd-configuration-file:listening-ip}]:$${resilient-web-takeover-httpd-configuration-file:listening-port}/
......
......@@ -165,7 +165,7 @@ wrapper-path = $${rootdirectory:bin}/stalled-notifier-callbacks
[notifier-stalled-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = stalled-notifier-callbacks.py
config-command = $${notifier-stalled-promise-bin:wrapper-path}
......@@ -217,7 +217,7 @@ wrapper-path = $${basedirectory:scripts}/sshd-graceful
[sshd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = sshd.py
config-host = $${slap-network-information:global-ipv6}
config-port = $${sshd-port:port}
......@@ -261,7 +261,7 @@ mode = 700
[resilient-sshkeys-sshd-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = public-key-existence.py
config-command = $${resilient-sshkeys-sshd-promise-bin:output}
......@@ -281,7 +281,7 @@ context =
[notifier-feed-status-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = notifier-feed-check-malformed-or-failure.py
config-command = $${notifier-feed-status-promise-bin:rendered}
#----------------
......
......@@ -148,7 +148,7 @@ mode = 700
[resilient-request-{{namebase}}-public-key-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resilient-request-{{namebase}}-public-key.py
config-command = ${resilient-request-{{namebase}}-public-key-promise-bin:output}
......@@ -170,7 +170,7 @@ mode = 700
[resilient-request-{{namebase}}-pseudo-replicating-{{id}}-public-key-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resilient-request-{{namebase}}-pseudo-replicating-{{id}}-public-key.py
config-command = ${resilient-request-{{namebase}}-pseudo-replicating-{{id}}-public-key-promise-bin:output}
......@@ -238,7 +238,7 @@ mode = 700
[resilient-request-pbs-{{namebase}}-{{id}}-public-key-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resilient-request-pbs-{{namebase}}-{{id}}-public-key.py
config-command = ${resilient-request-pbs-{{namebase}}-{{id}}-public-key-promise-bin:output}
......
......@@ -188,9 +188,9 @@ requests = 2.24.0
scandir = 1.10.0
setproctitle = 1.1.10
setuptools-dso = 1.7
rubygemsrecipe = 0.4.2
rubygemsrecipe = 0.4.3
six = 1.12.0
slapos.cookbook = 1.0.212
slapos.cookbook = 1.0.214
slapos.core = 1.6.19
slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
......