Commit 21f7177e authored by Kirill Smelkov

Merge branch 'master+ZODB4-wc2' into y/wc2-next

* master+ZODB4-wc2: (72 commits)
  Release slapos.cookbook (1.0.217)
  software/theia: use a different storage-path for password
  software/slapos-master: expose random_activity_priority argument of testrunner
  slapos.recipe.random: allow to use a character twice in a password
  slapos.recipe.random: use more characters to generate password
  slapos.recipe.random: increase default password size to 16
  ...
  stack/erp5: expose random_activity_priority argument of testrunner
  version up: slapos.recipe.build 0.50, slapos.recipe.cmmi 0.18
  couchdb, erlang, membase, spidermonkey: move to unstable
  mariadb: build RocksDB without fallocate
  version up: groonga 11.0.9, mroonga 11.09, groonga-normalizer-mysql 1.1.5
  version up: MariaDB 10.3.32/10.4.22
  mariadb: small cleanup
  erp5: Add oic library for OpenIdConnect
  component/tcl: Use software release provided zlib
  software/theia: Fix password leak in authentication promise
  software/theia: version up 1.20.0
  component/trafficserver: fix LuaJIT integration
  software/kvm: fix TestDiskDevicePathWipeDiskOndestroy test
  ...
parents be8ddac7 c398531e
......@@ -3,17 +3,9 @@
[buildout]
parts = 6tunnel
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
[6tunnel]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/wojtekka/6tunnel/releases/download/0.11rc2/6tunnel-0.11rc2.tar.gz
md5sum = 74e02d4f0704b3083a01feda66033449
pre-configure =
aclocal
autoconf
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:%(PATH)s
url = https://github.com/wojtekka/6tunnel/releases/download/0.13/6tunnel-0.13.tar.gz
md5sum = b13ba5ad8efc5d74b2dd71c2df85ef35
......@@ -39,9 +39,9 @@ configure-options =
[apache]
recipe = slapos.recipe.cmmi
shared = true
version = 2.4.49
version = 2.4.51
url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
md5sum = f294efbeabcf6027fccc7983a6daa55f
md5sum = d2793fc1c8cb8ba355cee877d1f2d46d
configure-options = --disable-static
--enable-authn-alias
--enable-bucketeer
......
diff -ur autoconf-2.69/bin/Makefile.in autoconf-2.69/bin/Makefile.in
--- autoconf-2.69/bin/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/bin/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -214,7 +214,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autoconf/Makefile.in autoconf-2.69/lib/autoconf/Makefile.in
--- autoconf-2.69/lib/autoconf/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autoconf/Makefile.in 2017-04-12 16:47:38.033273747 +0200
@@ -230,7 +230,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autoscan/Makefile.in autoconf-2.69/lib/autoscan/Makefile.in
--- autoconf-2.69/lib/autoscan/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autoscan/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -216,7 +216,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autotest/Makefile.in autoconf-2.69/lib/autotest/Makefile.in
--- autoconf-2.69/lib/autotest/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autotest/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -223,7 +223,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/m4sugar/Makefile.in autoconf-2.69/lib/m4sugar/Makefile.in
--- autoconf-2.69/lib/m4sugar/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/m4sugar/Makefile.in 2017-04-12 16:47:38.033273747 +0200
@@ -228,7 +228,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/tests/Makefile.in autoconf-2.69/tests/Makefile.in
--- autoconf-2.69/tests/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/tests/Makefile.in 2017-04-12 16:47:38.025273698 +0200
@@ -201,7 +201,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.71.orig/lib/freeze.mk autoconf-2.71/lib/freeze.mk
--- autoconf-2.71.orig/lib/freeze.mk 2021-01-28 21:46:48.000000000 +0100
+++ autoconf-2.71/lib/freeze.mk 2021-10-25 09:21:38.519238189 +0200
@@ -31,7 +31,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_build_prefix)bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_build_prefix)bin/autom4te \
-B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ` '
# When processing the file with diversion disabled, there must be no
diff -ur autoconf-2.71.orig/Makefile.in autoconf-2.71/Makefile.in
--- autoconf-2.71.orig/Makefile.in 2021-01-28 22:06:02.000000000 +0100
+++ autoconf-2.71/Makefile.in 2021-10-25 09:22:07.231239851 +0200
@@ -577,7 +577,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_build_prefix)bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_build_prefix)bin/autom4te \
-B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ` '
......@@ -10,12 +10,12 @@ parts =
[autoconf]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz
md5sum = 82d05e03b93e45f5a39b828dc9c6c29b
url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.71.tar.gz
md5sum = f64e38d671fdec06077a41eb4d5ee476
pre-configure = cp -f ${gnu-config:location}/config.sub ${gnu-config:location}/config.guess build-aux/
patch-options = -p1
patches =
${:_profile_base_location_}/autoconf-2.69-shebang_workaround.patch#9d286e6f9c271dff361891e381be706d
${:_profile_base_location_}/autoconf-2.71-shebang_workaround.patch#9b4e417d661101f737d588eb1401747d
environment =
M4=${m4:location}/bin/m4
PATH=${patch:location}/bin:${perl:location}/bin:%(PATH)s
......@@ -10,8 +10,8 @@ parts =
[automake]
recipe = slapos.recipe.cmmi
shared = true
md5sum = 53f38e7591fa57c3d2cee682be668e5b
url = https://ftp.gnu.org/gnu/automake/automake-1.16.1.tar.xz
md5sum = 4017e96f89fca45ca946f1c5db6be714
url = https://ftp.gnu.org/gnu/automake/automake-1.16.5.tar.xz
patch-options = -p1
patches =
${:_profile_base_location_}/automake-1.16-shebang_workaround.patch#203f9199b0e629de3630b5959f8cf73e
......
......@@ -42,6 +42,24 @@ patches =
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-028#dd51fa67913b5dca45a702b672b3323f
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-029#0729364c977ef4271e9f8dfafadacf67
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-030#efb709fdb1368945513de23ccbfae053
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-031#236df1ac1130a033ed0dbe2d2115f28f
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-032#2360f7e79cfb28526f80021025ea5909
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-033#b551c4ee7b8713759e4143499d0bbd48
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-034#c9a56fbe0348e05a886dff97f2872b74
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-035#e564e8ab44ed1ca3a4e315a9f6cabdc9
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-036#b00ff66c41a7c0f06e191200981980b0
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-037#be2a7b05f6ae560313f3c9d5f7127bda
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-038#61e0522830b24fbe8c0d1b010f132470
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-039#a4775487abe958536751c8ce53cdf6f9
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-040#80d3587c58854e226055ef099ffeb535
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-041#20bf63eef7cb441c0b1cc49ef3191d03
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-042#70790646ae61e207c995e44931390e50
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-043#855a46955cb251534e80b4732b748e37
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-044#29623d3282fcbb37e1158136509b5bb8
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-045#4473244ca5abfd4b018ea26dc73e7412
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-046#7e5fb09991c077076b86e0e057798913
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-047#8483153bad1a6f52cadc3bd9a8df7835
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-048#e9f5dc12a32b2e0d3961344e794f92b3
configure-options =
--with-curses
environment =
......
......@@ -12,8 +12,8 @@ parts =
[bison]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/bison/bison-3.3.2.tar.xz
md5sum = c9b552dee234b2f6b66e56b27e5234c9
url = https://ftp.gnu.org/gnu/bison/bison-3.8.2.tar.xz
md5sum = c28f119f405a2304ff0a7ccdcc629713
environment =
M4=${m4:location}/bin/m4
PATH=${autoconf:location}/bin:${automake:location}/bin:${patch:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
......
......@@ -9,8 +9,13 @@ parts =
[cmake]
recipe = slapos.recipe.cmmi
shared = true
url = https://cmake.org/files/v3.18/cmake-3.18.4.tar.gz
md5sum = 0380beaee1c39a22455db02651abe7be
url = https://cmake.org/files/v3.21/cmake-3.21.3.tar.gz
md5sum = c0feb5855604f68b09bdb3acb623619e
environment =
CMAKE_INCLUDE_PATH=${ncurses:location}/include:${openssl:location}/include
CMAKE_LIBRARY_PATH=${ncurses:location}/lib:${openssl:location}/lib
[cmake-3.18]
<= cmake
url = https://cmake.org/files/v3.18/cmake-3.18.4.tar.gz
md5sum = 0380beaee1c39a22455db02651abe7be
......@@ -9,8 +9,8 @@ parts =
[coreutils]
recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.gnu.org/gnu/coreutils/coreutils-8.31.tar.xz
md5sum = 0009a224d8e288e8ec406ef0161f9293
url = https://ftp.gnu.org/gnu/coreutils/coreutils-9.0.tar.xz
md5sum = 0d79ae8a6124546e3b94171375e5e5d0
configure-options =
--disable-libcap
--prefix=@@LOCATION@@
......
......@@ -16,8 +16,8 @@ parts =
[curl]
recipe = slapos.recipe.cmmi
shared = true
url = http://curl.haxx.se/download/curl-7.76.0.tar.xz
md5sum = 41178ceea57c863f883b6fe2c3ac276f
url = http://curl.haxx.se/download/curl-7.79.1.tar.xz
md5sum = 74d3c4ca8aaa6c0619806d6e246e65fb
configure-options =
--disable-static
--disable-ech
......@@ -45,7 +45,6 @@ configure-options =
--without-nss
--without-libpsl
--without-libgsasl
--without-libmetalink
--without-libssh2
--without-libssh
--without-librtmp
......
......@@ -7,8 +7,8 @@ parts = dash-output
[dash]
recipe = slapos.recipe.cmmi
shared = true
url = http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.8.tar.gz
md5sum = 5c152209680dab3c319e8923f6c51378
url = http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.11.tar.gz
md5sum = 027236e48b9202607b1418fee42c473e
configure-options =
--disable-static
--disable-fnmatch
......
......@@ -11,8 +11,8 @@ extends =
[file]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.icm.edu.pl/packages/file/file-5.39.tar.gz
md5sum = 1c450306053622803a25647d88f80f25
url = http://ftp.icm.edu.pl/packages/file/file-5.41.tar.gz
md5sum = 18233bb0a0089dfdc7dfbc93b96f231b
configure-options =
--disable-static
--disable-libseccomp
......
......@@ -7,8 +7,8 @@ parts =
[findutils]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.debian.org/debian/pool/main/f/findutils/findutils_4.6.0+git+20190510.orig.tar.xz
md5sum = 9ae8d2b323b0b12a484abcbff1d2c486
url = http://ftp.debian.org/debian/pool/main/f/findutils/findutils_4.8.0.orig.tar.xz
md5sum = eeefe2e6380931a77dfa6d9350b43186
[findutils-output]
# Shared binary location to ease migration
......
......@@ -5,9 +5,9 @@ parts =
[gdbm]
recipe = slapos.recipe.cmmi
shared = true
version = 1.19
version = 1.22
url = http://ftp.gnu.org/gnu/gdbm/gdbm-${:version}.tar.gz
md5sum = aeb29c6a90350a4c959cd1df38cd0a7e
md5sum = 0bbd38f12656e4728e2f7c4708aec014
configure-options =
--disable-static
--enable-libgdbm-compat
......@@ -18,8 +18,8 @@ parts =
[git]
recipe = slapos.recipe.cmmi
shared = true
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.33.0.tar.xz
md5sum = 0990ff97af1511be0d9f0d3223dd4359
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.33.1.tar.xz
md5sum = 3462f34d9c17288eee854b7645f6a0a1
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
......
......@@ -21,8 +21,8 @@ environment-extra =
[libgpg-error]
<= gpg-common
version = 1.27
md5sum = 5217ef3e76a7275a2a3b569a12ddc989
version = 1.42
md5sum = 133fed221ba8f63f5842858a1ff67cb3
configure-options-extra =
--disable-doc
--disable-tests
......@@ -37,15 +37,15 @@ environment-extra =
[libgcrypt]
<= with-gpg-error
version = 1.8.1
md5sum = b21817f9d850064d2177285f1073ec55
version = 1.9.4
md5sum = edc7becfe09c75d8f95ff7623e40c52e
configure-options-extra2 =
--disable-doc
[gnutls]
<= gpg-common
url = http://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.15.tar.xz
md5sum = bcdcbc65c50a7499617ad9f4d0058de9
url = https://www.gnupg.org/ftp/gcrypt/gnutls/v3.7/gnutls-3.7.2.tar.xz
md5sum = 95c32a1af583ecfcb280648874c0fbd9
configure-options-extra =
--disable-doc
--disable-static
......
......@@ -15,8 +15,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga/groonga-11.0.2.tar.gz
md5sum = 753ba6fad77598baf93615c4b9c535b1
url = https://packages.groonga.org/source/groonga/groonga-11.0.9.tar.gz
md5sum = 9c66445d92c8b7536f1b28119ac1855b
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
# temporary patch to respect more tokens in natural language mode.
patches =
......@@ -48,9 +48,8 @@ environment =
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.4.tar.gz
md5sum = effa67fb271d49810850a3b275d040f6
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.5.tar.gz
md5sum = 842d02becc6dcc25a02fa7e789c2cba7
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
pre-configure = mkdir -p ${:groonga-plugin-dir}
make-targets = GROONGA_PLUGINS_DIR=${:groonga-plugin-dir} install
......
......@@ -7,7 +7,7 @@ parts =
[gzip]
recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.gnu.org/pub/gnu/gzip/gzip-1.10.tar.xz
md5sum = 691b1221694c3394f1c537df4eee39d3
url = https://ftp.gnu.org/pub/gnu/gzip/gzip-1.11.tar.xz
md5sum = d1e93996dba00cab0caa7903cd01d454
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
......@@ -53,7 +53,8 @@ version = 92.0.4515.107
[headless-chromium]
recipe = slapos.recipe.cmmi
path = ${chromium-source:location}
location = ${:path}/out/headless
# XXX
fake-location = ${:path}/out/headless
# Configuration file for GN, the tool to build the actual compilation
# configuration file.
......@@ -91,17 +92,17 @@ configure-command =
> ${chromium-source:gclient-location}/.gclient
gclient sync --no-history
# Generate build configuration files.
mkdir -p ${:location}
echo '${:build-config-options}' > ${:location}/args.gn
gn gen ${:location}
mkdir -p ${:fake-location}
echo '${:build-config-options}' > ${:fake-location}/args.gn
gn gen ${:fake-location}
# You can run the headless Chromium shell using
# ${:binary} --remote-debugging-port=1234
make-binary =
autoninja -C ${:location} headless_shell
autoninja -C ${:fake-location} headless_shell
# By building our own version of Chromedriver, we can ensure version
# compatibility. The build is quite cheap compared to Chromium, anyway.
autoninja -C ${:location} chromedriver
autoninja -C ${:fake-location} chromedriver
environment =
PATH=${depot_tools:location}:${gperf:location}/bin:${pkgconfig:location}/bin:${coreutils:location}/bin:${git:location}/bin:${curl:location}/bin:%(PATH)s
LDFLAGS="-Wl,-rpath=${nss:location}/lib,-rpath=${nspr:location}/lib"
......@@ -111,10 +112,10 @@ environment =
NM="${:llvm-toolchain}/llvm-nm"
# Expose devtools frontend location.
devtools-frontend = ${:location}/gen/third_party/devtools-frontend/src/front_end
devtools-frontend = ${:fake-location}/gen/third_party/devtools-frontend/src/front_end
binary = ${:location}/headless_shell
chromedriver = ${:location}/chromedriver
binary = ${:fake-location}/headless_shell
chromedriver = ${:fake-location}/chromedriver
promises =
${:binary}
${:chromedriver}
......
......@@ -8,8 +8,8 @@ parts = libcap-ng
[libcap-ng]
recipe = slapos.recipe.cmmi
shared = true
url = https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-0.7.10.tar.gz
md5sum = 57dc267e2949cdecb651a929f9206572
url = https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-0.8.2.tar.gz
md5sum = faf1ef766cf068ad1aba4008ced665f7
location = @@LOCATION@@
configure-options =
--with-python=no
......
......@@ -10,8 +10,8 @@ parts =
[libfastjson]
recipe = slapos.recipe.cmmi
url = https://github.com/rsyslog/libfastjson/archive/v0.99.8.tar.gz
md5sum = 730713ad1d851def7ac8898f751bbfdd
url = https://github.com/rsyslog/libfastjson/archive/v0.99.9.tar.gz
md5sum = 3c45e6efc838cd364588d6d1822c4ea8
shared = true
pre-configure =
autoreconf -fvi -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal -I ${automake:location}/share/aclocal
......
......@@ -2,7 +2,6 @@
extends =
../bzip2/buildout.cfg
../cmake/buildout.cfg
../patch/buildout.cfg
../perl/buildout.cfg
../popt/buildout.cfg
../zlib/buildout.cfg
......@@ -12,12 +11,9 @@ parts =
[librsync]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/librsync/librsync/archive/v2.0.0.tar.gz
md5sum = cbda9c3eba21bcf2d56a4080ba7a5dc4
url = https://github.com/librsync/librsync/archive/v2.3.2.tar.gz
md5sum = 74ba5b50de5ba3d595828e9109fa5fce
location = @@LOCATION@@
patch-options = -p1
patches =
${:_profile_base_location_}/librsync-2.0.0-issue50.patch#5bac5363646a2c2ec6d2c4b26ca4cd7f
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
......
--- librsync-2.0.0/src/search.c 2017-02-20 13:39:48.012922600 +0100
+++ librsync-2.0.0/src/search.c 2017-02-20 13:41:43.661880014 +0100
@@ -218,7 +218,7 @@
r = m;
}
- if (l == r) {
+ if ((l == r) && (l <= bucket->r)) {
int i = sig->targets[l].i;
rs_block_sig_t *b = &(sig->block_sigs[i]);
if (weak_sum != b->weak_sum)
......@@ -5,8 +5,8 @@ parts =
[libtasn1]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/libtasn1/libtasn1-4.12.tar.gz
md5sum = 5c724bd1f73aaf4a311833e1cd297b21
url = https://ftp.gnu.org/gnu/libtasn1/libtasn1-4.17.0.tar.gz
md5sum = c46f6eb3bd1287031ae5d36465094402
configure-options =
--disable-static
--disable-gtk-doc-html
......@@ -8,30 +8,12 @@ extends =
[libuuid]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.kernel.org/pub/linux/utils/util-linux/v2.18/util-linux-ng-2.18.tar.bz2
md5sum = 2f5f71e6af969d041d73ab778c141a77
url = http://www.kernel.org/pub/linux/utils/util-linux/v2.37/util-linux-2.37.2.tar.xz
md5sum = d659bf7cd417d93dc609872f6334b019
configure-options =
--disable-static
--disable-all-programs
--enable-libuuid
--disable-agetty
--disable-cramfs
--disable-fallocate
--disable-fsck
--disable-libblkid
--disable-libmount
--disable-makeinstall-chown
--disable-makeinstall-setuid
--disable-mount
--disable-nls
--disable-pivot_root
--disable-rename
--disable-require-password
--disable-schedutils
--disable-switch_root
--disable-tls
--disable-unshare
--disable-uuidd
--disable-wall
--without-libiconv-prefix
--without-libintl-prefix
--without-ncurses
......@@ -40,7 +22,5 @@ configure-options =
--without-selinux
--without-audit
make-options =
-C shlibs/uuid
environment =
PATH=${perl:location}/bin:%(PATH)s
......@@ -7,6 +7,6 @@ parts =
[libyaml]
recipe = slapos.recipe.cmmi
shared = true
url = http://pyyaml.org/download/libyaml/yaml-0.1.6.tar.gz
md5sum = 5fe00cda18ca5daeb43762b80c38e06e
url = http://pyyaml.org/download/libyaml/yaml-0.2.5.tar.gz
md5sum = bb15429d8fb787e7d3f1c83ae129a999
pre-configure = cp -f ${gnu-config:location}/config.sub ${gnu-config:location}/config.guess config/
......@@ -8,8 +8,8 @@ parts = logrotate
[logrotate]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/logrotate/logrotate/releases/download/3.17.0/logrotate-3.17.0.tar.xz
md5sum = ac2a7151fc8a187201872358a20a2813
url = https://github.com/logrotate/logrotate/releases/download/3.18.1/logrotate-3.18.1.tar.xz
md5sum = 07d5aba26c350f9ab5730c25a7277751
# BBB this is only for backward-compatibility.
post-install =
ln -nsf . @@LOCATION@@/usr
......
# LuaJIT is a Just-In-Time Compiler (JIT) for the Lua programming language.
# https://luajit.org/luajit.html
[buildout]
parts = luajit
[luajit]
recipe = slapos.recipe.cmmi
shared = true
url = https://luajit.org/download/LuaJIT-2.0.5.tar.gz
md5sum = 48353202cbcacab84ee41a5a70ea0a2c
configure-command = true
# pass dummy LDCONFIG to skip needless calling of ldconfig by non-root user
make-options =
DPREFIX=@@LOCATION@@
LDCONFIG=/bin/echo
......@@ -30,16 +30,15 @@ parts =
recipe = slapos.recipe.cmmi
shared = true
url = https://archive.mariadb.org//mariadb-${:version}/source/mariadb-${:version}.tar.gz
version = 10.4.19
md5sum = bf60c7a3feac5854745cd1ad5133f09a
location = @@LOCATION@@
version = 10.4.22
md5sum = 0d5e1b9e3694322e18819811a2bf81fa
pre-configure =
set '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
grep -q "$@"
sed -i "/$1/d" "$2"
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DBUILD_CONFIG=mysql_release
-DDEFAULT_CHARSET=utf8
-DDEFAULT_COLLATION=utf8_unicode_ci
......@@ -68,6 +67,10 @@ configure-options =
-DCMAKE_INSTALL_RPATH=${:CMAKE_LIBRARY_PATH}
-DCMAKE_INCLUDE_PATH=${unixodbc:location}/include
-DCMAKE_LIBRARY_PATH=${unixodbc:location}/lib
# for RocksDB - see
# https://lore.kernel.org/linux-btrfs/ed3642c2-682e-08a1-f18d-2d63409b7631@nexedi.com/T/
-DWITH_FALLOCATE=NO
##
CMAKE_CFLAGS = -I${bzip2:location}/include -I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include -I${unixodbc:location}/include -I${lz4:location}/include -I${snappy:location}/include -I${zstd:location}/include
CMAKE_LIBRARY_PATH = ${bzip2:location}/lib:${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${xz-utils:location}/lib:${zlib:location}/lib:${unixodbc:location}/lib:${lz4:location}/lib:${snappy:location}/lib:${zstd:location}/lib
environment =
......@@ -80,13 +83,13 @@ patch-options = -p1
patches =
https://sources.debian.org/data/main/m/mariadb-10.3/1:10.3.22-0+deb10u1/debian/patches/0024-Revert-to-using-system-pcre-library.patch#1c6a0f2634f5a56122299674b77b1131
post-install =
ldd=`ldd ${:location}/lib/plugin/ha_rocksdb.so`
ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
for x in ${lz4:location} ${snappy:location} ${zstd:location}
do echo "$ldd" |grep -qF " $x/lib/"
done
set -- wsrep-lib/wsrep-API/*/wsrep_api.h
install -DpT $1 ${:location}/$1
cp -a wsrep-lib/include ${:location}/wsrep-lib
install -DpT $1 %(location)s/$1
cp -a wsrep-lib/include %(location)s/wsrep-lib
[mroonga-mariadb]
# mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users.
......@@ -96,8 +99,8 @@ post-install =
# as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/mroonga/mroonga-11.02.tar.gz
md5sum = 0729c74efc92bfc404b88597488d07e9
url = https://packages.groonga.org/source/mroonga/mroonga-11.09.tar.gz
md5sum = 8b1786332edc61c41a769f225e6063b2
pre-configure =
rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source
......@@ -131,15 +134,15 @@ environment =
### (we just override here for easier revert)
[mariadb-10.3]
<= mariadb-10.4
version = 10.3.29
md5sum = a5adad1c4fb1717d7fe6d608fd4d40de
version = 10.3.32
md5sum = 12341dc150c810c0072a40e55825ca57
post-install =
ldd=`ldd ${:location}/lib/plugin/ha_rocksdb.so`
ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
for x in ${lz4:location} ${snappy:location} ${zstd:location}
do echo "$ldd" |grep -qF " $x/lib/"
done
mkdir -p ${:location}/include/wsrep &&
cp -p wsrep/wsrep_api.h ${:location}/include/wsrep
mkdir -p %(location)s/include/wsrep &&
cp -p wsrep/wsrep_api.h %(location)s/include/wsrep
[mariadb]
location = ${mariadb-10.3:location}
......
......@@ -7,8 +7,8 @@ extends =
[nettle]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/nettle/nettle-3.3.tar.gz
md5sum = 10f969f78a463704ae73529978148dbe
url = https://ftp.gnu.org/gnu/nettle/nettle-3.7.3.tar.gz
md5sum = a60273d0fab9c808646fcf5e9edc2e8f
patches =
${:_profile_base_location_}/nettle-lib-location.patch#3c5f5b285ffd5bc30436ee0f4c662084
configure-option =
......
......@@ -11,8 +11,8 @@ parts = nginx-output
[nginx-common]
recipe = slapos.recipe.cmmi
shared = true
url = https://nginx.org/download/nginx-1.19.2.tar.gz
md5sum = 3dc55f6451ed6f819f1c796f4e5e9617
url = https://nginx.org/download/nginx-1.20.1.tar.gz
md5sum = 8ca6edd5076bdfad30a69c9c9b41cc68
[nginx]
<= nginx-common
......
......@@ -5,6 +5,6 @@ parts =
[noVNC]
recipe = slapos.recipe.build:download-unpacked
shared = true
url = https://github.com/novnc/noVNC/archive/refs/tags/v1.2.0.tar.gz
md5sum = 290dfabc4ecdd58d62ccb8c34a922962
url = https://github.com/novnc/noVNC/archive/refs/tags/v1.3.0.tar.gz
md5sum = 22847b4f6e9caa916aa5eceb046f27aa
strip-top-level-dir = true
......@@ -17,8 +17,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.openssl.org/source/openssl-1.1.1k.tar.gz
md5sum = c4e7d95f782b08116afa27b30393dd27
url = https://www.openssl.org/source/openssl-1.1.1l.tar.gz
md5sum = ac0d4387f3ba0ad741b0580dd45f6ff3
location = @@LOCATION@@
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with DESTDIR). Used by slapos.package.git/obs
......
......@@ -10,9 +10,9 @@ extends =
[p11-kit]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/p11-glue/p11-kit/releases/download/${:version}/p11-kit-${:version}.tar.gz
version = 0.23.7
md5sum = ebbefd123210594231adb4bde21b8560
url = https://github.com/p11-glue/p11-kit/releases/download/${:version}/p11-kit-${:version}.tar.xz
version = 0.24.0
md5sum = 8ccf11c4a2e2e505b8e516d8549e64a5
configure-options =
--disable-static
--disable-doc-html
......
......@@ -7,8 +7,8 @@ parts =
[pcre]
recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.pcre.org/pub/pcre/pcre-8.43.tar.bz2
md5sum = 636222e79e392c3d95dcc545f24f98c4
url = https://ftp.pcre.org/pub/pcre/pcre-8.45.tar.bz2
md5sum = 4452288e6a0eefb2ab11d36010a1eebb
configure-options =
--disable-static
--enable-unicode-properties
......
......@@ -5,8 +5,8 @@ parts =
[popt]
recipe = slapos.recipe.cmmi
shared = true
url = ftp://anduin.linuxfromscratch.org/BLFS/svn/p/popt-1.16.tar.gz
md5sum = 3743beefa3dd6247a73f8f7a32c14c33
url = http://ftp.rpm.org/popt/releases/popt-1.x/popt-1.18.tar.gz
md5sum = 450f2f636e6a3aa527de803d0ae76c5a
configure-options =
--disable-static
......
......@@ -45,7 +45,7 @@ configure-command = true
environment =
GIT_VERSION=${:version}
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${libgcrypt:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake:location}/bin:%(PATH)s
PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake-3.18:location}/bin:%(PATH)s
CXXFLAGS=-I${openssl:location}/include -I${gnutls:location}/include -I${zlib:location}/include
CFLAGS=-I${gnutls:location}/include
LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath=${curl:location}/lib -L${libtool:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${curl:location}/lib -L${pcre:location}/lib -L${jemalloc:location}/lib -L${libmicrohttpd:location}/lib
......
......@@ -10,8 +10,8 @@ extends =
[rsyslogd]
recipe = slapos.recipe.cmmi
url = https://www.rsyslog.com/files/download/rsyslog/rsyslog-8.2006.0.tar.gz
md5sum = 33de768941953ceeca9d1a437b47891b
url = https://www.rsyslog.com/files/download/rsyslog/rsyslog-8.2110.0.tar.gz
md5sum = 2d2b9d4a70a6e2fd4a7e806a5782c56b
shared = true
configure-options =
--disable-klog
......
......@@ -8,8 +8,8 @@ parts =
[sqlite3]
recipe = slapos.recipe.cmmi
shared = true
url = https://sqlite.org/2021/sqlite-autoconf-3350500.tar.gz
md5sum = d1d1aba394c8e0443077dc9f1a681bb8
url = https://sqlite.org/2021/sqlite-autoconf-3360000.tar.gz
md5sum = f5752052fc5b8e1b539af86a3671eac7
configure-options =
--disable-static
--enable-readline
......
......@@ -10,8 +10,8 @@ parts =
[swig]
recipe = slapos.recipe.cmmi
shared = true
url = http://prdownloads.sourceforge.net/swig/swig-3.0.10.tar.gz
md5sum = bb4ab8047159469add7d00910e203124
url = https://sourceforge.net/projects/swig/files/swig/swig-3.0.12/swig-3.0.12.tar.gz/download
md5sum = 82133dfa7bba75ff9ad98a7046be687c
configure-options =
--disable-ccache
--with-python=${buildout:executable}
......
[buildout]
extends =
../zlib/buildout.cfg
parts = tcl
[tcl]
recipe = slapos.recipe.cmmi
url = http://prdownloads.sourceforge.net/tcl/tcl8.5.15-src.tar.gz
md5sum = f3df162f92c69b254079c4d0af7a690f
url = http://prdownloads.sourceforge.net/tcl/tcl8.6.11-src.tar.gz
md5sum = 8a4c004f48984a03a7747e9ba06e4da4
shared = true
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
configure-command = ./unix/configure
configure-options =
--prefix=@@LOCATION@@
......
......@@ -3,6 +3,7 @@ extends =
../defaults.cfg
../libtool/buildout.cfg
../libxml2/buildout.cfg
../luajit/buildout.cfg
../make/buildout.cfg
../ncurses/buildout.cfg
../openssl/buildout.cfg
......@@ -23,8 +24,8 @@ min_version = 8
[trafficserver]
recipe = slapos.recipe.cmmi
url = http://apache.claz.org/trafficserver/trafficserver-9.0.2.tar.bz2
md5sum = 4df67ada24665116bafedd71503215cb
url = http://apache.claz.org/trafficserver/trafficserver-9.1.0.tar.bz2
md5sum = 994b0aa879cbd95054048f34bf8ed954
shared = true
patch-options = -p1
configure-options =
......@@ -32,6 +33,7 @@ configure-options =
--with-pcre=${pcre:location}
--with-ncurses=${ncurses:location}
--with-tcl=${tcl:location}/lib/
--with-luajit=${luajit:location}
--with-lzma=${xz-utils:location}
--with-zlib=${zlib:location}
--disable-curl
......@@ -40,7 +42,7 @@ configure-options =
--disable-posix-cap
environment =
PATH=${libtool:location}/bin:${make:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
LDFLAGS =-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${tcl:location}/lib -Wl,-rpath=${tcl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LDFLAGS =-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${tcl:location}/lib -Wl,-rpath=${tcl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${luajit:location}/lib -lm
make-target =
check
......
......@@ -319,8 +319,8 @@ environment =
[pixman]
recipe = slapos.recipe.cmmi
shared = true
url = http://cairographics.org/releases/pixman-0.34.0.tar.gz
md5sum = e80ebae4da01e77f68744319f01d52a3
url = https://www.cairographics.org/releases/pixman-0.40.0.tar.gz
md5sum = 73858c0862dd9896fb5f62ae267084a4
configure-options =
--disable-static
......
......@@ -12,8 +12,8 @@ extends =
[zbar]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/mchehab/zbar/archive/0.23.1.tar.gz
md5sum = 04f1ffafd0f12473d82763931d9c7c68
url = https://github.com/mchehab/zbar/archive/0.23.90.tar.gz
md5sum = cb1667e20c1d7acf1b9911414adaeb84
pre-configure =
autoreconf -vfi -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
configure-options =
......
......@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '1.0.214'
version = '1.0.217'
name = 'slapos.cookbook'
long_description = open("README.rst").read()
......
......@@ -107,7 +107,9 @@ class Mac(object):
pass
def generatePassword(length):
return ''.join(random.SystemRandom().sample(string.ascii_lowercase, length))
system_random = random.SystemRandom()
alphabet = string.ascii_letters + string.digits
return ''.join(system_random.choice(alphabet) for i in range(length))
class Password(object):
......@@ -119,7 +121,7 @@ class Password(object):
recipes like slapos.recipe.template:jinja2 to safely process the password.
Options:
- bytes: password length (default: 8 characters)
- bytes: password length (default: 16 characters)
- storage-path: plain-text persistent storage for password,
that can only be accessed by the user
(default: ${buildout:parts-directory}/${:_buildout_section_name_})
......@@ -149,7 +151,7 @@ class Password(object):
if e.errno != errno.ENOENT:
raise
if not passwd:
passwd = self.generatePassword(int(options.get('bytes', '8')))
passwd = self.generatePassword(int(options.get('bytes', '16')))
self.update = self.install
options['passwd'] = passwd
# Password must not go into .installed file, for 2 reasons:
......
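The generatePassword change above switches from random.SystemRandom().sample() over lowercase letters (which draws without replacement, so no character can ever repeat and the length is capped at 26) to independent choice() calls over letters and digits, matching the "use more characters" and "allow to use a character twice" commits listed in the merge. A minimal, self-contained sketch of the two behaviours; the generate_password_old/generate_password_new names are illustrative, not part of the recipe:

    import random
    import string

    def generate_password_old(length):
        # Pre-change behaviour: sample without replacement from lowercase only.
        # Raises ValueError for length > 26 and never repeats a character.
        return ''.join(random.SystemRandom().sample(string.ascii_lowercase, length))

    def generate_password_new(length):
        # Post-change behaviour: independent draws from letters + digits,
        # so characters may repeat and the alphabet is 62 symbols wide.
        system_random = random.SystemRandom()
        alphabet = string.ascii_letters + string.digits
        return ''.join(system_random.choice(alphabet) for _ in range(length))

    print(generate_password_old(8))   # e.g. 'kqzmwbty'
    print(generate_password_new(16))  # e.g. 'aB3x9Qr2LmZ0pT7c'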
......@@ -14,7 +14,7 @@
# not need these here).
[template]
filename = instance.cfg.in
md5sum = 1dfbd20c77fb3c1f01005a8a920d2ed9
md5sum = fb3a20e7f555a9ce7fe1ec547d0fcdfc
[profile-common]
filename = instance-common.cfg.in
......@@ -26,11 +26,11 @@ md5sum = 0950e09ad1f03f0789308f5f7a7eb1b8
[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 7c2e52b76c42bed95702763c344e41dd
md5sum = c5d1e235959a877b4f3157369c6f5e10
[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = 313671d343ceccfca5af1baa642132c5
md5sum = c67e172c0c6eca955b18962404056a33
[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
......@@ -50,7 +50,7 @@ md5sum = 37475d79f28c5f126bc1947fdb938fdb
[template-backend-haproxy-configuration]
_update_hash_filename_ = templates/backend-haproxy.cfg.in
md5sum = d2851c7ebd2c9baa2edecb3ca3485511
md5sum = ae4c9ce775ea003aa51eda5ecbbeec73
[template-empty]
_update_hash_filename_ = templates/empty.in
......@@ -98,7 +98,7 @@ md5sum = 8e1c6c06c09beb921965b3ce98c67c9e
[caddyprofiledeps-dummy]
filename = caddyprofiledummy.py
md5sum = 38792c2dceae38ab411592ec36fff6a8
md5sum = 59cb33f11272ee09eccea74981d2304a
[profile-kedifa]
filename = instance-kedifa.cfg.in
......
import urlparse
class Recipe(object):
def __init__(self, *args, **kwargs):
pass
......@@ -7,3 +9,13 @@ class Recipe(object):
def update(self):
return self.install()
def validate_netloc(netloc):
# a bit crazy way to validate that the passed parameter is a haproxy-
# compatible server netloc
parsed = urlparse.urlparse('scheme://'+netloc)
if ':' in parsed.hostname:
hostname = '[%s]' % parsed.hostname
else:
hostname = parsed.hostname
return netloc == '%s:%s' % (hostname, parsed.port)
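validate_netloc only accepts values that round-trip through urlparse back to the same host:port string, so a port is mandatory and IPv6 hosts must keep their brackets. A rough usage sketch, assuming the same Python 2 urlparse module imported above; the sample netlocs are illustrative:

    import urlparse

    def validate_netloc(netloc):
        # Same logic as in caddyprofiledummy.py above.
        parsed = urlparse.urlparse('scheme://' + netloc)
        if ':' in parsed.hostname:
            hostname = '[%s]' % parsed.hostname
        else:
            hostname = parsed.hostname
        return netloc == '%s:%s' % (hostname, parsed.port)

    print(validate_netloc('127.0.0.1:8080'))  # True
    print(validate_netloc('[::1]:8080'))      # True
    print(validate_netloc('example.com'))     # False, port is mandatory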
......@@ -207,6 +207,15 @@ context =
{% endif %}
{% endif %}
{% endfor %}
{% for url_key in ['url-netloc-list', 'https-url-netloc-list', 'health-check-failover-url-netloc-list'] %}
{% if url_key in slave %}
{% for netloc in slave[url_key].split() %}
{% if not caddyprofiledummy.validate_netloc(netloc) %}
{% do slave_error_list.append('slave %s %r invalid' % (url_key, netloc)) %}
{% endif %}
{% endfor %}
{% endif %}
{% endfor %}
{% for k in ['ssl_proxy_ca_crt', 'health-check-failover-ssl-proxy-ca-crt'] %}
{% if k in slave %}
{% set crt = slave.get(k, '') %}
......
......@@ -348,6 +348,26 @@
],
"type": "string",
"default": "false"
},
"url-netloc-list": {
"type": "string",
"title": "[EXPERT] List of netlocs for \"Backend URL\"",
"description": "Space separated list of netlocs (ip and port) of backend to connect to. They will share the scheme and path of the original URL and additional backend parameters (like \"SSL Backend Authority's Certificate\"). Each of them will be used, and at least one is enough for the connectivity to work, and the best results are with \"Health Check\" feature enabled. Port is mandatory, so hostnames shall be provided as hostname:port (eg. example.com:80), IPv4 - as ipv4:port (eg. 127.0.0.1:80), IPv6 - as ipv6:port (eg. ::1:80). Simply this parameters only overrides netloc (network location) of the original URL."
},
"https-url-netloc-list": {
"type": "string",
"title": "[EXPERT] List of netlocs for \"HTTPS Backend URL\"",
"description": "See \"[EXPERT] List of netlocs for \"Backend URL\"\" description."
},
"health-check-failover-url-netloc-list": {
"type": "string",
"title": "[EXPERT] List of netlocs for \"Failover backend URL\"",
"description": "See \"[EXPERT] List of netlocs for \"Backend URL\"\" description."
},
"health-check-failover-https-url-netloc-list": {
"type": "string",
"title": "[EXPERT] List of netlocs for \"Failover HTTPS Backend URL\"",
"description": "See \"[EXPERT] List of netlocs for \"Backend URL\"\" description."
}
},
"title": "Input Parameters",
......
......@@ -55,6 +55,7 @@ extra-context =
import subprocess_module subprocess
import functools_module functools
import validators validators
import caddyprofiledummy caddyprofiledummy
# Must match the key id in [switch-softwaretype] which uses this section.
raw software_type RootSoftwareInstance-default-custom-personal-replicate
......
......@@ -53,7 +53,7 @@ context =
{#- * stabilise values for backend #}
{%- for key, prefix in [('url', 'http_backend'), ('https-url', 'https_backend')] %}
{%- set parsed = urlparse_module.urlparse(slave_instance.get(key, '').strip()) %}
{%- set info_dict = {'scheme': parsed.scheme, 'hostname': parsed.hostname, 'port': parsed.port or DEFAULT_PORT[parsed.scheme], 'path': parsed.path, 'fragment': parsed.fragment, 'query': parsed.query} %}
{%- set info_dict = {'scheme': parsed.scheme, 'hostname': parsed.hostname, 'port': parsed.port or DEFAULT_PORT[parsed.scheme], 'path': parsed.path, 'fragment': parsed.fragment, 'query': parsed.query, 'netloc-list': slave_instance.get(key + '-netloc-list', '').split() } %}
{%- do slave_instance.__setitem__(prefix, info_dict) %}
{%- endfor %}
{%- do slave_instance.__setitem__('ssl_proxy_verify', ('' ~ slave_instance.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES) %}
......@@ -66,6 +66,7 @@ context =
{%- do info_dict.__setitem__('health-check-failover-path', parsed.path) %}
{%- do info_dict.__setitem__('health-check-failover-query', parsed.query) %}
{%- do info_dict.__setitem__('health-check-failover-fragment', parsed.fragment) %}
{%- do info_dict.__setitem__('health-check-netloc-list', slave_instance.get('health-check-failover-url-netloc-list', '').split()) %}
{%- do slave_instance.__setitem__(prefix, info_dict) %}
{%- endfor %}
{%- do slave_instance.__setitem__('health-check-failover-ssl-proxy-verify', ('' ~ slave_instance.get('health-check-failover-ssl-proxy-verify', '')).lower() in TRUE_VALUES) %}
......
......@@ -106,7 +106,7 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}
{%- do path_list.append(query) %}
{%- endif %}
{%- set path = '?'.join(path_list) %}
{%- if hostname and port %}
{%- if hostname and port or len(info_dict['netloc-list']) > 0 %}
timeout server {{ slave_instance['request-timeout'] }}s
timeout connect {{ slave_instance['backend-connect-timeout'] }}s
retries {{ slave_instance['backend-connect-retries'] }}
......@@ -122,7 +122,15 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}
{%- endif %}
{%- do active_check_option_list.append('timeout check %ss' % (slave_instance['health-check-timeout'])) %}
{%- endif %}
{%- if len(info_dict['netloc-list']) > 0 %}
{%- set counter = {'count': 1} %}
{%- for netloc in info_dict['netloc-list'] %}
server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }}-{{ counter['count'] }} {{ netloc }} {{ ' '.join(ssl_list) }} {{ ' ' + ' '.join(active_check_list)}}
{%- do counter.__setitem__('count', counter['count'] + 1) %}
{%- endfor %}
{%- else %}
server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }} {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }} {{ ' ' + ' '.join(active_check_list)}}
{%- endif %}
{%- for active_check_option in active_check_option_list %}
{{ active_check_option }}
{%- endfor %}
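When netloc-list is non-empty, the loop above emits one numbered server line per netloc instead of the single hostname:port line. A small Python sketch mirroring the Jinja counter, with illustrative values (slave reference, scheme and netlocs are made up):

    # Illustrative sketch of what the netloc-list loop above renders for one backend.
    slave_reference = '_example'                        # hypothetical slave reference
    scheme = 'http'
    netloc_list = ['10.0.0.1:8080', '10.0.0.2:8080']    # from url-netloc-list
    ssl_opts = ''                                        # ' '.join(ssl_list) in the template
    check_opts = ''                                      # ' '.join(active_check_list)

    for count, netloc in enumerate(netloc_list, start=1):
        print('  server %s-backend-%s-%d %s %s %s' % (
            slave_reference, scheme, count, netloc, ssl_opts, check_opts))
    # -> server _example-backend-http-1 10.0.0.1:8080
    # -> server _example-backend-http-2 10.0.0.2:8080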
......@@ -161,10 +169,18 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}-failover
{%- endif %}
{%- set path = '?'.join(path_list) %}
{%- if hostname and port %}
timeout server {{ slave_instance['request-timeout'] }}s
{%- if len(info_dict['health-check-netloc-list']) > 0 %}
{%- set counter = {'count': 1} %}
{%- for netloc in info_dict['health-check-netloc-list'] %}
server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }}-{{ counter['count'] }} {{ netloc }} {{ ' '.join(ssl_list) }}
{%- do counter.__setitem__('count', counter['count'] + 1) %}
{%- endfor %}
{%- else %}
server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }} {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }}
{%- endif %}
timeout connect {{ slave_instance['backend-connect-timeout'] }}s
timeout server {{ slave_instance['request-timeout'] }}s
retries {{ slave_instance['backend-connect-retries'] }}
server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }} {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }}
{%- if path %}
http-request set-path {{ path }}%[path]
{%- endif %}
......
......@@ -92,6 +92,12 @@ KEDIFA_PORT = '15080'
# has to be not partition one
SOURCE_IP = '127.0.0.1'
# ATS version expectation in Via string
VIA_STRING = (
r'^http\/1.1 caddy-frontend-1\[.*\] '
r'\(ApacheTrafficServer\/9\.[0-9]\.[0-9]+\)$',
)[0]
# IP on which test run, in order to mimic HTTP[s] access
TEST_IP = os.environ['SLAPOS_TEST_IPV4']
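VIA_STRING is two adjacent raw-string literals implicitly concatenated; the one-element tuple and the [0] index just unwrap that single string. Compared to the old hard-coded 9.0 pattern it now accepts any ApacheTrafficServer 9.x.y version. A quick check of the pattern against a plausible Via header value; the header shown is illustrative:

    import re

    # Illustrative Via header as produced by ATS behind the caddy frontend.
    via = 'http/1.1 caddy-frontend-1[abc123] (ApacheTrafficServer/9.1.0)'
    pattern = (
        r'^http\/1.1 caddy-frontend-1\[.*\] '
        r'\(ApacheTrafficServer\/9\.[0-9]\.[0-9]+\)$'
    )
    print(bool(re.match(pattern, via)))  # True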
......@@ -280,7 +286,7 @@ def isHTTP2(domain):
out, err = prc.communicate()
assert prc.returncode == 0, "Problem running %r. Output:\n%s\nError:\n%s" % (
curl_command, out, err)
return 'Using HTTP2, server supports multi-use' in err
return 'Using HTTP2, server supports' in err
class TestDataMixin(object):
......@@ -638,16 +644,40 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
server_side=True)
cls.backend_url = 'http://%s:%s/' % server.server_address
cls.server_process = multiprocessing.Process(
server_process = multiprocessing.Process(
target=server.serve_forever, name='HTTPServer')
cls.server_process.start()
cls.logger.debug('Started process %s' % (cls.server_process,))
server_process.start()
cls.logger.debug('Started process %s' % (server_process,))
cls.backend_https_url = 'https://%s:%s/' % server_https.server_address
cls.server_https_process = multiprocessing.Process(
server_https_process = multiprocessing.Process(
target=server_https.serve_forever, name='HTTPSServer')
cls.server_https_process.start()
cls.logger.debug('Started process %s' % (cls.server_https_process,))
server_https_process.start()
cls.logger.debug('Started process %s' % (server_https_process,))
class NetlocHandler(TestHandler):
identification = 'netloc'
netloc_a_http = ThreadedHTTPServer(
(cls._ipv4_address, cls._server_netloc_a_http_port),
NetlocHandler)
netloc_a_http_process = multiprocessing.Process(
target=netloc_a_http.serve_forever, name='netloc-a-http')
netloc_a_http_process.start()
netloc_b_http = ThreadedHTTPServer(
(cls._ipv4_address, cls._server_netloc_b_http_port),
NetlocHandler)
netloc_b_http_process = multiprocessing.Process(
target=netloc_b_http.serve_forever, name='netloc-b-http')
netloc_b_http_process.start()
cls.server_process_list = [
server_process,
server_https_process,
netloc_a_http_process,
netloc_b_http_process,
]
@classmethod
def cleanUpCertificate(cls):
......@@ -656,8 +686,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
@classmethod
def stopServerProcess(cls):
for server in ['server_process', 'server_https_process']:
process = getattr(cls, server, None)
for process in cls.server_process_list:
if process is not None:
cls.logger.debug('Stopping process %s' % (process,))
process.join(10)
......@@ -1027,6 +1056,8 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls._server_http_port = findFreeTCPPort(cls._ipv4_address)
cls._server_https_port = findFreeTCPPort(cls._ipv4_address)
cls._server_https_auth_port = findFreeTCPPort(cls._ipv4_address)
cls._server_netloc_a_http_port = findFreeTCPPort(cls._ipv4_address)
cls._server_netloc_b_http_port = findFreeTCPPort(cls._ipv4_address)
cls.startServerProcess()
except BaseException:
cls.logger.exception("Error during setUpClass")
......@@ -1065,6 +1096,12 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
def _get_backend_haproxy_configuration(self):
backend_configuration_file = glob.glob(os.path.join(
self.instance_path, '*', 'etc', 'backend-haproxy.cfg'))[0]
with open(backend_configuration_file) as fh:
return fh.read()
@classmethod
def requestDefaultInstance(cls, state='started'):
default_instance = super(
......@@ -1320,6 +1357,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
# authenticating to http backend shall be no-op
'authenticate-to-backend': True,
},
'url-netloc-list': {
'url': cls.backend_url,
'url-netloc-list': '%(ip)s:%(port_a)s %(ip)s:%(port_b)s' % {
'ip': cls._ipv4_address,
'port_a': cls._server_netloc_a_http_port,
'port_b': cls._server_netloc_b_http_port},
},
'auth-to-backend': {
# in here use reserved port for the backend, which is going to be
# started later
......@@ -1347,6 +1391,14 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'strict-transport-security-sub-domains': True,
'strict-transport-security-preload': True,
},
'https-url-netloc-list': {
'url': cls.backend_url + 'http',
'https-url': cls.backend_url + 'https',
'https-url-netloc-list': '%(ip)s:%(port_a)s %(ip)s:%(port_b)s' % {
'ip': cls._ipv4_address,
'port_a': cls._server_netloc_a_http_port,
'port_b': cls._server_netloc_b_http_port},
},
'server-alias': {
'url': cls.backend_url,
'server-alias': 'alias1.example.com alias2.example.com',
......@@ -1715,9 +1767,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'domain': 'example.com',
'accepted-slave-amount': '51',
'accepted-slave-amount': '54',
'rejected-slave-amount': '0',
'slave-amount': '51',
'slave-amount': '54',
'rejected-slave-dict': {
},
'warning-slave-dict': {
......@@ -1959,6 +2011,15 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertIn("backend _Url-http\n", content)
self.assertNotIn("backend _Url-https\n", content)
def test_url_netloc_list(self):
parameter_dict = self.assertSlaveBase('url-netloc-list')
result = fakeHTTPSResult(parameter_dict['domain'], 'path')
# assure that the request went to backend specified in the netloc
self.assertEqual(
result.headers['X-Backend-Identification'],
'netloc'
)
def test_auth_to_backend(self):
parameter_dict = self.assertSlaveBase('auth-to-backend')
......@@ -3579,7 +3640,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
def test_enable_cache_server_alias(self):
......@@ -3621,7 +3682,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
result = fakeHTTPResult(
......@@ -3738,7 +3799,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
# BEGIN: Check that squid.log is correctly filled in
......@@ -3940,7 +4001,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
# check stale-if-error support is really respected if not present in the
......@@ -4083,7 +4144,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
try:
......@@ -4130,7 +4191,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9\.0\.[0-9]+\)$'
VIA_STRING
)
def test_enable_http2_false(self):
......@@ -4458,6 +4519,19 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
timeout connect 10s
retries 5""" in content)
def test_https_url_netloc_list(self):
parameter_dict = self.assertSlaveBase('https-url-netloc-list')
result = fakeHTTPSResult(parameter_dict['domain'], 'path')
# assure that the request went to backend specified in the netloc
self.assertEqual(
result.headers['X-Backend-Identification'],
'netloc'
)
result = fakeHTTPResult(parameter_dict['domain'], 'path')
# assure that the request went to backend NOT specified in the netloc
self.assertNotIn('X-Backend-Identification', result.headers)
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin):
instance_parameter_dict = {
......@@ -7162,6 +7236,23 @@ class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin):
'health-check-failover-https-url':
cls.backend_url + 'failover-https-url?a=b&c=',
},
'health-check-failover-url-netloc-list': {
'https-only': False, # http and https access to check
'health-check-timeout': 1, # fail fast for test
'health-check-interval': 1, # fail fast for test
'url': cls.backend_url + 'url',
'https-url': cls.backend_url + 'https-url',
'health-check': True,
'health-check-http-path': '/health-check-failover-url',
'health-check-failover-url': cls.backend_url + 'failover-url?a=b&c=',
'health-check-failover-https-url':
cls.backend_url + 'failover-https-url?a=b&c=',
'health-check-failover-url-netloc-list':
'%(ip)s:%(port_a)s %(ip)s:%(port_b)s' % {
'ip': cls._ipv4_address,
'port_a': cls._server_netloc_a_http_port,
'port_b': cls._server_netloc_b_http_port},
},
'health-check-failover-url-auth-to-backend': {
'https-only': False, # http and https access to check
'health-check-timeout': 1, # fail fast for test
......@@ -7251,12 +7342,6 @@ backend _health-check-default-http
timeout check 2s""" % (backend, )
}
def _get_backend_haproxy_configuration(self):
backend_configuration_file = glob.glob(os.path.join(
self.instance_path, '*', 'etc', 'backend-haproxy.cfg'))[0]
with open(backend_configuration_file) as fh:
return fh.read()
def _test(self, key):
parameter_dict = self.assertSlaveBase(key)
self.assertIn(
......@@ -7308,6 +7393,14 @@ backend _health-check-default-http
headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED)
def restoreBackend():
result = requests.put(
self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'),
headers={})
self.assertEqual(result.status_code, httplib.CREATED)
self.addCleanup(restoreBackend)
time.sleep(3) # > health-check-timeout + health-check-interval
result = fakeHTTPSResult(parameter_dict['domain'], '/failoverpath')
......@@ -7342,6 +7435,38 @@ backend _health-check-default-http
r'"GET /failoverpath HTTP/1.1"'
)
def test_health_check_failover_url_netloc_list(self):
parameter_dict = self.assertSlaveBase(
'health-check-failover-url-netloc-list')
slave_parameter_dict = self.getSlaveParameterDictDict()[
'health-check-failover-url-netloc-list']
# check normal access
result = fakeHTTPSResult(parameter_dict['domain'], '/path')
self.assertNotIn('X-Backend-Identification', result.headers)
# start replying with bad status code
result = requests.put(
self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED)
def restoreBackend():
result = requests.put(
self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'),
headers={})
self.assertEqual(result.status_code, httplib.CREATED)
self.addCleanup(restoreBackend)
time.sleep(3) # > health-check-timeout + health-check-interval
# check failover, uses netloc
result = fakeHTTPSResult(parameter_dict['domain'], '/path')
self.assertEqual(
result.headers['X-Backend-Identification'],
'netloc'
)
def test_health_check_failover_url_auth_to_backend(self):
parameter_dict = self.assertSlaveBase(
'health-check-failover-url-auth-to-backend')
......
......@@ -21,6 +21,9 @@ T-2/var/log/httpd/_auth-to-backend-not-configured_error_log
T-2/var/log/httpd/_auth-to-backend_access_log
T-2/var/log/httpd/_auth-to-backend_backend_log
T-2/var/log/httpd/_auth-to-backend_error_log
T-2/var/log/httpd/_bad-backend_access_log
T-2/var/log/httpd/_bad-backend_backend_log
T-2/var/log/httpd/_bad-backend_error_log
T-2/var/log/httpd/_ciphers_access_log
T-2/var/log/httpd/_ciphers_error_log
T-2/var/log/httpd/_custom_domain_access_log
......@@ -70,6 +73,9 @@ T-2/var/log/httpd/_enable_cache_server_alias_error_log
T-2/var/log/httpd/_https-only_access_log
T-2/var/log/httpd/_https-only_backend_log
T-2/var/log/httpd/_https-only_error_log
T-2/var/log/httpd/_https-url-netloc-list_access_log
T-2/var/log/httpd/_https-url-netloc-list_backend_log
T-2/var/log/httpd/_https-url-netloc-list_error_log
T-2/var/log/httpd/_monitor-ipv4-test_access_log
T-2/var/log/httpd/_monitor-ipv4-test_error_log
T-2/var/log/httpd/_monitor-ipv6-test_access_log
......@@ -153,6 +159,9 @@ T-2/var/log/httpd/_type-zope-virtualhostroot-https-port_error_log
T-2/var/log/httpd/_type-zope_access_log
T-2/var/log/httpd/_type-zope_backend_log
T-2/var/log/httpd/_type-zope_error_log
T-2/var/log/httpd/_url-netloc-list_access_log
T-2/var/log/httpd/_url-netloc-list_backend_log
T-2/var/log/httpd/_url-netloc-list_error_log
T-2/var/log/httpd/_url_https-url_access_log
T-2/var/log/httpd/_url_https-url_backend_log
T-2/var/log/httpd/_url_https-url_error_log
......
......@@ -21,6 +21,9 @@ T-2/var/log/httpd/_auth-to-backend-not-configured_error_log
T-2/var/log/httpd/_auth-to-backend_access_log
T-2/var/log/httpd/_auth-to-backend_backend_log
T-2/var/log/httpd/_auth-to-backend_error_log
T-2/var/log/httpd/_bad-backend_access_log
T-2/var/log/httpd/_bad-backend_backend_log
T-2/var/log/httpd/_bad-backend_error_log
T-2/var/log/httpd/_ciphers_access_log
T-2/var/log/httpd/_ciphers_error_log
T-2/var/log/httpd/_custom_domain_access_log
......@@ -70,6 +73,9 @@ T-2/var/log/httpd/_enable_cache_server_alias_error_log
T-2/var/log/httpd/_https-only_access_log
T-2/var/log/httpd/_https-only_backend_log
T-2/var/log/httpd/_https-only_error_log
T-2/var/log/httpd/_https-url-netloc-list_access_log
T-2/var/log/httpd/_https-url-netloc-list_backend_log
T-2/var/log/httpd/_https-url-netloc-list_error_log
T-2/var/log/httpd/_monitor-ipv4-test_access_log
T-2/var/log/httpd/_monitor-ipv4-test_error_log
T-2/var/log/httpd/_monitor-ipv6-test_access_log
......@@ -153,6 +159,9 @@ T-2/var/log/httpd/_type-zope-virtualhostroot-https-port_error_log
T-2/var/log/httpd/_type-zope_access_log
T-2/var/log/httpd/_type-zope_backend_log
T-2/var/log/httpd/_type-zope_error_log
T-2/var/log/httpd/_url-netloc-list_access_log
T-2/var/log/httpd/_url-netloc-list_backend_log
T-2/var/log/httpd/_url-netloc-list_error_log
T-2/var/log/httpd/_url_https-url_access_log
T-2/var/log/httpd/_url_https-url_backend_log
T-2/var/log/httpd/_url_https-url_error_log
......
......@@ -24,6 +24,9 @@ T-2/var/log/httpd/_health-check-disabled_error_log
T-2/var/log/httpd/_health-check-failover-url-auth-to-backend_access_log
T-2/var/log/httpd/_health-check-failover-url-auth-to-backend_backend_log
T-2/var/log/httpd/_health-check-failover-url-auth-to-backend_error_log
T-2/var/log/httpd/_health-check-failover-url-netloc-list_access_log
T-2/var/log/httpd/_health-check-failover-url-netloc-list_backend_log
T-2/var/log/httpd/_health-check-failover-url-netloc-list_error_log
T-2/var/log/httpd/_health-check-failover-url-ssl-proxy-verified_access_log
T-2/var/log/httpd/_health-check-failover-url-ssl-proxy-verified_backend_log
T-2/var/log/httpd/_health-check-failover-url-ssl-proxy-verified_error_log
......
......@@ -622,6 +622,11 @@
}
}
]
},
"random-activity-priority": {
"type": "string",
"title": "Random Activity Priority",
"description": "Control `random_activity_priority` argument of test runner. Can be set to an empty string to automatically generate a seed for each test."
}
},
"type": "object"
......
......@@ -97,16 +97,17 @@ class WendelinTutorialTestCase(FluentdTestCase):
@classmethod
def setUpClass(cls):
fluentd_dir = os.path.join(cls.computer_partition_root_path,
'software_release', 'parts', 'fluentd')
cls._fluentd_bin = os.path.join(fluentd_dir, 'bin', 'fluentd')
cls._gem_path = os.path.join(fluentd_dir, 'lib', 'ruby', 'gems')
cls._tmp_dir = tempfile.mkdtemp()
cls._measurementList = cls.sensor_value_list()
cls._conf = cls.get_configuration()
super(FluentdTestCase, cls).setUpClass()
fluentd_dir = os.path.join(cls.computer_partition_root_path,
'software_release', 'parts', 'fluentd')
cls._fluentd_bin = os.path.join(fluentd_dir, 'bin', 'fluentd')
cls._gem_path = os.path.join(fluentd_dir, 'lib', 'ruby', 'gems')
@classmethod
def sensor_value_list(cls):
return [str(value) for value in (round(random.uniform(870, 1084), 2),
......
......@@ -19,7 +19,7 @@ md5sum = f2b0f1ed27148504f220e06eaceff935
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = f902dd10cb052ac262a4a96b9362b3a3
md5sum = 93cbee3403e7e23b4278143c32209ddc
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
......@@ -39,11 +39,11 @@ md5sum = cd0008f1689dfca9b77370bc4d275b70
[template-kvm-export]
filename = instance-kvm-export.cfg.jinja2
md5sum = 4c9efdc9ef35d1096173084541be712d
md5sum = 09252c282ef86f4bb3a88e91869b0f97
[template-kvm-export-script]
filename = template/kvm-export.sh.jinja2
md5sum = b617d64de73de1eed518185f310bbc82
md5sum = 64aa1ce8785f6b94aabd787fa3443082
[template-nbd]
filename = instance-nbd.cfg.jinja2
......
......@@ -24,10 +24,20 @@ rendered = ${directory:bin}/${slap-parameter:namebase}-exporter
# Resilient stack wants a "wrapper" parameter
wrapper = ${:rendered}
mode = 0700
{%- set disk_type = slapparameter_dict.get('disk-type', 'virtio') %}
{%- if disk_type == "virtio" %}
device = virtio0
{%- elif disk_type == "ide" %}
{#- Device name determined manually for disk-type == ide #}
device = ide0-hd0
{%- else %}
# unsupported disk-type {{ disk_type }}
{%- endif %}
context =
section directory directory
section buildout buildout
key socket_path kvm-instance:socket-path
key device :device
raw gzip_binary {{ gzip_binary }}
# Extends publish section with resilient parameters
......
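A minimal plain-Python sketch (not part of the recipe) of the disk-type to backup-device mapping implemented by the Jinja block above; the 'virtio0' and 'ide0-hd0' names are taken from that block, everything else is illustrative:

# Device name passed to the drive-backup call, keyed by the requested disk-type.
# 'ide0-hd0' is the manually determined name noted in the template comment.
DEVICE_BY_DISK_TYPE = {'virtio': 'virtio0', 'ide': 'ide0-hd0'}

def drive_backup_device(disk_type='virtio'):
    try:
        return DEVICE_BY_DISK_TYPE[disk_type]
    except KeyError:
        raise ValueError('unsupported disk-type %r' % disk_type)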
......@@ -1177,6 +1177,7 @@ context =
[wipe-disk-device-wrapper]
recipe = slapos.recipe.template:jinja2
template = inline:
#!/bin/sh
{%- for disk_device in disk_device_path.split() %}
dd if=/dev/zero of={{ disk_device }} bs=4096 count=500k
{%- endfor %}
......
......@@ -10,7 +10,7 @@ BACKUP_FILE=virtual.qcow2
QMP_CLIENT={{ buildout['directory'] }}/software_release/bin/qemu-qmp-client
$QMP_CLIENT --socket {{ socket_path }} --drive-backup $BACKUP_DIR/$BACKUP_FILE
$QMP_CLIENT --socket {{ socket_path }} --drive-backup $BACKUP_DIR/$BACKUP_FILE {{ device }}
# Due to the way qmp works, the VM file cannot be compressed on the fly.
# Although the compression step is optional, the importer uses the .gz file
......
......@@ -573,6 +573,25 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
def getInstanceSoftwareType(cls):
return 'kvm-resilient'
def test_kvm_exporter(self):
exporter_partition = os.path.join(
self.slap.instance_directory,
self.__partition_reference__ + '2')
backup_path = os.path.join(
exporter_partition, 'srv', 'backup', 'kvm', 'virtual.qcow2.gz')
exporter = os.path.join(exporter_partition, 'bin', 'exporter')
if os.path.exists(backup_path):
os.unlink(backup_path)
def call_exporter():
try:
return (0, subprocess.check_output(
[exporter], stderr=subprocess.STDOUT).decode('utf-8'))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output.decode('utf-8'))
status_code, status_text = call_exporter()
self.assertEqual(0, status_code, status_text)
def test(self):
connection_parameter_dict = self\
.computer_partition.getConnectionParameterDict()
......@@ -646,6 +665,15 @@ ir3:sshd-on-watch RUNNING""",
)
@skipUnlessKvm
class TestInstanceResilientDiskTypeIde(InstanceTestCase, KvmMixin):
@classmethod
def getInstanceParameterDict(cls):
return {
'disk-type': 'ide'
}
@skipUnlessKvm
class TestAccessResilientAdditional(InstanceTestCase):
__partition_reference__ = 'ara'
......@@ -1379,7 +1407,8 @@ class TestDiskDevicePathWipeDiskOndestroy(InstanceTestCase, KvmMixin):
with open(slapos_wipe_device_disk) as fh:
self.assertEqual(
fh.read().strip(),
r"""dd if=/dev/zero of=/dev/virt0 bs=4096 count=500k
r"""#!/bin/sh
dd if=/dev/zero of=/dev/virt0 bs=4096 count=500k
dd if=/dev/zero of=/dev/virt1 bs=4096 count=500k"""
)
self.assertTrue(os.access(slapos_wipe_device_disk, os.X_OK))
......
[template]
filename = instance.cfg.in
md5sum = 56e986c74ef236f261834c57f5861ce0
md5sum = 87fd83d33ba786550a45f484b3ae2b24
[template-nginx-configuration]
filename = template-nginx.cfg.in
md5sum = 022e4b53e1b2db16c4e518fe76f638fa
md5sum = 3eb7dda365d30c3c3c2ce939bbc607d4
[buildout]
parts =
nginx-service
cron-service
cron-entry-logrotate
logrotate-entry-nginx
promises
publish-connection-information
extends = ${monitor-template:rendered}
......@@ -46,6 +49,7 @@ output = $${directory:etc}/nginx.cfg
mode = 0600
access-log = $${directory:log}/nginx-access.log
error-log = $${directory:log}/nginx-error.log
pid-file = $${directory:run}/nginx.pid
ip = $${slap-configuration:ipv6-random}
local-ip = $${slap-configuration:ipv4-random}
port = 9443
......@@ -68,6 +72,15 @@ cert-file = $${directory:ssl}/${:_buildout_section_name_}.cert
common-name = $${nginx-configuration:ip}
stop-on-error = true
[logrotate-entry-nginx]
<= logrotate-entry-base
name = nginx
log =
$${nginx-configuration:access-log}
$${nginx-configuration:error-log}
post =
test ! -s $${nginx-configuration:pid-file} || kill -USR1 $(cat "$${nginx-configuration:pid-file}")
[promises]
recipe =
promises =
......
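For reference, the logrotate `post` action added above is the usual nginx log-reopen idiom: when the pid file is non-empty, nginx is sent SIGUSR1 so it reopens its log files at the original paths. A minimal Python sketch of the same behaviour (the pid file path is whatever $${nginx-configuration:pid-file} expands to):

import os
import signal

def reopen_nginx_logs(pid_file):
    # equivalent of: test ! -s "<pid-file>" || kill -USR1 $(cat "<pid-file>")
    if os.path.isfile(pid_file) and os.path.getsize(pid_file) > 0:
        with open(pid_file) as f:
            os.kill(int(f.read().strip()), signal.SIGUSR1)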
daemon off; # run in the foreground so supervisord can look after it
worker_processes 4;
pid $${directory:run}/nginx.pid;
pid $${nginx-configuration:pid-file};
events {
worker_connections 768;
......
......@@ -25,12 +25,16 @@
#
##############################################################################
import functools
import os
import lzma
import multiprocessing
import urllib.parse
import uritemplate
import requests
from slapos.testing.utils import CrontabMixin
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
......@@ -38,7 +42,7 @@ setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestNginxPushStream(SlapOSInstanceTestCase):
class TestNginxPushStream(SlapOSInstanceTestCase, CrontabMixin):
def setUp(self):
self.connection_parameters = \
self.computer_partition.getConnectionParameterDict()
......@@ -83,3 +87,55 @@ class TestNginxPushStream(SlapOSInstanceTestCase):
self.assertEqual(q.get_nowait(), b': ')
self.assertEqual(q.get_nowait(), b'data: Hello')
def test_log_rotation(self):
status_url = urllib.parse.urljoin(
self.connection_parameters['publisher-url'], '/status')
error_url = urllib.parse.urljoin(
self.connection_parameters['publisher-url'], '/..')
log_file_path = functools.partial(
os.path.join,
self.computer_partition_root_path,
'var',
'log',
)
rotated_file_path = functools.partial(
os.path.join,
self.computer_partition_root_path,
'srv',
'backup',
'logrotate',
)
requests.get(status_url, verify=False)
with open(log_file_path('nginx-access.log')) as f:
self.assertIn('GET /status HTTP', f.read())
requests.get(error_url, verify=False)
with open(log_file_path('nginx-error.log')) as f:
self.assertIn('forbidden', f.read())
    # the first log rotation initializes the state but does not actually rotate
self._executeCrontabAtDate('logrotate', '2050-01-01')
self._executeCrontabAtDate('logrotate', '2050-01-02')
    # today's files are not compressed
with open(rotated_file_path('nginx-access.log-20500102')) as f:
self.assertIn('GET /status HTTP', f.read())
with open(rotated_file_path('nginx-error.log-20500102')) as f:
self.assertIn('forbidden', f.read())
    # after rotation, the program re-opens the original log files and writes to
    # the expected location.
requests.get(status_url, verify=False)
with open(log_file_path('nginx-access.log')) as f:
self.assertIn('GET /status HTTP', f.read())
requests.get(error_url, verify=False)
with open(log_file_path('nginx-error.log')) as f:
self.assertIn('forbidden', f.read())
self._executeCrontabAtDate('logrotate', '2050-01-03')
    # yesterday's files are compressed
with lzma.open(rotated_file_path('nginx-access.log-20500102.xz'), 'rt') as f:
self.assertIn('GET /status HTTP', f.read())
with lzma.open(rotated_file_path('nginx-error.log-20500102.xz'), 'rt') as f:
self.assertIn('forbidden', f.read())
......@@ -13,6 +13,10 @@ parts =
[python]
part = python3
[gcc]
# powerdns needs a compiler with C++17 features
min_version = 8
[eggs]
recipe = zc.recipe.egg
eggs =
......
......@@ -12,6 +12,5 @@ http://www.proftpd.org/docs/
# TODO
* log rotation
* make sure SFTPLog is useful (seems very verbose and does not contain more than stdout)
* allow configuring webhooks when a new file is uploaded
......@@ -19,7 +19,7 @@ md5sum = efb4238229681447aa7fe73898dffad4
[instance-default]
filename = instance-default.cfg.in
md5sum = f6c583d24940a3a6838bd421dbb84a20
md5sum = 4df64032e14c19363ad3dfe9aecf8e0c
[proftpd-config-file]
filename = proftpd-config-file.cfg.in
......
[buildout]
parts =
promises
cron-service
cron-entry-logrotate
logrotate-entry-proftpd
publish-connection-parameter
extends = {{ template_monitor }}
......@@ -137,6 +140,15 @@ recipe =
instance-promises =
${proftpd-listen-promise:name}
[logrotate-entry-proftpd]
<= logrotate-entry-base
name = proftpd
log =
${proftpd:sftp-log}
${proftpd:xfer-log}
${proftpd:ban-log}
post =
test ! -s ${proftpd:pid-file} || kill -HUP $(cat "${proftpd:pid-file}")
[publish-connection-parameter]
recipe = slapos.cookbook:publish
......
......@@ -25,26 +25,25 @@
#
##############################################################################
import contextlib
import io
import logging
import lzma
import os
import shutil
from urllib.parse import urlparse, parse_qs
import tempfile
import io
import subprocess
import tempfile
import time
from http.server import BaseHTTPRequestHandler
import logging
from urllib.parse import parse_qs, urlparse
import pysftp
import psutil
import paramiko
from paramiko.ssh_exception import SSHException
from paramiko.ssh_exception import AuthenticationException
import psutil
import pysftp
from paramiko.ssh_exception import AuthenticationException, SSHException
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort
from slapos.testing.utils import ManagedHTTPServer
from slapos.testing.utils import (CrontabMixin, ManagedHTTPServer,
findFreeTCPPort)
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
......@@ -227,8 +226,7 @@ class TestFilesAndSocketsInInstanceDir(ProFTPdTestCase):
"""
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
# there is only one process in this instance
process_info, = [p for p in all_process_info if p['name'] != 'watchdog']
process_info, = [p for p in all_process_info if 'proftpd' in p['name']]
process = psutil.Process(process_info['pid'])
self.assertEqual('proftpd', process.name()) # sanity check
self.proftpdProcess = process
......@@ -316,8 +314,7 @@ class TestSSHKey(TestSFTPOperations):
class TestAuthenticationURL(TestSFTPOperations):
class AuthenticationServer(ManagedHTTPServer):
class RequestHandler(BaseHTTPRequestHandler):
def do_POST(self):
# type: () -> None
def do_POST(self) -> None:
assert self.headers[
'Content-Type'] == 'application/x-www-form-urlencoded', self.headers[
'Content-Type']
......@@ -330,11 +327,13 @@ class TestAuthenticationURL(TestSFTPOperations):
self.send_response(200)
self.send_header("X-Proftpd-Authentication-Result", "Success")
self.end_headers()
return self.wfile.write(b"OK")
self.wfile.write(b"OK")
return
self.send_response(401)
return self.wfile.write(b"Forbidden")
self.wfile.write(b"Forbidden")
log_message = logging.getLogger(__name__ + '.AuthenticationServer').info
def log_message(self, msg, *args) -> None:
logging.getLogger(__name__ + '.AuthenticationServer').info(msg, *args)
@classmethod
def getInstanceParameterDict(cls):
......@@ -364,3 +363,119 @@ class TestAuthenticationURL(TestSFTPOperations):
parameter_dict = self.computer_partition.getConnectionParameterDict()
self.assertNotIn('username', parameter_dict)
self.assertNotIn('password', parameter_dict)
class LogRotationMixin(CrontabMixin):
"""Mixin test for log rotations.
Verifies that after `_access` the `expected_logged_text` is found in `log_filename`.
This also checks that the log files are rotated properly.
"""
log_filename: str = NotImplemented
expected_logged_text: str = NotImplemented
def _access(self) -> None:
raise NotImplementedError()
def assertFileContains(self, filename: str, text: str) -> None:
"""assert that files contain the text, waiting for file to be created and
retrying a few times to tolerate the cases where text is not yet written
to file.
"""
file_exists = False
for retry in range(10):
if os.path.exists(filename):
file_exists = True
if filename.endswith('.xz'):
f = lzma.open(filename, 'rt')
else:
f = open(filename, 'rt')
with contextlib.closing(f):
content = f.read()
if text in content:
return
time.sleep(0.1 * retry)
self.assertTrue(file_exists, f'{filename} does not exist')
self.assertIn(text, content)
def test(self) -> None:
self._access()
self.assertFileContains(
os.path.join(
self.computer_partition_root_path,
'var',
'log',
self.log_filename,
),
self.expected_logged_text,
)
    # the first log rotation initializes the state but does not actually rotate
self._executeCrontabAtDate('logrotate', '2050-01-01')
self._executeCrontabAtDate('logrotate', '2050-01-02')
# today's file is not compressed
self.assertFileContains(
os.path.join(
self.computer_partition_root_path,
'srv',
'backup',
'logrotate',
f'{self.log_filename}-20500102',
),
self.expected_logged_text,
)
    # after rotation, the program re-opens the original log file and writes to
    # the expected location, so accesses are logged again.
self._access()
self.assertFileContains(
os.path.join(
self.computer_partition_root_path,
'var',
'log',
self.log_filename,
),
self.expected_logged_text,
)
self._executeCrontabAtDate('logrotate', '2050-01-03')
# yesterday's file is compressed
self.assertFileContains(
os.path.join(
self.computer_partition_root_path,
'srv',
'backup',
'logrotate',
f'{self.log_filename}-20500102.xz',
),
self.expected_logged_text,
)
class TestAccessLog(ProFTPdTestCase, LogRotationMixin):
log_filename = 'proftpd-sftp.log'
expected_logged_text = "user 'proftpd' authenticated via 'password' method"
def _access(self) -> None:
self._getConnection().close()
class TestXferLog(ProFTPdTestCase, LogRotationMixin):
log_filename = 'proftpd-xfer.log'
expected_logged_text = '/testfile'
def _access(self) -> None:
with self._getConnection() as sftp:
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write("Hello FTP !")
f.flush()
sftp.put(f.name, remotepath='testfile')
class TestBanLog(ProFTPdTestCase, LogRotationMixin):
log_filename = 'proftpd-ban.log'
expected_logged_text = 'denied due to host ban'
def _access(self) -> None:
for _ in range(6):
with self.assertRaisesRegex(
Exception, '(Authentication failed|Connection reset by peer)'):
self._getConnection(password='wrong')
......@@ -33,6 +33,10 @@ parts =
[python]
part = python3
[gowork]
# replication-manager does not build on golang 1.17
golang = ${golang1.16:location}
[instance.cfg]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/instance.cfg
......
......@@ -14,7 +14,7 @@
# not need these here).
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 84f099cc9852c4f53a075dccbb3880f0
md5sum = ce9c231ec47eb8f528345add21cb7822
[template-balancer]
filename = instance-balancer.cfg.in
......
......@@ -21,6 +21,7 @@
{% set test_runner_total_database_count = mariadb_test_database_amount %}
{% set test_runner_enabled = mariadb_test_database_amount > 0 %}
{% endif -%}
{% set test_runner_random_activity_priority = slapparameter_dict.get('test-runner', {}).get('random-activity-priority') -%}
{% set monitor_base_url_dict = {} -%}
{% set monitor_dict = slapparameter_dict.get('monitor', {}) %}
{% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%}
......@@ -256,6 +257,7 @@ config-wendelin-core-zblk-fmt = {{ dumps(slapparameter_dict.get('wendelin-core-z
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope
......
......@@ -17,6 +17,7 @@ extra =
helloworld ${slapos.test.helloworld-setup:setup}
jupyter ${slapos.test.jupyter-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup}
......
......@@ -237,6 +237,7 @@ extra-eggs =
${slapos.core-setup:egg}
${pillow-python:egg}
${pycurl:egg}
caucase
erp5.util
${python-pynacl:egg}
${python-cryptography:egg}
......@@ -297,6 +298,7 @@ eggs = ${python-interpreter:eggs}
scripts =
slapos
supervisord
caucase
[git-clone-repository]
recipe = slapos.recipe.build:gitclone
......@@ -338,7 +340,6 @@ tests =
slaprunner ${slapos.test.slaprunner-setup:setup}
theia ${slapos.test.theia-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
###
${:extra}
......
......@@ -15,7 +15,7 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = f396d9a0780f4fb17016dbd32b56d7b8
md5sum = 8e4f43e603a5dd57752758c987465d41
[instance]
_update_hash_filename_ = instance.cfg.in
......@@ -47,7 +47,7 @@ md5sum = 9e8c17a4b2d802695caf0c2c052f0d11
[yarn.lock]
_update_hash_filename_ = yarn.lock
md5sum = 6faa52754c46e505912a478bc8ba3300
md5sum = 067d2db611b21f77885f3adfd7f81453
[python-language-server-requirements.txt]
_update_hash_filename_ = python-language-server-requirements.txt
......@@ -59,7 +59,7 @@ md5sum = 8157c22134200bd862a07c6521ebf799
[slapos.css.in]
_update_hash_filename_ = slapos.css.in
md5sum = 841141fc699b8d8918ed0669e6e61995
md5sum = d2930ec3ef973b7908f0fa896033fd64
[logo.png]
_update_hash_filename_ = logo.png
......
......@@ -71,7 +71,7 @@ recipe =
instance-promises =
$${theia-listen-promise:name}
$${frontend-listen-promise:name}
$${frontend-authentification-promise:name}
$${frontend-authentication-promise:name}
$${remote-frontend-url-available-promise:name}
{% if additional_frontend %}
$${remote-additional-frontend-url-available-promise:name}
......@@ -94,15 +94,15 @@ name = $${:_buildout_section_name_}.py
config-host = $${frontend-instance:ip}
config-port = $${frontend-instance:port}
[frontend-authentification-promise]
[frontend-authentication-promise]
<= monitor-promise-base
promise = check_url_available
name = $${:_buildout_section_name_}.py
username = $${frontend-instance-password:username}
password = $${frontend-instance-password:passwd}
ip = $${frontend-instance:ip}
port = $${frontend-instance:port}
config-url = https://$${:username}:$${:password}@[$${:ip}]:$${:port}
config-url = https://[$${:ip}]:$${:port}
config-username = $${frontend-instance-password:username}
config-password = $${frontend-instance-password:passwd}
[remote-frontend-url-available-promise]
<= monitor-promise-base
......@@ -184,6 +184,7 @@ sla-instance_guid = {{ parameter_dict['additional-frontend-guid'] }}
recipe = slapos.cookbook:generate.password
username = admin
bytes = 12
storage-path = $${buildout:parts-directory}/.$${:_buildout_section_name_}
[frontend-instance-port]
recipe = slapos.cookbook:free_port
......
/* backported fixes */
/* https://github.com/eclipse-theia/theia/commit/616c34e1c446a706f4cb02182b2d9195ef3ea854 */
.monaco-editor .monaco-list .monaco-list-row.focused,
.monaco-editor .monaco-list .monaco-list-row.focused,
.monaco-editor .monaco-list .monaco-list-row.focused .suggest-icon {
color: var(--theia-list-activeSelectionForeground) !important;
background-color: var(--theia-list-activeSelectionBackground) !important;
}
/* logo */
.theia-icon {
background-image: url('/{{ logo_image }}');
......
......@@ -557,6 +557,9 @@ eggs = ${neoppod:eggs}
zope.globalrequest
waitress
# OpenID Connect
oic
# parameterizing the version of the generated python interpreter name by the
# python section version causes dependency between this egg section and the
# installation of python, which we don't want on an instance
......@@ -756,3 +759,11 @@ beautifulsoup4 = 4.8.2
WebOb = 1.8.5
soupsieve = 1.9.5
eggtestinfo = 0.3
oic = 0.15.1
Beaker = 1.11.0
Mako = 1.1.4
pyjwkest = 1.4.2
alabaster = 0.7.12
future = 0.18.2
pycryptodomex = 3.10.1
......@@ -74,7 +74,7 @@ md5sum = bbef65b4edeb342f08309604ca3717d5
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = fcc8470824c448a56e2282c43b870cb5
md5sum = c10634353841bb09a847168b4add8d2f
[template-zeo]
filename = instance-zeo.cfg.in
......@@ -86,7 +86,7 @@ md5sum = bc821f9f9696953b10a03ad7b59a1936
[template-zope]
filename = instance-zope.cfg.in
md5sum = 769e81946c346530cebfce6ad7553165
md5sum = f3121380ab4d31ba5f4984aec74d0a2f
[template-balancer]
filename = instance-balancer.cfg.in
......
......@@ -21,6 +21,7 @@
{% set test_runner_total_database_count = mariadb_test_database_amount %}
{% set test_runner_enabled = mariadb_test_database_amount > 0 %}
{% endif -%}
{% set test_runner_random_activity_priority = slapparameter_dict.get('test-runner', {}).get('random-activity-priority') -%}
{% set monitor_base_url_dict = {} -%}
{% set monitor_dict = slapparameter_dict.get('monitor', {}) %}
{% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%}
......@@ -259,6 +260,7 @@ config-wendelin-core-zblk-fmt = {{ dumps(slapparameter_dict.get('wendelin-core-z
config-wsgi = {{ dumps(slapparameter_dict.get('wsgi', True)) }}
config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope
......
......@@ -13,6 +13,7 @@
{% set test_runner_address_list = [] -%}
{% set test_runner_enabled = slapparameter_dict['test-runner-enabled'] -%}
{% set test_runner_node_count = slapparameter_dict['test-runner-node-count'] -%}
{% set test_runner_random_activity_priority = slapparameter_dict['test-runner-random-activity-priority'] -%}
{% set longrequest_logger_base_path = buildout_directory ~ '/var/log/longrequest_logger_' -%}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
{% set bin_directory = parameter_dict['buildout-bin-directory'] -%}
......@@ -503,6 +504,9 @@ command-line-extra =
--extra_sql_connection_string_list '{{ ','.join(connection_string_list[1:]) }}'
--zserver {{ test_runner_address_list[0][0] ~ ':' ~ test_runner_address_list[0][1] }}
--zserver_frontend_url {{ slapparameter_dict['test-runner-apache-url-list'][0] }}
{% if test_runner_random_activity_priority is not none %}
--random_activity_priority={{ test_runner_random_activity_priority }}
{%- endif %}
[{{ section('runTestSuite') }}]
< = run-test-common
......
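The `--random_activity_priority` flag is only appended when the parameter is set. A minimal sketch (plain Python, hypothetical values) of how the value travels from the instance parameters into the runTestSuite command line shown above:

# Hypothetical requester-side parameters; instance-erp5.cfg.in reads the nested
# value and passes it down to the zope partition as
# test-runner-random-activity-priority (see the hunks above).
slapparameter_dict = {'test-runner': {'random-activity-priority': '42'}}

priority = slapparameter_dict.get('test-runner', {}).get(
    'random-activity-priority')

command_line_extra = []
if priority is not None:
    # mirrors the Jinja guard: {% if ... is not none %}
    command_line_extra.append('--random_activity_priority=%s' % priority)

assert command_line_extra == ['--random_activity_priority=42']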
......@@ -196,8 +196,8 @@ slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.20
slapos.rebootstrap = 4.5
slapos.recipe.build = 0.49
slapos.recipe.cmmi = 0.17
slapos.recipe.build = 0.50
slapos.recipe.cmmi = 0.18
slapos.recipe.template = 4.6
slapos.toolbox = 0.126
stevedore = 1.21.0:whl
......