Commit 60f3d46a authored by Łukasz Nowak

Update Release Candidate

parents a68acc77 12b12447
Pipeline #38161 passed
@@ -105,17 +105,18 @@ egg = ${:_buildout_section_name_}
 setup-eggs = ${python-cffi:egg}
-# eggs that are common to ZODB4, ZODB5 and ZODB6.
+# python3 versions for ZODB6.
 [versions]
-BTrees = 5.1.0
+BTrees = 6.1.0
 persistent = 5.1.0
-zodbpickle = 3.3.0
+zodbpickle = 4.1.1
 # Provide ZODB3 for those eggs that still care about ZODB3 compatibility -
 # for example wendelin.core. ZODB3 3.11 is just a dependency egg on _latest_
 # ZODB, persistent, BTrees and ZEO.
 ZODB3 = 3.11.0
+# eggs that are common to ZODB4 and ZODB5.
 [versions:python2]
 BTrees = 4.11.3
 persistent = 4.9.3
...
@@ -100,7 +100,7 @@ configure-options =
 # It will create a pear/temp directory under the SR instead of a shared /tmp/pear/temp.
 # XXX we could mkdir tmp there
 environment =
-PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${curl:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${oniguruma:location}/lib/pkgconfig:${argon2:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${mariadb:location}/lib/pkgconfig:${libjpeg:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${libiconv:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${libsodium:location}/lib/pkgconfig
+PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${curl:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${oniguruma:location}/lib/pkgconfig:${argon2:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${mariadb:location}/lib/pkgconfig:${libjpeg:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${libiconv:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${libsodium:location}/lib/pkgconfig:${curl:pkgconfig}
 PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:${libxml2:location}/bin:${xz-utils:location}/bin:%(PATH)s
 CPPFLAGS=-I${libzip:location}/include
 LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -Wl,${curl:location}/lib -L${libtool:location}/lib -Wl,-rpath -Wl,${libtool:location}/lib -L${mariadb:location}/lib -Wl,-rpath -Wl,${mariadb:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${libzip:location}/lib -Wl,-rpath -Wl,${libzip:location}/lib -L${argon2:location}/lib/x86_64-linux-gnu -Wl,-rpath -Wl,${argon2:location}/lib/x86_64-linux-gnu -Wl,-rpath -Wl,${zstd:location}/lib -L${libnsl:location}/lib -Wl,-rpath -Wl,${libnsl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib
...
@@ -22,8 +22,8 @@ extends =
 [apr]
 recipe = slapos.recipe.cmmi
 shared = true
-version = 1.7.4
+version = 1.7.5
-md5sum = f8a62f3984898ba0ea8b6f26b851cb99
+md5sum = 8b156d4d0e804cb1f172312ffe087c25
 url = https://archive.apache.org/dist/apr/apr-${:version}.tar.bz2
 environment =
 LDFLAGS=-Wl,-rpath=${libuuid:location}/lib
...
-diff -ur autoconf-2.71.orig/lib/freeze.mk autoconf-2.71/lib/freeze.mk
+diff -ur autoconf-2.72.orig/lib/freeze.mk autoconf-2.72/lib/freeze.mk
---- autoconf-2.71.orig/lib/freeze.mk 2021-01-28 21:46:48.000000000 +0100
-+++ autoconf-2.71/lib/freeze.mk 2021-10-25 09:21:38.519238189 +0200
+--- autoconf-2.72.orig/lib/freeze.mk 2023-03-12 16:29:55.000000000 +0100
++++ autoconf-2.72/lib/freeze.mk 2024-11-13 11:10:31.491703037 +0100
 @@ -31,7 +31,7 @@
  # apply to us.
  MY_AUTOM4TE = \
  autom4te_perllibdir='$(top_srcdir)'/lib \
 - AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_build_prefix)bin/autom4te \
 + AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_build_prefix)bin/autom4te \
- -B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ` '
+ -B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ' '
 # When processing the file with diversion disabled, there must be no
-diff -ur autoconf-2.71.orig/Makefile.in autoconf-2.71/Makefile.in
+diff -ur autoconf-2.72.orig/Makefile.in autoconf-2.72/Makefile.in
---- autoconf-2.71.orig/Makefile.in 2021-01-28 22:06:02.000000000 +0100
-+++ autoconf-2.71/Makefile.in 2021-10-25 09:22:07.231239851 +0200
+--- autoconf-2.72.orig/Makefile.in 2023-12-22 19:32:21.000000000 +0100
++++ autoconf-2.72/Makefile.in 2024-11-13 11:10:46.559530554 +0100
 @@ -577,7 +577,7 @@
  # apply to us.
  MY_AUTOM4TE = \
  autom4te_perllibdir='$(top_srcdir)'/lib \
 - AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_build_prefix)bin/autom4te \
 + AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_build_prefix)bin/autom4te \
- -B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ` '
+ -B '$(top_build_prefix)'lib -B '$(top_srcdir)'/lib # keep ' '
@@ -4,18 +4,19 @@ extends =
 ../patch/buildout.cfg
 ../perl/buildout.cfg
 ../gnu-config/buildout.cfg
+../xz-utils/buildout.cfg
 parts =
 autoconf
 [autoconf]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.71.tar.gz
+url = https://ftp.gnu.org/gnu/autoconf/autoconf-2.72.tar.xz
-md5sum = f64e38d671fdec06077a41eb4d5ee476
+md5sum = 1be79f7106ab6767f18391c5e22be701
 pre-configure = cp -f ${gnu-config:location}/config.sub ${gnu-config:location}/config.guess build-aux/
 patch-options = -p1
 patches =
-${:_profile_base_location_}/autoconf-2.71-shebang_workaround.patch#9b4e417d661101f737d588eb1401747d
+${:_profile_base_location_}/autoconf-shebang_workaround.patch#be56764cd102d668f3290d8ebe129226
 environment =
 M4=${m4:location}/bin/m4
-PATH=${patch:location}/bin:${perl:location}/bin:%(PATH)s
+PATH=${xz-utils:location}/bin:${patch:location}/bin:${perl:location}/bin:%(PATH)s
@@ -10,8 +10,8 @@ parts =
 [automake]
 recipe = slapos.recipe.cmmi
 shared = true
-md5sum = 4017e96f89fca45ca946f1c5db6be714
+md5sum = f908133b080073f3907389f0f73d76f4
-url = https://ftp.gnu.org/gnu/automake/automake-1.16.5.tar.xz
+url = https://ftp.gnu.org/gnu/automake/automake-1.17.tar.gz
 patch-options = -p1
 patches =
 ${:_profile_base_location_}/automake-1.16-shebang_workaround.patch#203f9199b0e629de3630b5959f8cf73e
...
@@ -2,31 +2,14 @@
 extends =
 ../gettext/buildout.cfg
 ../ncurses/buildout.cfg
-../patch/buildout.cfg
 parts =
 bash
 [bash]
 recipe = slapos.recipe.cmmi
 shared = true
-url-prefix = https://ftp.gnu.org/pub/gnu/bash/bash-5.1
-url = ${:url-prefix}.tar.gz
-md5sum = bb91a17fd6c9032c26d0b2b78b50aff5
-patch-binary = ${patch:location}/bin/patch
-patch-prefix = ${:url-prefix}-patches/bash51
-patches =
-${:patch-prefix}-001#57641ddbf92fca25df92a443e36f285a
-${:patch-prefix}-002#aed44842ed1a05fcfc3ef146991fdaef
-${:patch-prefix}-003#bf96455600a86420d69f5166575192dd
-${:patch-prefix}-004#d2c524dba0eea5dc5f00849cc84376a0
-${:patch-prefix}-005#5081278e6c35154e28d09f582251c529
-${:patch-prefix}-006#f4a8bcda4b7bd2c72b29c107027608a3
-${:patch-prefix}-007#bf7816d63ee0476054bf18a488d8bb1b
-${:patch-prefix}-008#7e5a30d864f834953b22a55c01c8690b
-${:patch-prefix}-009#8e35f11cbfcefe2c07c64d00601fd713
-${:patch-prefix}-010#d78ad19986c0355a8d67c9a0e82ad4aa
-${:patch-prefix}-011#2416386b5ee94e499ccbf71f6fd4aebd
-${:patch-prefix}-012#879b2d8a03162faebb7234c4cd57c5cd
+url = https://ftp.gnu.org/pub/gnu/bash/bash-5.2.37.tar.gz
+md5sum = 9c28f21ff65de72ca329c1779684a972
 environment =
 CPPFLAGS=-I${ncurses:location}/include
 LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
...
@@ -12,8 +12,8 @@ parts =
 [ca-certificates]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_20230311.tar.xz
+url = https://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_20240203.tar.xz
-md5sum = fc1c3ec0067385f0be8ac7f6e670a0f8
+md5sum = 228129ccf8cd99b991d771c44dd4052c
 patch-binary = ${patch:location}/bin/patch
 patches =
 ${:_profile_base_location_}/ca-certificates-any-python.patch#56ecfeb8f23ae00726191a611d08894e
...
 [buildout]
 extends =
 ../gmp/buildout.cfg
-../patch/buildout.cfg
 ../perl/buildout.cfg
 ../xz-utils/buildout.cfg
 parts =
@@ -10,19 +9,16 @@ parts =
 [coreutils]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://ftp.gnu.org/gnu/coreutils/coreutils-9.4.tar.xz
+url = https://ftp.gnu.org/gnu/coreutils/coreutils-9.5.tar.xz
-md5sum = 459e9546074db2834eefe5421f250025
+md5sum = e99adfa059a63db3503cc71f3d151e31
 configure-options =
 --disable-libcap
 --without-selinux
 --prefix=@@LOCATION@@
 --with-openssl=no
 environment =
-PATH=${patch:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
 LDFLAGS=-Wl,--as-needed -L${gmp:location}/lib -Wl,-rpath=${gmp:location}/lib
-patches =
-https://github.com/coreutils/coreutils/commit/c4c5ed8f4e9cd55a12966d4f520e3a13101637d9.patch?full_index=1#5fc691542117b167b456daf222d2a6e5
-patch-options = -p1
 # disable year 2038 problem ONLY for 32 bit architectures
 [coreutils:bits32]
...
@@ -20,8 +20,8 @@ parts =
 [curl]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://curl.se/download/curl-8.6.0.tar.xz
+url = https://curl.se/download/curl-8.11.0.tar.xz
-md5sum = 8f28f7e08c91cc679a45fccf66184fbc
+md5sum = 49dd886ac84ed3de693464f78f1ee926
 configure-options =
 --disable-static
 --disable-ech
@@ -72,9 +72,10 @@ OPENSSL = ${openssl:location}
 PKG_CONFIG_PATH =
 WITH =
+pkgconfig = ${:OPENSSL}/lib/pkgconfig:${nghttp2:location}/lib/pkgconfig:${libidn2:location}/lib/pkgconfig:${zstd:location}/lib/pkgconfig${:PKG_CONFIG_PATH}
 environment =
 PATH=${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
-PKG_CONFIG_PATH=${:OPENSSL}/lib/pkgconfig:${nghttp2:location}/lib/pkgconfig:${libidn2:location}/lib/pkgconfig${:PKG_CONFIG_PATH}
+PKG_CONFIG_PATH=${:pkgconfig}
 LDFLAGS=-Wl,-rpath=${libidn2:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${:OPENSSL}/lib -Wl,-rpath=${nghttp2:location}/lib -Wl,-rpath=${zstd:location}/lib ${:LDFLAGS}
 [curl-http3]
...
From 27d88c40e251b370f4dd2fcc7ae03c2967c68e4c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Mon, 2 Sep 2024 04:41:13 +0000
Subject: [PATCH] checkPermission: align behavior with objects raising in
__getattr__
The observed problem was a behavior different between C and python
implementation on python 3, happening with Zope python script. When the
context can not be accessed by the current user, Zope binds a
`Shared.DC.Scripts.Bindings.UnauthorizedBinding`, a class that raises an
Unauthorized error when the context is actually accessed, in order to
postpone the Unauthorized if something is actually accessed. This class
does implements this by raising Unauthorized in __getattr__.
The python implementation of `checkPermission` uses `hasattr` and
`hasattr` has changed between python2 and python3, on python2 it was
ignoring all exceptions, including potential Unauthorized errors and
just returning False, but on python3 these errors are raised.
This change of behavior of python causes checkPermission to behave
differently: when using python implementation on python2 or when using
C implementation, such Unauthorized errors were gracefully handled and
caused checkPermission to return False, but on python3 checkPermission
raises.
This change make this scenario behave the same between python2, python3
and C implementation: Unauthorized errors raised in __getattr__ are
supported. The code is also micro-simplified by doing only one getattr
instead of hasattr and then getattr.
---
src/AccessControl/ImplPython.py | 6 +++++-
src/AccessControl/cAccessControl.c | 7 +++++--
src/AccessControl/tests/testZopeSecurityPolicy.py | 15 +++++++++++++++
4 files changed, 28 insertions(+), 3 deletions(-)
diff --git a/src/AccessControl/ImplPython.py b/src/AccessControl/ImplPython.py
index 1a7788b..0a9326b 100644
--- a/src/AccessControl/ImplPython.py
+++ b/src/AccessControl/ImplPython.py
@@ -31,6 +31,7 @@
from Acquisition import aq_parent
from ExtensionClass import Base
from zope.interface import implementer
+from zExceptions import Unauthorized as zExceptions_Unauthorized
PURE_PYTHON = int(os.environ.get('PURE_PYTHON', '0'))
if PURE_PYTHON:
@@ -71,8 +72,11 @@ def rolesForPermissionOn(perm, object, default=_default_roles, n=None):
r = None
while True:
- if hasattr(object, n):
+ try:
roles = getattr(object, n)
+ except (AttributeError, zExceptions_Unauthorized):
+ pass
+ else:
if roles is None:
if _embed_permission_in_roles:
return (('Anonymous',), n)
diff --git a/src/AccessControl/cAccessControl.c b/src/AccessControl/cAccessControl.c
index 403ed67..1a109fa 100644
--- a/src/AccessControl/cAccessControl.c
+++ b/src/AccessControl/cAccessControl.c
@@ -1847,13 +1847,16 @@ c_rolesForPermissionOn(PyObject *perm, PyObject *object,
Py_INCREF(r);
/*
- while 1:
+ while True:
*/
while (1)
{
/*
- if hasattr(object, n):
+ try:
roles = getattr(object, n)
+ except (AttributeError, zExceptions_Unauthorized):
+ pass
+ else:
*/
PyObject *roles = PyObject_GetAttr(object, n);
if (roles != NULL)
diff --git a/src/AccessControl/tests/testZopeSecurityPolicy.py b/src/AccessControl/tests/testZopeSecurityPolicy.py
index 9b12a0f..ee74bad 100644
--- a/src/AccessControl/tests/testZopeSecurityPolicy.py
+++ b/src/AccessControl/tests/testZopeSecurityPolicy.py
@@ -157,6 +157,15 @@ class PartlyProtectedSimpleItem3 (PartlyProtectedSimpleItem1):
__roles__ = sysadmin_roles
+class DynamicallyUnauthorized(SimpleItemish):
+ # This class raises an Unauthorized on attribute access,
+ # similar to Zope's Shared.DC.Scripts.Bindings.UnauthorizedBinding
+ __ac_local_roles__ = {}
+
+ def __getattr__(self, name):
+ raise Unauthorized('Not authorized to access: %s' % name)
+
+
class SimpleClass:
attr = 1
@@ -173,6 +182,7 @@ def setUp(self):
a.item1 = PartlyProtectedSimpleItem1()
a.item2 = PartlyProtectedSimpleItem2()
a.item3 = PartlyProtectedSimpleItem3()
+ a.d_item = DynamicallyUnauthorized()
uf = UserFolder()
a.acl_users = uf
self.uf = a.acl_users
@@ -351,6 +361,11 @@ def test_checkPermission_proxy_role_scope(self):
r_subitem,
context))
+ def test_checkPermission_dynamically_unauthorized(self):
+ d_item = self.a.d_item
+ context = self.context
+ self.assertFalse(self.policy.checkPermission('View', d_item, context))
+
def testUnicodeRolesForPermission(self):
r_item = self.a.r_item
context = self.context
From a037f2a2e2090dcd63b83af9b06427dd8c7e9536 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Wed, 22 May 2024 23:58:45 +0900
Subject: [PATCH] Show Python Script source code in tracebacks
Expose a __loader__ in globals so that linecache module is able to use
it to display the source code.
This requires changing the "filename" used when compiling function,
because linecache uses code.co_filename as a cache key, so it's
necessary that each python script use a different filename.
WIP from https://github.com/zopefoundation/Products.PythonScripts/pull/65
---
CHANGES.rst | 2 +
src/Products/PythonScripts/PythonScript.py | 19 ++++++-
.../PythonScripts/tests/testPythonScript.py | 50 ++++++++++++++++++-
3 files changed, 67 insertions(+), 4 deletions(-)
diff --git a/src/Products/PythonScripts/PythonScript.py b/src/Products/PythonScripts/PythonScript.py
index fe4223a..5cb7f37 100644
--- a/src/Products/PythonScripts/PythonScript.py
+++ b/src/Products/PythonScripts/PythonScript.py
@@ -16,7 +16,9 @@
Python code.
"""
+import importlib.abc
import importlib.util
+import linecache
import marshal
import os
import re
@@ -56,7 +58,7 @@
Python_magic = importlib.util.MAGIC_NUMBER
# This should only be incremented to force recompilation.
-Script_magic = 4
+Script_magic = 5
_log_complaint = (
'Some of your Scripts have stale code cached. Since Zope cannot'
' use this code, startup will be slightly slower until these Scripts'
@@ -97,6 +99,16 @@ def manage_addPythonScript(self, id, title='', file=None, REQUEST=None,
return ''
+class PythonScriptLoader(importlib.abc.Loader):
+ """PEP302 loader to display source code in tracebacks
+ """
+ def __init__(self, source):
+ self._source = source
+
+ def get_source(self, name):
+ return self._source
+
+
class PythonScript(Script, Historical, Cacheable):
"""Web-callable scripts written in a safe subset of Python.
@@ -234,7 +246,7 @@ def _compile(self):
self._params,
body=self._body or 'pass',
name=self.id,
- filename=self.meta_type,
+ filename=getattr(self, '_filepath', None) or self.get_filepath(),
globalize=bind_names)
code = compile_result.code
@@ -261,6 +273,7 @@ def _compile(self):
fc.co_argcount)
self.Python_magic = Python_magic
self.Script_magic = Script_magic
+ linecache.clearcache()
self._v_change = 0
def _newfun(self, code):
@@ -331,6 +344,8 @@ def _exec(self, bound_names, args, kw):
PythonScriptTracebackSupplement, self, -1)
safe_globals['__file__'] = getattr(
self, '_filepath', None) or self.get_filepath()
+ safe_globals['__loader__'] = PythonScriptLoader(self._body)
+
function = types.FunctionType(
function_code, safe_globals, None, function_argument_definitions)
diff --git a/src/Products/PythonScripts/tests/testPythonScript.py b/src/Products/PythonScripts/tests/testPythonScript.py
index 60ef6c3..7cd2266 100644
--- a/src/Products/PythonScripts/tests/testPythonScript.py
+++ b/src/Products/PythonScripts/tests/testPythonScript.py
@@ -15,6 +15,7 @@
import io
import os
import sys
+import traceback
import unittest
import warnings
from urllib.error import HTTPError
@@ -241,7 +242,8 @@ def test_manage_DAVget(self):
self.assertEqual(ps.read(), ps.manage_DAVget())
def test_PUT_native_string(self):
- ps = makerequest(self._filePS('complete'))
+ container = DummyFolder('container')
+ ps = makerequest(self._filePS('complete').__of__(container))
self.assertEqual(ps.title, 'This is a title')
self.assertEqual(ps.body(), 'print(foo+bar+baz)\nreturn printed\n')
self.assertEqual(ps.params(), 'foo, bar, baz=1')
@@ -265,7 +267,8 @@ def test_PUT_native_string(self):
self.assertEqual(ps.params(), 'oops')
def test_PUT_bytes(self):
- ps = makerequest(self._filePS('complete'))
+ container = DummyFolder('container')
+ ps = makerequest(self._filePS('complete').__of__(container))
self.assertEqual(ps.title, 'This is a title')
self.assertEqual(ps.body(), 'print(foo+bar+baz)\nreturn printed\n')
self.assertEqual(ps.params(), 'foo, bar, baz=1')
@@ -588,3 +591,46 @@ def test_PythonScript_proxyroles_nonmanager(self):
# Cleanup
noSecurityManager()
+
+
+class TestTraceback(FunctionalTestCase, PythonScriptTestBase):
+
+ def _format_exception(self):
+ return "".join(traceback.format_exception(*sys.exc_info()))
+
+ def test_source_code_in_traceback(self):
+ ps = self._newPS("1 / 0")
+ try:
+ ps()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn("1 / 0", formatted_exception)
+
+ ps.write("2 / 0")
+ try:
+ ps()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn("2 / 0", formatted_exception)
+
+ def test_multiple_scripts_in_traceback(self):
+ from Products.PythonScripts.PythonScript import manage_addPythonScript
+
+ script1_body = "container.script2()"
+ manage_addPythonScript(
+ self.folder,
+ "script1",
+ file=script1_body,
+ )
+ script2_body = "1 / 0"
+ manage_addPythonScript(
+ self.folder,
+ "script2",
+ file=script2_body,
+ )
+ try:
+ self.folder.script1()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn(script1_body, formatted_exception)
+ self.assertIn(script2_body, formatted_exception)
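An aside that is not part of the vendored patch: the mechanism it relies on is the standard linecache/traceback protocol, in which a frame's globals may carry a __loader__ whose get_source() supplies the text for an otherwise unreadable co_filename. A standalone sketch under those assumptions, with DemoLoader as a hypothetical stand-in for PythonScriptLoader and an arbitrary unique filename:

import traceback

source = 'def run():\n    return 1 / 0\n'

class DemoLoader:
    def __init__(self, source):
        self._source = source
    def get_source(self, name):
        return self._source

# Each compiled script needs its own co_filename, because linecache caches per filename.
globs = {'__name__': 'demo_script', '__loader__': DemoLoader(source)}
exec(compile(source, 'Script (Python) "demo_script"', 'exec'), globs)

try:
    globs['run']()
except ZeroDivisionError:
    tb = traceback.format_exc()
    assert 'return 1 / 0' in tb  # source recovered through __loader__.get_source()
    print(tb)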
@@ -8,8 +8,8 @@ parts =
 [findutils]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://ftp.debian.org/debian/pool/main/f/findutils/findutils_4.9.0.orig.tar.xz
+url = http://ftp.debian.org/debian/pool/main/f/findutils/findutils_4.10.0.orig.tar.xz
-md5sum = 4a4a547e888a944b2f3af31d789a1137
+md5sum = 870cfd71c07d37ebe56f9f4aaf4ad872
 environment =
 PATH=${xz-utils:location}/bin:%(PATH)s
...
@@ -44,7 +44,7 @@ environment =
 PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
 CPPFLAGS=-I${pcre:location}/include
 LDFLAGS=-L${pcre:location}/lib -Wl,-rpath=${:location}/lib -Wl,-rpath=${proj:location}/lib -Wl,-rpath=${curl:location}/lib -Wl,-rpath=${geos:location}/lib -Wl,-rpath=${giflib:location}/lib -Wl,-rpath=${openjpeg:location}/lib -Wl,-rpath=${jbigkit:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libjpeg:location}/lib -Wl,-rpath=${libpng:location}/lib -Wl,-rpath=${libtiff:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
-PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig
+PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${curl:pkgconfig}
 [gdal-python]
 recipe = zc.recipe.egg:custom
...
@@ -7,9 +7,9 @@ parts =
 [gdbm]
 recipe = slapos.recipe.cmmi
 shared = true
-version = 1.23
+version = 1.24
 url = http://ftp.gnu.org/gnu/gdbm/gdbm-${:version}.tar.gz
-md5sum = 8551961e36bf8c70b7500d255d3658ec
+md5sum = c780815649e52317be48331c1773e987
 pre-configure = cp -f ${gnu-config:location}/config.sub ${gnu-config:location}/config.guess build-aux/
 configure-options =
 --disable-static
...
@@ -11,8 +11,8 @@ extends =
 [gettext]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://ftp.gnu.org/pub/gnu/gettext/gettext-0.22.3.tar.lz
+url = https://ftp.gnu.org/pub/gnu/gettext/gettext-0.22.5.tar.lz
-md5sum = 9f4f1b1432fec4aab29fea004347c9b4
+md5sum = d82550b0c72b2bf175b682d27c7565fc
 configure-options =
 --disable-static
...
@@ -13,8 +13,8 @@ parts = haproxy
 [haproxy]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.18.tar.gz
+url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.20.tar.gz
-md5sum = 9cb80d59919ebf108d58ecf4618f9acf
+md5sum = b25c95f231c0c36eeb2957321849d87d
 configure-command = true
 # for Linux kernel 2.6.28 and above, we use "linux-glibc" as the TARGET,
 # otherwise use "generic".
...
@@ -8,8 +8,8 @@ parts =
 [libedit]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://thrysoee.dk/editline/libedit-20230828-3.1.tar.gz
+url = https://thrysoee.dk/editline/libedit-20240808-3.1.tar.gz
-md5sum = 16bb2ab0d33bce3467f5cd4ec7d8f3ee
+md5sum = 42f9434731d9097993b87e073e798ddd
 environment =
 CPPFLAGS=-I${ncurses:location}/include
 LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
...
@@ -7,8 +7,8 @@ parts =
 [libexpat]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/libexpat/libexpat/releases/download/R_2_6_3/expat-2.6.3.tar.lz
+url = https://github.com/libexpat/libexpat/releases/download/R_2_6_4/expat-2.6.4.tar.lz
-md5sum = 5732b5335a3c75a052b3a37e99404b99
+md5sum = b39fd697eedc931fa8dd5a2cce866234
 configure-options =
 --disable-static
 --without-xmlwf
...
@@ -8,9 +8,9 @@ parts =
 [libffi]
 recipe = slapos.recipe.cmmi
 shared = true
-version = 3.3
+version = 3.4.3
 url = http://sourceware.org/pub/libffi/libffi-${:version}.tar.gz
-md5sum = 6313289e32f1d38a9df4770b014a2ca7
+md5sum = b57b0ac1d1072681cee9148a417bd2ec
 location = @@LOCATION@@
 patch-options = -p1
 patches =
...
@@ -4,11 +4,11 @@
 [libidn]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://ftp.gnu.org/gnu/libidn/libidn-1.41.tar.gz
+url = https://ftp.gnu.org/gnu/libidn/libidn-1.42.tar.gz
-md5sum = 2cbff2f75f904328ac507af576b07197
+md5sum = fe061a95ae23979150a692d102dce4ad
 [libidn2]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://ftp.gnu.org/gnu/libidn/libidn2-2.3.4.tar.gz
+url = https://ftp.gnu.org/gnu/libidn/libidn2-2.3.7.tar.gz
-md5sum = a12109804fc9c5d7fb31f068c66655b8
+md5sum = de2818c7dea718a4f264f463f595596b
@@ -7,8 +7,8 @@ parts = libtool
 [libtool]
 recipe = slapos.recipe.cmmi
 shared = true
-md5sum = 2fc0b6ddcd66a89ed6e45db28fa44232
+md5sum = e42b7d9ab875f1d013bba3cdb8a59b58
-url = https://ftp.gnu.org/gnu/libtool/libtool-2.4.7.tar.xz
+url = https://ftp.gnu.org/gnu/libtool/libtool-2.5.3.tar.xz
 configure-options =
 --disable-static
 environment =
...
@@ -8,8 +8,8 @@ parts =
 libxslt
 [libxslt]
-url = https://download.gnome.org/sources/libxslt/1.1/libxslt-1.1.38.tar.xz
+url = https://download.gnome.org/sources/libxslt/1.1/libxslt-1.1.42.tar.xz
-md5sum = 7d6e43db810177ddf9818ef394027019
+md5sum = 56bc5d89aa39d62002961c150fec08a0
 recipe = slapos.recipe.cmmi
 shared = true
 # --disable-static is temporarilly removed due to build error
...
@@ -8,8 +8,8 @@ parts = logrotate
 [logrotate]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/logrotate/logrotate/releases/download/3.21.0/logrotate-3.21.0.tar.xz
+url = https://github.com/logrotate/logrotate/releases/download/3.22.0/logrotate-3.22.0.tar.xz
-md5sum = 6c15f45efc3475a576c4f7e6cc481b2c
+md5sum = 2386501a53ff086f44eeada2b27d50b8
 # BBB this is only for backward-compatibility.
 configure-options =
 --with-selinux=no
...
@@ -8,8 +8,8 @@ parts =
 [lua]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://www.lua.org/ftp/lua-5.4.6.tar.gz
+url = https://www.lua.org/ftp/lua-5.4.7.tar.gz
-md5sum = 25a429319dff20dfbfb9956c2b5be911
+md5sum = fc3f3291353bbe6ee6dec85ee61331e8
 configure-command = true
 make-options =
 "$(uname -sr 2>/dev/null|grep -Eq '^Linux' && echo linux || echo posix)"
@@ -32,8 +32,8 @@ pc =
 [lua5.2]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://www.lua.org/ftp/lua-5.2.3.tar.gz
+url = https://www.lua.org/ftp/lua-5.2.4.tar.gz
-md5sum = dc7f94ec6ff15c985d2d6ad0f1b35654
+md5sum = 913fdb32207046b273fdb17aad70be13
 configure-command = true
 make-options =
 "$(uname -sr 2>/dev/null|grep -Eq '^Linux' && echo linux || echo posix)"
...
@@ -7,8 +7,8 @@ parts = luajit
 [luajit]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://luajit.org/download/LuaJIT-2.0.5.tar.gz
+url = https://github.com/LuaJIT/LuaJIT/archive/69bbf3c1b01de8239444b0c430a89fa868978fea.tar.gz
-md5sum = 48353202cbcacab84ee41a5a70ea0a2c
+md5sum = a95ff00d4f327aa68905c35814310d82
 configure-command = true
 # pass dummy LDCONFIG to skip needless calling of ldconfig by non-root user
 make-options =
...
@@ -5,5 +5,5 @@ parts =
 [lunzip]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://download-mirror.savannah.gnu.org/releases/lzip/lunzip/lunzip-1.13.tar.gz
+url = http://download-mirror.savannah.gnu.org/releases/lzip/lunzip/lunzip-1.14.tar.gz
-md5sum = 4bc15e65fef99db64e27f4cd369ae02e
+md5sum = e9ce0807c90d00cbf605c5b710adde91
@@ -6,6 +6,6 @@ parts =
 [lz4]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/lz4/lz4/releases/download/v1.9.4/lz4-1.9.4.tar.gz
+url = https://github.com/lz4/lz4/releases/download/v1.10.0/lz4-1.10.0.tar.gz
-md5sum = e9286adb64040071c5e23498bf753261
+md5sum = dead9f5f1966d9ae56e1e32761e4e675
 configure-command = true
@@ -132,8 +132,8 @@ md5sum = c19f97dc1ea3165fb282a8384155dc0a
 # as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/mroonga/mroonga/releases/download/v14.07/mroonga-14.07.tar.gz
+url = https://github.com/mroonga/mroonga/releases/download/v14.08/mroonga-14.08.tar.gz
-md5sum = 86d98564a9bff2b993db284ce02c7260
+md5sum = 9cb84e401811a063f0f836f0f5d1f7b7
 pre-configure =
 rm -rf fake_mariadb_source
 mkdir -p fake_mariadb_source
@@ -156,7 +156,6 @@ make-targets = plugindir=${:plugin-dir} install
 patch-options = -p1
 patches =
 ${:_profile_base_location_}/mroonga_boolean.patch#c818568fe35ca6a4298f18e575d962a0
-https://github.com/mroonga/mroonga/commit/8f080086a6b7b15b84169e66fd6bf6956644ef98.patch?full_index=1#a275fa738a09f804515f61f69b8e549a
 environment =
 PATH=${binutils:location}/bin:${groonga:location}/bin:${patch:location}/bin:${pkgconfig:location}/bin:%(PATH)s
 CPPFLAGS=-I${groonga:location}/include/groonga -I${pcre:location}/include
...
@@ -11,8 +11,8 @@ parts =
 [nghttp2]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/nghttp2/nghttp2/releases/download/v1.62.1/nghttp2-1.62.1.tar.bz2
+url = https://github.com/nghttp2/nghttp2/releases/download/v1.64.0/nghttp2-1.64.0.tar.bz2
-md5sum = cc2f311e5affee2e78005946e0875fc3
+md5sum = 103421866471b6d5fc828189552d98a5
 pre-configure =
 autoreconf -fisv -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
 automake
...
@@ -7,8 +7,8 @@ extends =
 [nghttp3]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/ngtcp2/nghttp3/archive/refs/tags/v0.15.0.tar.gz
+url = https://github.com/ngtcp2/nghttp3/releases/download/v1.6.0/nghttp3-1.6.0.tar.xz
-md5sum = b9af99d8db0d48f91fc075dcbd837019
+md5sum = 32800b32141ef9661bef6a4df7de726a
 pre-configure =
 autoreconf -fisv -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
 automake
...
@@ -11,8 +11,8 @@ parts = nginx-output
 [nginx-common]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://nginx.org/download/nginx-1.25.2.tar.gz
+url = https://nginx.org/download/nginx-1.27.2.tar.gz
-md5sum = e0fc592d9721b7fccc2c959b45008ade
+md5sum = a0411bcbd1ff88bb2ea542af2ef57314
 [nginx]
 <= nginx-common
...
@@ -8,8 +8,8 @@ extends =
 [ngtcp2]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/ngtcp2/ngtcp2/archive/refs/tags/v0.19.1.tar.gz
+url = https://github.com/ngtcp2/ngtcp2/releases/download/v1.8.1/ngtcp2-1.8.1.tar.xz
-md5sum = 52da88163ad1929496f7ed13943c03b4
+md5sum = 5c027f1cbf915ebe30b796c312dedef8
 pre-configure =
 autoreconf -fisv -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
 automake
...
@@ -51,8 +51,8 @@ md5sum = 08f458c00fff496a52ef931c481045cd
 [openssl-quictls]
 <= openssl-3.0
-url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.0.15-quic1.tar.gz
+url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.3.0-quic1.tar.gz
-md5sum = 00fd544d5ae53bf841dc2f155c2f0524
+md5sum = 6c473e0a07926a2634e03055870e8370
 [openssl-1.1]
 <= openssl-common
...
@@ -8,7 +8,7 @@ extends =
 parts =
 pygolang
-gpython
+gpython-interpreter
 # pygolang is installed from git checkout
@@ -21,14 +21,6 @@ setup-eggs =
 setuptools-dso
 gevent
-# gpython program
-[gpython]
-recipe = zc.recipe.egg:scripts
-eggs = ${pygolang:egg}
-scripts = gpython
-# convenience for gpython users
-exe = ${buildout:bin-directory}/gpython
 [pygolang-repository]
 recipe = slapos.recipe.build:gitclone
 repository = https://lab.nexedi.com/nexedi/pygolang
@@ -63,6 +55,16 @@ scripts = ${:interpreter}
 exe = ${buildout:bin-directory}/${:interpreter}
+# gpython-interpreter is like python-interpreter, but runs gpython instead of standard python.
+[gpython-interpreter]
+<= python-interpreter
+interpreter = gpython
+initialization =
+from gpython import main
+main()
+sys.exit(0)
 # pyprog provides macro recipe to build python programs.
 #
 # Contrary to zc.recipe.egg:scripts it generates scripts that are run with
@@ -77,6 +79,13 @@ exe = ${buildout:bin-directory}/${:interpreter}
 # exe = ${buildout:bin-directory}/myprog
 # entry = my.py.mod:main
 # eggs = ...
+#
+# By default python interpreter defined in [python-interpreter] section is used
+# to run the program. The default can be adjusted as illustrated below:
+#
+# [myprog]
+# <= pyprog
+# python-interpreter = gpython-interpreter # set to name of the section that defines the interpreter
 [pyprog]
 recipe = slapos.recipe.build
 initialization =
@@ -88,6 +97,7 @@ init =
 entry = options['entry']
 eggs = options['eggs']
 pyinit = options['initialization']
+pyinterpreter = options.get('python-interpreter', 'python-interpreter')
 options['depends'] += '$${.%s.pyprog:recipe}' % name
@@ -97,6 +107,7 @@ init =
 # indent pyinit with ' '
 __pyinit = '\n'.join([' '+_ for _ in pyinit.splitlines()])
+__eggs = '\n'.join([' '+_ for _ in eggs.splitlines()])
 self.buildout.parse("""
 # .X.pyprog is python program to start and run entry
@@ -113,7 +124,7 @@ init =
 # .X.pyexe is python interpreter used by .X.pyprog
 [.%(name)s.pyexe]
-<= python-interpreter
+<= %(pyinterpreter)s
-eggs += %(eggs)s
+eggs += %(__eggs)s
 interpreter = $${:_buildout_section_name_}
 """ % locals())
@@ -2,7 +2,6 @@
 extends =
 ../gnu-config/buildout.cfg
 ../ncurses/buildout.cfg
-../patch/buildout.cfg
 parts =
 readline
@@ -10,14 +9,12 @@ parts =
 [readline]
 recipe = slapos.recipe.cmmi
 shared = true
-url = http://ftp.gnu.org/gnu/readline/readline-8.1.tar.gz
+url = https://ftp.gnu.org/gnu/readline/readline-8.2.13.tar.gz
-md5sum = e9557dd5b1409f5d7b37ef717c64518e
+md5sum = 05080bf3801e6874bb115cd6700b708f
-patches =
-${:_profile_base_location_}/configure-ncurses.diff#db8187a92f19e0e9d2fe595ca7a0426f
 configure-options =
+--with-shared-termcap-library
 --enable-multibyte
 --disable-static
 environment =
 CPPFLAGS=-I${ncurses:location}/include
 LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
-patch-binary = ${patch:location}/bin/patch
--- configure
+++ configure
@@ -6856,6 +6856,7 @@
# *curses*|*termcap*|*termlib*) ;;
# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;;
# esac
+ SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB"
@@ -2,7 +2,6 @@
 parts =
 rsyslogd
 extends =
-../curl/buildout.cfg
 ../libestr/buildout.cfg
 ../libfastjson/buildout.cfg
 ../util-linux/buildout.cfg
@@ -10,17 +9,18 @@ extends =
 [rsyslogd]
 recipe = slapos.recipe.cmmi
-url = https://www.rsyslog.com/files/download/rsyslog/rsyslog-8.2310.0.tar.gz
+url = https://www.rsyslog.com/files/download/rsyslog/rsyslog-8.2410.0.tar.gz
-md5sum = e492884a5f64d2a069684fcb21171114
+md5sum = 875d6c0fe3cf0c6230273106fce6f6c6
 shared = true
 configure-options =
+--disable-fmhttp
 --disable-klog
 --disable-libgcrypt
 --disable-liblogging-stdlog
 --disable-libsystemd
 environment =
 PATH=${pkgconfig:location}/bin:%(PATH)s
-PKG_CONFIG_PATH=${libestr:location}/lib/pkgconfig:${curl:location}/lib/pkgconfig:${libfastjson:location}/lib/pkgconfig:${libuuid:location}/lib/pkgconfig
+PKG_CONFIG_PATH=${libestr:location}/lib/pkgconfig:${libfastjson:location}/lib/pkgconfig:${libuuid:location}/lib/pkgconfig
-CPPFLAGS=-I${libestr:location}/include -I${curl:location}/include -I${libfastjson:location}/include -I${libuuid:location}/include -I${zlib:location}/include
+CPPFLAGS=-I${libestr:location}/include -I${libfastjson:location}/include -I${libuuid:location}/include -I${zlib:location}/include
-LDFLAGS=-Wl,-rpath=${libestr:location}/lib -Wl,-rpath=${curl:location}/lib -Wl,-rpath=${libfastjson:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
+LDFLAGS=-Wl,-rpath=${libestr:location}/lib -Wl,-rpath=${libfastjson:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
 ZLIB_CFLAGS=-I${zlib:location}/include
@@ -11,8 +11,8 @@ parts =
 recipe = slapos.recipe.cmmi
 shared = true
 # Some options need the canonical source code (i.e. not as an amalgamation).
-url = https://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.40.1.orig.tar.xz
+url = https://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.46.1.orig.tar.xz
-md5sum = 79f2507907721b770cbec98195cecece
+md5sum = b7908531e276de581710c2619f1e8429
 configure-options =
 --disable-static
 --disable-tcl
...
@@ -5,8 +5,8 @@ parts = tcl
 [tcl]
 recipe = slapos.recipe.cmmi
-url = https://prdownloads.sourceforge.net/tcl/tcl8.6.13-src.tar.gz
+url = https://prdownloads.sourceforge.net/tcl/tcl8.6.15-src.tar.gz
-md5sum = 0e4358aade2f5db8a8b6f2f6d9481ec2
+md5sum = c13a4d5425b5ae335258342b38ba34c2
 shared = true
 environment =
 CPPFLAGS=-I${zlib:location}/include
...
@@ -46,6 +46,7 @@ patch-options = -p1
 patches =
 ${:_profile_base_location_}/trafficserver-9.1.1-TSHttpTxnCacheLookupStatusGet-fix.patch#d8ed3db3a48e97eb72aaaf7d7598a2d2
 ${:_profile_base_location_}/trafficserver-9.1.1-via-string-rapid-cdn.patch#8c39243d7525222385d5964485734f99
+${:_profile_base_location_}/trafficserver-9.2.5-duplicate-header.patch#49246720f824e0b9fccb13dbd4ba49b7
 environment =
 PATH=${libtool:location}/bin:${make:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
 LDFLAGS =-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${tcl:location}/lib -Wl,-rpath=${tcl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${luajit:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -lm
...
diff -ur trafficserver-9.2.5.old/proxy/http/HttpTransact.cc trafficserver-9.2.5/proxy/http/HttpTransact.cc
--- trafficserver-9.2.5.old/proxy/http/HttpTransact.cc 2024-07-23 23:42:39.000000000 +0200
+++ trafficserver-9.2.5/proxy/http/HttpTransact.cc 2024-11-04 16:26:40.842379904 +0100
@@ -5123,6 +5123,17 @@
MIMEField &field2{*spot2};
name2 = field2.name_get(&name_len2);
+ // It is specified above that content type should not
+ // be altered here however when a duplicate header
+ // is present, all headers following are delete and
+ // re-added back. This includes content type if it follows
+ // any duplicate header. This leads to the loss of
+ // content type in the client response.
+ // This ensures that it is not altered when duplicate
+ // headers are present.
+ if (name2 == MIME_FIELD_CONTENT_TYPE) {
+ continue;
+ }
cached_header->field_delete(name2, name_len2);
}
dups_seen = true;
Only in trafficserver-9.2.5/tests/gold_tests/headers: cachedDuplicateHeaders.test.py
Only in trafficserver-9.2.5/tests/gold_tests/headers/replays: cache-test.replay.yaml
@@ -5,7 +5,7 @@ parts =
 [xz-utils]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://tukaani.org/xz/xz-5.4.4.tar.bz2
+url = https://github.com/tukaani-project/xz/releases/download/v5.6.3/xz-5.6.3.tar.xz
-md5sum = fbb849a27e266964aefe26bad508144f
+md5sum = 57581b216a82482503bb63c8170d549c
 configure-options =
 --disable-static
# SlapOS component for zodbtools development.
[buildout]
extends =
buildout.cfg
../git/buildout.cfg
# override zodbtools to install it from latest git version
[zodbtools]
recipe = zc.recipe.egg:develop
setup = ${zodbtools-repository:location}
[zodbtools-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/zodbtools.git
branch = master
location = ${buildout:parts-directory}/zodbtools-dev
git-executable = ${git:location}/bin/git
# unpin zodbtools from versions, so that buildout does not fallback to
# installing non-dev egg if dev one has changed its version.
[versions]
zodbtools =
@@ -6,13 +6,15 @@ extends =
 ../pygolang/buildout.cfg
 ../ZODB/buildout.cfg
 ../ZEO/buildout.cfg
+../git/buildout.cfg
 parts =
-zodbtools/scripts
+zodbtools
 [zodbtools]
-recipe = zc.recipe.egg:eggs
+recipe = zc.recipe.egg:develop
+setup = ${zodbtools-repository:location}
 egg = zodbtools
 eggs =
 ${:egg}
@@ -29,5 +31,9 @@ recipe = zc.recipe.egg:scripts
 eggs = ${zodbtools:eggs}
-[versions]
-zodbtools = 0.0.0.dev9
+[zodbtools-repository]
+recipe = slapos.recipe.build:gitclone
+repository = https://lab.nexedi.com/nexedi/zodbtools.git
+location = ${buildout:parts-directory}/zodbtools
+git-executable = ${git:location}/bin/git
+revision = 0.0.0.dev9-21-gb0fdb5f
@@ -4,10 +4,9 @@
 [buildout]
 extends =
 # test<X>.cfg configures ZODB.major=<X>.
 ../../stack/nxdtest.cfg
 ../pytest/buildout.cfg
-buildout-dev.cfg
+buildout.cfg
 parts =
 # for instance
...
@@ -10,8 +10,8 @@ parts =
 recipe = slapos.recipe.cmmi
 shared = true
 url = https://github.com/facebook/zstd/releases/download/v${:version}/zstd-${:version}.tar.gz
-version = 1.5.5
+version = 1.5.6
-md5sum = 63251602329a106220e0a5ad26ba656f
+md5sum = 5a473726b3445d0e5d6296afd1ab6854
 location = @@LOCATION@@
 configure-command = :
 environment =
...
@@ -22,37 +22,22 @@ CURRENT_EGG_VERSION=`cat setup.py | grep ^version | cut -d\' -f2`
 sed -i "s/$CURRENT_EGG_VERSION/$NEXT_VERSION/g" setup.py
 git commit -m "Release slapos.cookbook ($NEXT_VERSION)" setup.py
-if [ ! $EGG_ONLY == "Y" ];
-then
-sed -i "s/slapos.cookbook = $CURRENT_EGG_VERSION/slapos.cookbook = $NEXT_VERSION/g" stack/slapos.cfg
-sed -i "s/slapos.cookbook==$CURRENT_EGG_VERSION/slapos.cookbook==$NEXT_VERSION/g" stack/slapos.cfg
-git commit -m "stack/slapos: slapos.cookbook version up ($NEXT_VERSION)" stack/slapos.cfg
-fi
 echo "############### Merging into 1.0 and generating the tag ################"
 git checkout 1.0
 git reset --hard origin/1.0
 git clean -f
 git merge master -m "Update Release Candidate"
-# Revert changes from master
-git diff HEAD..master | git apply
-# Download patch to update revisions based on tests
-wget -O update-release.sh https://nexedi.erp5.net/portal_skins/custom/TestResultModule_getReleaseCandidateRevision
-bash update-release.sh
-git commit -m 'Update git revisions' -a
 git tag $NEXT_VERSION -m "Release $NEXT_VERSION"
 echo "############### Building sdist ################"
-python setup.py sdist
+python3 setup.py sdist
 echo "###################################################################"
 echo "You are about to release a new tag, a new version of slapos.cookbook"
 echo " and a new version of SlapOS Software Release"
 echo ""
-echo "Lastest release: $CURRENT_VERSION"
+echo "Latest release: $CURRENT_VERSION"
 echo "Next Release to be Tagged: $NEXT_VERSION"
 echo "Current slapos.cookbook version: $CURRENT_EGG_VERSION"
 echo "Next slapos.cookbook to be released: $NEXT_VERSION"
@@ -70,15 +55,15 @@ echo " To review $NEXT_VERSION use :: git log $NEXT_VERSION"
 echo " To review individual commits :: git show HASH"
 echo ""
 echo ""
-echo "Once everything is ok, please upload slapos.cookbook egg /!\\ BEFORE /!\\"
-echo "push your changes into the repository. Please use twine for it."
+echo "Once everything is ok, please upload slapos.cookbook egg to pypi."
+echo "Please use twine for it."
 echo ""
 echo "To upload :: python -m twine upload dist/slapos.cookbook-$NEXT_VERSION*"
 echo "To verify if upload succeed access: https://pypi.org/project/slapos.cookbook/$NEXT_VERSION/"
 echo ""
 echo " More info on twine: https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives"
 echo ""
-echo "Once egg is updated please push your changes into main repository:"
+echo "Also push your changes into upstream repository:"
 echo ""
 echo " git push origin master 1.0 $NEXT_VERSION"
 echo ""
...
[buildout]
extends =
../software.cfg
parts =
rdiff-backup
[rdiff-backup]
shared = false
##############################################################################
#
# Copyright (c) 2024 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.backupserver-agent'
long_description = open("README.md").read()
setup(name=name,
version=version,
description="Test for SlapOS' Backupserver Agent",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
#
# Copyright (c) 2024 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
##############################################################################
import hashlib
import os
import shutil
import subprocess
import tempfile
from slapos.testing.testcase import installSoftwareUrlList, makeModuleSetUpAndTestCaseClass
software_release_url = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(software_release_url))
class TestBackupServerAgent(InstanceTestCase):
request_instance = False
def test(self):
rdiff_backup_path = os.path.join(
self.slap.software_directory,
hashlib.md5(self.getSoftwareURL().encode()).hexdigest(),
'parts',
'rdiff-backup',
'bin',
'rdiff-backup',
)
result = subprocess.run(
[rdiff_backup_path, '--version'],
capture_output=True,
)
self.assertEqual(result.returncode, 0, result.stdout)
self.assertEqual(result.stdout, b'rdiff-backup 1.0.5\n')
...@@ -26,7 +26,7 @@ md5sum = 851262d7174da868805cb7c8e1ced7c0 ...@@ -26,7 +26,7 @@ md5sum = 851262d7174da868805cb7c8e1ced7c0
[template-backup-script] [template-backup-script]
filename = template-backup-script.sh.in filename = template-backup-script.sh.in
md5sum = 3f3286347a7e271e7bfa66e1a840989b md5sum = 43b92958f34d5ca84e708bace817b971
[template-crontab-line] [template-crontab-line]
filename = template-crontab-line.in filename = template-crontab-line.in
......
...@@ -30,6 +30,9 @@ parts = ...@@ -30,6 +30,9 @@ parts =
template-crontab-line template-crontab-line
slapos-cookbook slapos-cookbook
[rdiff-backup]
<= rdiff-backup-1.0.5
[rssgen-eggs] [rssgen-eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
interpreter = python-${:_buildout_section_name_} interpreter = python-${:_buildout_section_name_}
......
...@@ -13,7 +13,7 @@ ${coreutils-output:echo} "Available only if backup succeed." > $${:statistic_log ...@@ -13,7 +13,7 @@ ${coreutils-output:echo} "Available only if backup succeed." > $${:statistic_log
# set -e # set -e
cd $${:datadirectory} cd $${:datadirectory}
${util-linux:location}/bin/flock $${:status_log}.lock \ ${util-linux:location}/bin/flock $${:status_log}.lock \
${rdiff-backup-1.0.5:location}/bin/rdiff-backup \ ${rdiff-backup:location}/bin/rdiff-backup \
$${:exclude_string} \ $${:exclude_string} \
--include='$${:include}' \ --include='$${:include}' \
--exclude='**' \ --exclude='**' \
...@@ -26,7 +26,7 @@ RESULT=$? ...@@ -26,7 +26,7 @@ RESULT=$?
if [ $RESULT -eq 0 ] if [ $RESULT -eq 0 ]
then then
${coreutils-output:echo} "`${coreutils-output:date} --iso-8601=seconds -u`, $${:statistic_log}, $${:hostname}, backup success" >> $${:status_log} ${coreutils-output:echo} "`${coreutils-output:date} --iso-8601=seconds -u`, $${:statistic_log}, $${:hostname}, backup success" >> $${:status_log}
${findutils-output:find} rdiff-backup-data/ -maxdepth 1 -name "session_statistic*" | ${coreutils-output:sort} | ${coreutils-output:tail} -n 1 | ${findutils-output:xargs} ${rdiff-backup-1.0.5:location}/bin/rdiff-backup --calculate-average > $${:statistic_log} ${findutils-output:find} rdiff-backup-data/ -maxdepth 1 -name "session_statistic*" | ${coreutils-output:sort} | ${coreutils-output:tail} -n 1 | ${findutils-output:xargs} ${rdiff-backup:location}/bin/rdiff-backup --calculate-average > $${:statistic_log}
else else
${coreutils-output:echo} "`${coreutils-output:date} --iso-8601=seconds -u`, $${:statistic_log}, $${:hostname}, backup failed" >> $${:status_log} ${coreutils-output:echo} "`${coreutils-output:date} --iso-8601=seconds -u`, $${:statistic_log}, $${:hostname}, backup failed" >> $${:status_log}
fi fi
...@@ -15,4 +15,4 @@ ...@@ -15,4 +15,4 @@
[instance.cfg.in] [instance.cfg.in]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 1e9012cb8476e00497b3fe9881158440 md5sum = 765bb15f322f5566a66d15baa8c68f9a
...@@ -159,7 +159,7 @@ command-line = ...@@ -159,7 +159,7 @@ command-line =
--render-try-index --render-try-index
--allow-all --allow-all
--auth-method basic --auth-method basic
--auth ${admin-password:user}:${admin-password:passwd-sha512-crypt}@/:rw --auth ${admin-password:user}:${admin-password:passwd-sha512-crypt}@/:rw,/pub:rw
--auth @/pub --auth @/pub
--tls-cert ${dufs-certificate:cert-file} --tls-cert ${dufs-certificate:cert-file}
--tls-key ${dufs-certificate:key-file} --tls-key ${dufs-certificate:key-file}
...@@ -189,9 +189,7 @@ return = domain secure_access ...@@ -189,9 +189,7 @@ return = domain secure_access
[frontend-available-promise] [frontend-available-promise]
<= check-url-available-promise <= check-url-available-promise
url = ${frontend:connection-secure_access} url = ${frontend-url:healthcheck-url}
check-secure = 1
[promises] [promises]
recipe = recipe =
...@@ -216,6 +214,9 @@ init = ...@@ -216,6 +214,9 @@ init =
assert not frontend_url.username assert not frontend_url.username
self.options['upload-url'] = frontend_url._replace( self.options['upload-url'] = frontend_url._replace(
netloc=f'{admin_user}:{admin_password}@{frontend_url.netloc}').geturl() netloc=f'{admin_user}:{admin_password}@{frontend_url.netloc}').geturl()
self.options['healthcheck-url'] = frontend_url._replace(
path='/__dufs__/health').geturl()
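# For reference, the health endpoint published above as healthcheck-url (and used by
# the frontend-available-promise) can also be queried by hand; a minimal sketch with
# placeholder host and CA file names:
#   curl --cacert ca.crt "https://<frontend-host>/__dufs__/health"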
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
......
...@@ -13,8 +13,8 @@ parts = ...@@ -13,8 +13,8 @@ parts =
[dufs] [dufs]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
shared = true shared = true
url = https://github.com/sigoden/dufs/archive/refs/tags/v0.40.0.tar.gz url = https://github.com/sigoden/dufs/archive/refs/tags/v0.43.0.tar.gz
md5sum = 3b71b3d07af69d6ba92c054625dc0dd2 md5sum = 77da2d3e5b5f7f159707db5c93ce8a9d
configure-command = : configure-command = :
make-binary = cargo install --root=%(location)s --path . --locked make-binary = cargo install --root=%(location)s --path . --locked
make-targets = make-targets =
......
...@@ -43,6 +43,7 @@ setup( ...@@ -43,6 +43,7 @@ setup(
install_requires=[ install_requires=[
'slapos.core', 'slapos.core',
'slapos.libnetworkcache', 'slapos.libnetworkcache',
'lxml',
'requests', 'requests',
], ],
zip_safe=True, zip_safe=True,
......
...@@ -25,7 +25,9 @@ ...@@ -25,7 +25,9 @@
# #
############################################################################## ##############################################################################
import base64
import contextlib import contextlib
import json
import io import io
import os import os
import pathlib import pathlib
...@@ -33,13 +35,15 @@ import subprocess ...@@ -33,13 +35,15 @@ import subprocess
import tempfile import tempfile
import urllib.parse import urllib.parse
import requests import requests
import lxml.html
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( pathlib.Path(__file__).parent.parent / 'software.cfg')
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestFileServer(SlapOSInstanceTestCase): class TestFileServer(SlapOSInstanceTestCase):
...@@ -63,6 +67,11 @@ class TestFileServer(SlapOSInstanceTestCase): ...@@ -63,6 +67,11 @@ class TestFileServer(SlapOSInstanceTestCase):
self.addCleanup(os.unlink, ca_cert.name) self.addCleanup(os.unlink, ca_cert.name)
return ca_cert.name return ca_cert.name
def _decode_index_content(self, response_text: str) -> dict:
index_data, = lxml.html.fromstring(
response_text).xpath('.//template[@id="index-data"]/text()')
return json.loads(base64.b64decode(index_data))
def test_anonymous_can_only_access_public(self): def test_anonymous_can_only_access_public(self):
resp = requests.get( resp = requests.get(
self.connection_parameters['public-url'], self.connection_parameters['public-url'],
...@@ -87,12 +96,13 @@ class TestFileServer(SlapOSInstanceTestCase): ...@@ -87,12 +96,13 @@ class TestFileServer(SlapOSInstanceTestCase):
urllib.parse.urljoin(self.connection_parameters['public-url'], '..'), urllib.parse.urljoin(self.connection_parameters['public-url'], '..'),
verify=self.ca_cert, verify=self.ca_cert,
) )
self.assertIn('pub', resp.text) self.assertEqual(
self.assertNotIn('secret', resp.text) [path['name'] for path in self._decode_index_content(resp.text)['paths']],
['pub'])
self.assertEqual(resp.status_code, requests.codes.ok) self.assertEqual(resp.status_code, requests.codes.ok)
def test_index(self): def test_index(self):
pub = pathlib.Path(self.computer_partition_root_path) / 'srv' / 'www' / 'pub' pub = self.computer_partition_root_path / 'srv' / 'www' / 'pub'
(pub / 'with-index').mkdir() (pub / 'with-index').mkdir()
(pub / 'with-index' / 'index.html').write_text('<html>Hello !</html>') (pub / 'with-index' / 'index.html').write_text('<html>Hello !</html>')
self.assertEqual( self.assertEqual(
...@@ -106,10 +116,14 @@ class TestFileServer(SlapOSInstanceTestCase): ...@@ -106,10 +116,14 @@ class TestFileServer(SlapOSInstanceTestCase):
(pub / 'without-index' / 'file.txt').write_text('Hello !') (pub / 'without-index' / 'file.txt').write_text('Hello !')
self.assertIn( self.assertIn(
'file.txt', 'file.txt',
requests.get( [path['name'] for path in
urllib.parse.urljoin(self.connection_parameters['public-url'], 'without-index/'), self._decode_index_content(
verify=self.ca_cert, requests.get(
).text) urllib.parse.urljoin(self.connection_parameters['public-url'], 'without-index/'),
verify=self.ca_cert,
).text)['paths']
]
)
def test_upload_file_refused_without_auth(self): def test_upload_file_refused_without_auth(self):
parsed_upload_url = urllib.parse.urlparse(self.connection_parameters['upload-url']) parsed_upload_url = urllib.parse.urlparse(self.connection_parameters['upload-url'])
...@@ -125,19 +139,21 @@ class TestFileServer(SlapOSInstanceTestCase): ...@@ -125,19 +139,21 @@ class TestFileServer(SlapOSInstanceTestCase):
self.assertEqual(resp.status_code, requests.codes.unauthorized) self.assertEqual(resp.status_code, requests.codes.unauthorized)
def test_upload_file(self): def test_upload_file(self):
resp = requests.put( for path in 'hello.txt', 'pub/hello.txt', 'create/intermediate/paths':
urllib.parse.urljoin(self.connection_parameters['upload-url'], 'hello.txt'), with self.subTest(path):
data=io.BytesIO(b'hello'), resp = requests.put(
verify=self.ca_cert, urllib.parse.urljoin(self.connection_parameters['upload-url'], path),
) data=io.BytesIO(b'hello'),
self.assertEqual(resp.status_code, requests.codes.created) verify=self.ca_cert,
)
self.assertEqual(resp.status_code, requests.codes.created)
resp = requests.get( resp = requests.get(
urllib.parse.urljoin(self.connection_parameters['upload-url'], 'hello.txt'), urllib.parse.urljoin(self.connection_parameters['upload-url'], path),
verify=self.ca_cert, verify=self.ca_cert,
) )
self.assertEqual(resp.text, 'hello') self.assertEqual(resp.text, 'hello')
self.assertEqual(resp.status_code, requests.codes.ok) self.assertEqual(resp.status_code, requests.codes.ok)
def test_renew_certificate(self): def test_renew_certificate(self):
def _getpeercert(): def _getpeercert():
...@@ -168,7 +184,7 @@ class TestFileServer(SlapOSInstanceTestCase): ...@@ -168,7 +184,7 @@ class TestFileServer(SlapOSInstanceTestCase):
# reprocess instance to get the new certificate, after removing the timestamp # reprocess instance to get the new certificate, after removing the timestamp
# to force execution # to force execution
(pathlib.Path(self.computer_partition_root_path) / '.timestamp').unlink() (self.computer_partition_root_path / '.timestamp').unlink()
self.waitForInstance() self.waitForInstance()
cert_after = _getpeercert() cert_after = _getpeercert()
......
...@@ -82,7 +82,6 @@ class TestRepozo(ZEOTestCase, CrontabMixin): ...@@ -82,7 +82,6 @@ class TestRepozo(ZEOTestCase, CrontabMixin):
with self.db() as db: with self.db() as db:
with db.transaction() as cnx: with db.transaction() as cnx:
cnx.root.state = "after backup" cnx.root.state = "after backup"
db.close()
restore_script = self.computer_partition_root_path / "srv" / "runner-import-restore" restore_script = self.computer_partition_root_path / "srv" / "runner-import-restore"
self.assertTrue(restore_script.exists()) self.assertTrue(restore_script.exists())
......
...@@ -14,7 +14,7 @@ ...@@ -14,7 +14,7 @@
# not need these here). # not need these here).
[instance.cfg] [instance.cfg]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 3ffdd78aeb77ab581c51ce419176dd37 md5sum = 3607ea995293975a736be136f0cdf675
[watcher] [watcher]
_update_hash_filename_ = watcher.in _update_hash_filename_ = watcher.in
...@@ -34,7 +34,7 @@ md5sum = c559a24ab6281268b608ed3bccb8e4ce ...@@ -34,7 +34,7 @@ md5sum = c559a24ab6281268b608ed3bccb8e4ce
[gitlab-parameters.cfg] [gitlab-parameters.cfg]
_update_hash_filename_ = gitlab-parameters.cfg _update_hash_filename_ = gitlab-parameters.cfg
md5sum = 95b18789111ed239146d243e39ffefbe md5sum = 16b25d654fe1f219a78d8a3da16b07dd
[gitlab-shell-config.yml.in] [gitlab-shell-config.yml.in]
_update_hash_filename_ = template/gitlab-shell-config.yml.in _update_hash_filename_ = template/gitlab-shell-config.yml.in
...@@ -54,7 +54,7 @@ md5sum = d769ea27820e932c596c35bbbf3f2902 ...@@ -54,7 +54,7 @@ md5sum = d769ea27820e932c596c35bbbf3f2902
[instance-gitlab.cfg.in] [instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in _update_hash_filename_ = instance-gitlab.cfg.in
md5sum = 6d8d20ded84622339d49c60b0e61380c md5sum = 35bb9f1d8f4fd6675bd768d8a7e1253c
[instance-gitlab-export.cfg.in] [instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in _update_hash_filename_ = instance-gitlab-export.cfg.in
...@@ -66,7 +66,7 @@ md5sum = 70612697434bf4fbe838fdf4fd867ed8 ...@@ -66,7 +66,7 @@ md5sum = 70612697434bf4fbe838fdf4fd867ed8
[nginx-gitlab-http.conf.in] [nginx-gitlab-http.conf.in]
_update_hash_filename_ = template/nginx-gitlab-http.conf.in _update_hash_filename_ = template/nginx-gitlab-http.conf.in
md5sum = 4980c1571a4dd7753aaa60d065270849 md5sum = b40b6d7948f4a54c45f2ecbb7e3d7a36
[nginx.conf.in] [nginx.conf.in]
_update_hash_filename_ = template/nginx.conf.in _update_hash_filename_ = template/nginx.conf.in
......
...@@ -96,7 +96,7 @@ configuration.nginx_proxy_connect_timeout = 300 ...@@ -96,7 +96,7 @@ configuration.nginx_proxy_connect_timeout = 300
# nginx advanced # nginx advanced
configuration.nginx_worker_processes = 4 configuration.nginx_worker_processes = 4
configuration.nginx_worker_connections = 10240 configuration.nginx_worker_connections = 10240
configuration.nginx_log_format = $remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" configuration.nginx_log_format = $trusted_remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent"
configuration.nginx_sendfile = on configuration.nginx_sendfile = on
configuration.nginx_tcp_nopush = on configuration.nginx_tcp_nopush = on
configuration.nginx_tcp_nodelay = on configuration.nginx_tcp_nodelay = on
...@@ -115,7 +115,13 @@ configuration.nginx_gzip_enabled = true ...@@ -115,7 +115,13 @@ configuration.nginx_gzip_enabled = true
# configuring trusted proxies # configuring trusted proxies
# GitLab is behind a reverse proxy, so we don't want the IP address of the proxy # GitLab is behind a reverse proxy, so we don't want the IP address of the proxy
# to show up as the client address (because rack attack blacklists the lab # to show up as the client address (because rack attack blacklists the lab
# frontend) # frontend). frontend-caucase-url-list offers a more manageable approach than listing
# all frontend IP addresses.
configuration.nginx_real_ip_trusted_addresses = configuration.nginx_real_ip_trusted_addresses =
configuration.nginx_real_ip_header = X-Forwarded-For configuration.nginx_real_ip_header = X-Forwarded-For
configuration.nginx_real_ip_recursive = off configuration.nginx_real_ip_recursive = off
# space-separated URLs of caucase services providing the CA used to validate frontend
# client certificates; a frontend is trusted if it presents a valid certificate.
configuration.frontend-caucase-url-list =
# rate limit of git project archive downloads, in requests per minute.
configuration.nginx_download_archive_rate_limit = 1
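# Both parameters above can also be set when requesting the instance; a minimal
# sketch with placeholder URLs, assuming the request-time parameter names drop the
# "configuration." prefix (as in the test suite of this software release):
#   slapos request my-gitlab <gitlab software.cfg URL> \
#     --parameters frontend-caucase-url-list=https://<caucase-host>/ \
#                  nginx_download_archive_rate_limit=5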
...@@ -748,6 +748,63 @@ copytruncate = true ...@@ -748,6 +748,63 @@ copytruncate = true
# Nginx frontend # # Nginx frontend #
###################### ######################
[frontend-caucase-ssl]
ca =
crl =
{% import "caucase" as caucase with context %}
{% set frontend_caucase_url_list = instance_parameter_dict.get('configuration.frontend-caucase-url-list', '').split() -%}
{% set frontend_caucase_url_hash_list = [] -%}
{% set frontend_caucase_updater_section_list = [] -%}
{% for frontend_caucase_url in frontend_caucase_url_list -%}
{% set hash = hashlib.md5(frontend_caucase_url.encode()).hexdigest() -%}
{% do frontend_caucase_url_hash_list.append(hash) -%}
{% set data_dir = '${nginx-ssl-dir:ssl}/%s' % hash -%}
{{ caucase.updater(
prefix='frontend-caucase-updater-%s' % hash,
buildout_bin_directory=buildout_bin_directory,
updater_path='${directory:service}/frontend-caucase-updater-%s' % hash,
url=frontend_caucase_url,
data_dir=data_dir,
ca_path='%s/ca.crt' % data_dir,
crl_path='%s/crl.pem' % data_dir,
on_renew='${frontend-caucase-updater-housekeeper:output}',
max_sleep=1,
openssl=openssl_bin,
)}}
{% do frontend_caucase_updater_section_list.append('frontend-caucase-updater-%s' % hash) -%}
{% endfor -%}
{% if frontend_caucase_url_hash_list %}
{% do frontend_caucase_updater_section_list.append('frontend-caucase-updater-housekeeper') -%}
[frontend-caucase-ssl]
ca = ${nginx-ssl-dir:ssl}/frontend_ca.crt
crl = ${nginx-ssl-dir:ssl}/frontend_crl.pem
[frontend-caucase-updater-housekeeper]
recipe = slapos.recipe.template
output = ${directory:bin}/frontend-caucase-updater-housekeeper
mode = 700
inline =
#!/bin/sh -e
# assemble all frontend CAs and CRLs into one file
CA=${frontend-caucase-ssl:ca}
:> $CA.tmp
CRL=${frontend-caucase-ssl:crl}
:> $CRL.tmp
{% for hash in frontend_caucase_url_hash_list %}
{% set data_dir = '${nginx-ssl-dir:ssl}/%s' % hash %}
echo "# {{ data_dir }}/ca.crt" >> $CA.tmp
cat "{{ data_dir }}/ca.crt" >> $CA.tmp
echo "# {{ data_dir }}/crl.pem" >> $CRL.tmp
cat "{{ data_dir }}/crl.pem" >> $CRL.tmp
{% endfor %}
mv $CA.tmp $CA
mv $CRL.tmp $CRL
kill -HUP $(cat ${directory:run}/nginx.pid)
{% endif %}
# srv/nginx/ prefix + etc/ log/ ... # srv/nginx/ prefix + etc/ log/ ...
[nginx-dir] [nginx-dir]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
...@@ -787,6 +844,9 @@ ssl = ${nginx-ssl-dir:ssl} ...@@ -787,6 +844,9 @@ ssl = ${nginx-ssl-dir:ssl}
cert_file = ${nginx-generate-certificate:cert_file} cert_file = ${nginx-generate-certificate:cert_file}
key_file = ${nginx-generate-certificate:key_file} key_file = ${nginx-generate-certificate:key_file}
client_ca_file = ${frontend-caucase-ssl:ca}
client_crl_file = ${frontend-caucase-ssl:crl}
[nginx-symlinks] [nginx-symlinks]
# (nginx wants <prefix>/logs to be there from start - else it issues alarm to the log) # (nginx wants <prefix>/logs to be there from start - else it issues alarm to the log)
...@@ -801,6 +861,9 @@ depend = ...@@ -801,6 +861,9 @@ depend =
${nginx-symlinks:recipe} ${nginx-symlinks:recipe}
${promise-nginx:recipe} ${promise-nginx:recipe}
${logrotate-entry-nginx:recipe} ${logrotate-entry-nginx:recipe}
{% for section in frontend_caucase_updater_section_list %}
{{ '${' ~ section ~ ':recipe}' }}
{% endfor %}
[promise-nginx] [promise-nginx]
...@@ -853,8 +916,6 @@ rake = ${gitlab-rake:wrapper-path} ...@@ -853,8 +916,6 @@ rake = ${gitlab-rake:wrapper-path}
# run command on every reinstantiation # run command on every reinstantiation
update-command = ${:command} update-command = ${:command}
# https://gitlab.com/gitlab-org/gitlab-foss/issues/38457
# we need to manually install ajv@^4.0.0 with yarn to fix the bug 'yarn check failed!'
command = command =
${:rake} gitlab:assets:clean && ${:rake} gitlab:assets:clean &&
${:rake} gettext:compile RAILS_ENV=production && ${:rake} gettext:compile RAILS_ENV=production &&
......
...@@ -41,8 +41,12 @@ configuration.icp_license = ...@@ -41,8 +41,12 @@ configuration.icp_license =
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 0644 mode = 0644
output= $${buildout:directory}/$${:_buildout_section_name_} output= $${buildout:directory}/$${:_buildout_section_name_}
extensions = jinja2.ext.do
import-list =
rawfile caucase ${caucase-jinja2-library:target}
context = context =
import os os import os os
import hashlib hashlib
import pwd pwd import pwd pwd
key bin_directory buildout:bin-directory key bin_directory buildout:bin-directory
......
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
extends = extends =
buildout.hash.cfg buildout.hash.cfg
../../stack/slapos.cfg ../../stack/slapos.cfg
../../stack/caucase/buildout.cfg
../../stack/nodejs.cfg ../../stack/nodejs.cfg
../../stack/monitor/buildout.cfg ../../stack/monitor/buildout.cfg
../../component/libgit2/buildout.cfg ../../component/libgit2/buildout.cfg
...@@ -54,6 +55,7 @@ parts = ...@@ -54,6 +55,7 @@ parts =
slapos-cookbook slapos-cookbook
eggs eggs
caucase-eggs
bash bash
curl curl
......
...@@ -37,6 +37,8 @@ upstream gitlab-workhorse { ...@@ -37,6 +37,8 @@ upstream gitlab-workhorse {
server unix:{{ gitlab_workhorse.socket }}; server unix:{{ gitlab_workhorse.socket }};
} }
limit_req_zone $trusted_remote_addr zone=downloadarchive:10m rate={{ cfg('nginx_download_archive_rate_limit') }}r/m;
{# not needed for us - the frontend can do the redirection and also {# not needed for us - the frontend can do the redirection and also
gitlab/nginx speaks HSTS on https port so when we access https port via http gitlab/nginx speaks HSTS on https port so when we access https port via http
protocol, it gets redirected to https protocol, it gets redirected to https
...@@ -76,11 +78,12 @@ server { ...@@ -76,11 +78,12 @@ server {
## https://raymii.org/s/tutorials/Strong_SSL_Security_On_nginx.html & https://cipherli.st/ ## https://raymii.org/s/tutorials/Strong_SSL_Security_On_nginx.html & https://cipherli.st/
ssl_certificate {{ nginx.cert_file }}; ssl_certificate {{ nginx.cert_file }};
ssl_certificate_key {{ nginx.key_file }}; ssl_certificate_key {{ nginx.key_file }};
{# we don't need - most root CA will be included by default
<% if @ssl_client_certificate %> {% if nginx.client_ca_file %}
ssl_client_certificate <%= @ssl_client_certificate%>; ssl_client_certificate {{ nginx.client_ca_file }};
<% end %> ssl_crl {{ nginx.client_crl_file }};
#} ssl_verify_client optional_no_ca;
{% endif %}
# GitLab needs backwards compatible ciphers to retain compatibility with Java IDEs # GitLab needs backwards compatible ciphers to retain compatibility with Java IDEs
# NOTE(slapos) ^^^ is not relevant for us - we are behind frontend and clients # NOTE(slapos) ^^^ is not relevant for us - we are behind frontend and clients
...@@ -110,6 +113,18 @@ server { ...@@ -110,6 +113,18 @@ server {
set_real_ip_from {{ trusted_address }}; set_real_ip_from {{ trusted_address }};
{% endfor %} {% endfor %}
## SlapOS: For Real IP, instead of trusting the frontends through their IP addresses,
## we expect the frontends to use a client certificate and we trust frontends only if
## we can validate that certificate.
set $trusted_remote_addr $remote_addr;
{% if nginx.client_ca_file %}
set_real_ip_from 0.0.0.0/0;
set_real_ip_from ::/0;
if ($ssl_client_verify != SUCCESS) {
set $trusted_remote_addr $realip_remote_addr;
}
{% endif %}
## HSTS Config ## HSTS Config
## https://www.nginx.com/blog/http-strict-transport-security-hsts-and-nginx/ ## https://www.nginx.com/blog/http-strict-transport-security-hsts-and-nginx/
{% if int(cfg("nginx_hsts_max_age")) > 0 -%} {% if int(cfg("nginx_hsts_max_age")) > 0 -%}
...@@ -148,6 +163,8 @@ server { ...@@ -148,6 +163,8 @@ server {
proxy_http_version 1.1; proxy_http_version 1.1;
limit_req_status 429;
{# we do not support relative URL - path is always "/" #} {# we do not support relative URL - path is always "/" #}
{% set path = "/" %} {% set path = "/" %}
...@@ -163,7 +180,21 @@ server { ...@@ -163,7 +180,21 @@ server {
{% if cfg_https %} {% if cfg_https %}
proxy_set_header X-Forwarded-Ssl on; proxy_set_header X-Forwarded-Ssl on;
{% endif %} {% endif %}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $trusted_remote_addr;
proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }};
proxy_pass http://gitlab-workhorse;
}
## archive downloads are rate limited.
location ~ /[^/]+/[^/]+/-/archive/.* {
limit_req zone=downloadarchive;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
{% if cfg_https %}
proxy_set_header X-Forwarded-Ssl on;
{% endif %}
proxy_set_header X-Forwarded-For $trusted_remote_addr;
proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }}; proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }};
proxy_pass http://gitlab-workhorse; proxy_pass http://gitlab-workhorse;
...@@ -188,7 +219,7 @@ server { ...@@ -188,7 +219,7 @@ server {
{% if cfg_https %} {% if cfg_https %}
proxy_set_header X-Forwarded-Ssl on; proxy_set_header X-Forwarded-Ssl on;
{% endif %} {% endif %}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $trusted_remote_addr;
proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }}; proxy_set_header X-Forwarded-Proto {{ "https" if cfg_https else "http" }};
proxy_pass http://gitlab-workhorse; proxy_pass http://gitlab-workhorse;
......
...@@ -26,65 +26,206 @@ ...@@ -26,65 +26,206 @@
############################################################################## ##############################################################################
import os import os
import requests
import functools import functools
import urllib.parse
import subprocess
import time
from typing import Optional, Tuple
import bs4 import bs4
from urllib.parse import urljoin import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.caucase import CaucaseCertificate, CaucaseService
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "software.cfg"))
os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))) )
class TestGitlab(SlapOSInstanceTestCase): class TestGitlab(SlapOSInstanceTestCase):
__partition_reference__ = 'G' # solve path too long for postgresql and unicorn __partition_reference__ = "G" # solve path too long for postgresql and unicorn
instance_max_retry = 50 # puma takes time to be ready
@classmethod @classmethod
def getInstanceSoftwareType(cls): def getInstanceSoftwareType(cls):
return 'gitlab' return "gitlab"
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return {'root-password': 'admin1234'} frontend_caucase = cls.getManagedResource("frontend_caucase", CaucaseService)
certificate = cls.getManagedResource("client_certificate", CaucaseCertificate)
certificate.request("shared frontend", frontend_caucase)
return {
"root-password": "admin1234",
"frontend-caucase-url-list": frontend_caucase.url,
}
def setUp(self): def setUp(self):
self.backend_url = self.computer_partition.getConnectionParameterDict( self.backend_url = self.computer_partition.getConnectionParameterDict()[
)['backend_url'] "backend_url"
]
def test_http_get(self): def test_http_get(self):
resp = requests.get(self.backend_url, verify=False) resp = requests.get(self.backend_url, verify=False)
self.assertTrue( self.assertTrue(resp.status_code in [requests.codes.ok, requests.codes.found])
resp.status_code in [requests.codes.ok, requests.codes.found])
def test_rack_attack_sign_in_rate_limiting(self): def test_rack_attack_sign_in_rate_limiting(self):
session = requests.session() client_certificate = self.getManagedResource(
"client_certificate", CaucaseCertificate
)
session = requests.Session()
session.cert = (client_certificate.cert_file, client_certificate.key_file)
# Load the login page to get a CSRF token. # Load the login page to get a CSRF token.
response = session.get(urljoin(self.backend_url, 'users/sign_in'), verify=False) response = session.get(
urllib.parse.urljoin(self.backend_url, "users/sign_in"), verify=False
)
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
# Extract the CSRF token and param. # Extract the CSRF token and param.
bsoup = bs4.BeautifulSoup(response.text, 'html.parser') bsoup = bs4.BeautifulSoup(response.text, "html.parser")
csrf_param = bsoup.find('meta', dict(name='csrf-param'))['content'] csrf_param = bsoup.find("meta", dict(name="csrf-param"))["content"]
csrf_token = bsoup.find('meta', dict(name='csrf-token'))['content'] csrf_token = bsoup.find("meta", dict(name="csrf-token"))["content"]
request_data = { request_data = {
'user[login]': 'test', "user[login]": "test",
'user[password]': 'random', "user[password]": "random",
csrf_param: csrf_token} csrf_param: csrf_token,
}
sign_in = functools.partial( sign_in = functools.partial(
session.post, session.post, response.url, data=request_data, verify=False
response.url, )
data=request_data,
verify=False)
for _ in range(10): for _ in range(10):
sign_in(headers={'X-Forwarded-For': '1.2.3.4'}) sign_in(headers={"X-Forwarded-For": "1.2.3.4"}).raise_for_status()
# after 10 authentication failures, this client is rate limited # after 10 authentication failures, this client is rate limited
self.assertEqual(sign_in(headers={'X-Forwarded-For': '1.2.3.4'}).status_code, 429) self.assertEqual(sign_in(headers={"X-Forwarded-For": "1.2.3.4"}).status_code, 429)
# but other clients are not # but other clients are not
self.assertNotEqual(sign_in(headers={'X-Forwarded-For': '5.6.7.8'}).status_code, 429) self.assertNotEqual(
sign_in(headers={"X-Forwarded-For": "5.6.7.8"}).status_code, 429
)
def _get_client_ip_address_from_nginx_log(
self, cert: Optional[Tuple[str, str]]
) -> str:
requests.get(
urllib.parse.urljoin(
self.backend_url,
f"/users/sign_in?request_id={self.id()}",
),
verify=False,
cert=cert,
headers={"X-Forwarded-For": "1.2.3.4"},
).raise_for_status()
nginx_log_file = (
self.computer_partition_root_path / "var" / "log" / "nginx" / "gitlab_access.log"
)
for _ in range(100):
last_log_line = nginx_log_file.read_text().splitlines()[-1]
if self.id() in last_log_line:
return last_log_line.split("-")[0].strip()
time.sleep(1)
raise RuntimeError(f"Could not find {self.id()} in {last_log_line=}")
def test_client_ip_in_nginx_log_with_certificate(self):
client_certificate = self.getManagedResource(
"client_certificate", CaucaseCertificate
)
self.assertEqual(
self._get_client_ip_address_from_nginx_log(
cert=(client_certificate.cert_file, client_certificate.key_file)
),
"1.2.3.4",
)
def test_client_ip_in_nginx_log_without_certificate(self):
self.assertNotEqual(
self._get_client_ip_address_from_nginx_log(cert=None),
"1.2.3.4",
)
def test_client_ip_in_nginx_log_with_not_verified_certificate(self):
another_unrelated_caucase = self.getManagedResource(
"another_unrelated_caucase", CaucaseService
)
unknown_client_certificate = self.getManagedResource(
"unknown_client_certificate", CaucaseCertificate
)
unknown_client_certificate.request(
"unknown client certificate", another_unrelated_caucase
)
self.assertNotEqual(
self._get_client_ip_address_from_nginx_log(
cert=(unknown_client_certificate.cert_file, unknown_client_certificate.key_file)
),
"1.2.3.4",
)
def test_download_archive_rate_limiting(self):
gitlab_rails_bin = self.computer_partition_root_path / 'bin' / 'gitlab-rails'
subprocess.check_call(
(gitlab_rails_bin,
'runner',
"user = User.find(1);" \
"token = user.personal_access_tokens.create(scopes: [:api], name: 'Root token');" \
"token.set_token('SLurtnxPscPsU-SDm4oN');" \
"token.save!"),
)
client_certificate = self.getManagedResource('client_certificate', CaucaseCertificate)
with requests.Session() as session:
session.cert = (client_certificate.cert_file, client_certificate.key_file)
session.verify = False
ret = session.post(
urllib.parse.urljoin(self.backend_url, '/api/v4/projects'),
data={
'name': 'sample-test',
'visibility': 'public',
},
headers={"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'},
)
ret.raise_for_status()
project_id = ret.json()['id']
session.post(
urllib.parse.urljoin(
self.backend_url, f"/api/v4/projects/{project_id}/repository/commits"
),
json={
"branch": "main",
"commit_message": "Add a file to test download archive",
"actions": [
{"action": "create", "file_path": "README.md", "content": "file content"}
],
},
headers={"PRIVATE-TOKEN": "SLurtnxPscPsU-SDm4oN"},
).raise_for_status()
for i, ext in enumerate(("zip", "tar.gz", "tar.bz2", "tar")):
headers = {"X-Forwarded-For": f"{i}.{i}.{i}.{i}"}
get = functools.partial(
session.get,
urllib.parse.urljoin(
self.backend_url,
f"/root/sample-test/-/archive/main/sample-test-main.{ext}",
),
headers=headers,
)
with self.subTest(ext):
get().raise_for_status()
self.assertEqual(get().status_code, 429)
self.assertEqual(
session.get(
urllib.parse.urljoin(
self.backend_url,
f"/root/sample-test/-/archive/invalidref/sample-test-invalidref.zip",
),
).status_code,
404,
)
...@@ -23,7 +23,7 @@ md5sum = b4330fbe0c9c3631f4f477c06d3460b3 ...@@ -23,7 +23,7 @@ md5sum = b4330fbe0c9c3631f4f477c06d3460b3
[instance-agent] [instance-agent]
filename = instance-agent.cfg.in filename = instance-agent.cfg.in
md5sum = 6bbc97cf8e752d22773d5f23ecdda37d md5sum = 9fc368a3ac16bee297a4f2ad87b3bbb4
[influxdb-config-file] [influxdb-config-file]
......
...@@ -137,17 +137,6 @@ init = ...@@ -137,17 +137,6 @@ init =
"tags": { "tags": {
"computer_id": slap_connection['computer-id'], "computer_id": slap_connection['computer-id'],
}, },
# built-in inputs
"cpu": {
"drop": ["cpu_time"],
"percpu": True,
"totalcpu": True,
},
"disk": {},
"io": {},
"mem": {},
"system": {},
"inputs": inputs, "inputs": inputs,
"processors": processors, "processors": processors,
...@@ -164,6 +153,20 @@ init = ...@@ -164,6 +153,20 @@ init =
}, },
}, },
} }
# built-in inputs
inputs["cpu"].append(
{
"drop": ["cpu_time"],
"percpu": True,
"totalcpu": True,
}
)
inputs["disk"].append({})
inputs["io"].append({})
inputs["mem"].append({})
inputs["net"].append({"ignore_protocol_stats": True})
inputs["system"].append({})
for application in slapparameter_dict.get("applications", []): for application in slapparameter_dict.get("applications", []):
partition_mapping = {} partition_mapping = {}
partition_root_directory = '' partition_root_directory = ''
...@@ -219,20 +222,20 @@ init = ...@@ -219,20 +222,20 @@ init =
"field_columns_include": ["failed_message_count"], "field_columns_include": ["failed_message_count"],
"tag_columns_include": ["cmf_activity_queue"], "tag_columns_include": ["cmf_activity_queue"],
}, },
{ # TODO: these queries are slow and maybe not correct
"query": """ # {
select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message.date)), 0) as int) # "query": """
as waiting_time, 'message' as cmf_activity_queue # select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message.date)), 0) as int)
from message where processing_node in (-1, 0) and message.message not like '%after_tag%' # as waiting_time, 'message' as cmf_activity_queue
union all # from message where processing_node in (-1, 0) and message.message not like '%after_tag%'
select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message_queue.date)), 0) as int) # union all
as waiting_time, 'message_queue' as cmf_activity_queue # select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message_queue.date)), 0) as int)
from message_queue where processing_node in (-1, 0) and message_queue.message not like '%after_tag%' # as waiting_time, 'message_queue' as cmf_activity_queue
""", # from message_queue where processing_node in (-1, 0) and message_queue.message not like '%after_tag%'
"field_columns_include": ["waiting_time"], # """,
"tag_columns_include": ["cmf_activity_queue"], # "field_columns_include": ["waiting_time"],
# "tag_columns_include": ["cmf_activity_queue"],
}, # },
], ],
"tags": dict( "tags": dict(
partition.get("static-tags", {}), partition.get("static-tags", {}),
......
...@@ -95,4 +95,4 @@ md5sum = e9d40162ba77472775256637a2617d14 ...@@ -95,4 +95,4 @@ md5sum = e9d40162ba77472775256637a2617d14
[boot-image-select-source-config] [boot-image-select-source-config]
filename = template/boot-image-select-source-config.json.in filename = template/boot-image-select-source-config.json.in
md5sum = 5dc0cbb8f8dccfdd5c52d0af4a2b2c48 md5sum = d53afe719e2cbfc2480277af340f8429
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
"Arch Linux 2020.09.01 x86_64" : "https://shacache.nxdcdn.com/fc17e8c6ae0790162f4beb8fa6226d945cff638429588999b3a08493ff27b280dc2939fba825ae04be1d9082ea8d7c3c002c5e4c39fbbcf88b8ab5104619e28a#ebcdb2223a77f098af3923fe1fa180aa", "Arch Linux 2020.09.01 x86_64" : "https://shacache.nxdcdn.com/fc17e8c6ae0790162f4beb8fa6226d945cff638429588999b3a08493ff27b280dc2939fba825ae04be1d9082ea8d7c3c002c5e4c39fbbcf88b8ab5104619e28a#ebcdb2223a77f098af3923fe1fa180aa",
"Fedora Server 32 netinst x86_64" : "https://shacache.nxdcdn.com/c5a511f349a1146b615e6fab9c24f9be4362046adcf24f0ff82c470d361fac5f6628895e2110ebf8ff87db49d4c413a0a332699da6b1bec64275e0c17a15b999#ca7a1e555c04b4d9a549065fa2ddf713", "Fedora Server 32 netinst x86_64" : "https://shacache.nxdcdn.com/c5a511f349a1146b615e6fab9c24f9be4362046adcf24f0ff82c470d361fac5f6628895e2110ebf8ff87db49d4c413a0a332699da6b1bec64275e0c17a15b999#ca7a1e555c04b4d9a549065fa2ddf713",
"FreeBSD 12.1 RELEASE bootonly x86_64" : "https://shacache.nxdcdn.com/6c355def68b3c0427f21598cb054ffc893568902f205601ac60f192854769b31bc9cff8eeb6ce99ef975a8fb887d8d3e56fc6cd5ea5cb4b3bba1175c520047cb#57088b77f795ca44b00971e44782ee23", "FreeBSD 12.1 RELEASE bootonly x86_64" : "https://shacache.nxdcdn.com/6c355def68b3c0427f21598cb054ffc893568902f205601ac60f192854769b31bc9cff8eeb6ce99ef975a8fb887d8d3e56fc6cd5ea5cb4b3bba1175c520047cb#57088b77f795ca44b00971e44782ee23",
"SUSE Linux Enterprise Server 15 SP6 x86_64": "https://shacache.nxdcdn.com/e72e03bbcc4c54ce4b8d5f360b47dab9ee514d754e8d78c403626cf000d6ae98d808b3bcff2201e3cf49c1be1b0f308f1cb5ed81676adcb1837dfc811d2451ac", "SUSE Linux Enterprise Server 15 SP6 x86_64": "https://shacache.nxdcdn.com/e72e03bbcc4c54ce4b8d5f360b47dab9ee514d754e8d78c403626cf000d6ae98d808b3bcff2201e3cf49c1be1b0f308f1cb5ed81676adcb1837dfc811d2451ac#ad2f29ff40fd245b50fe261a88039675",
} -%} } -%}
{%- if boot_image_url_select %} {%- if boot_image_url_select %}
{#- Fail in the promise if bad boot-image-url-select is set -#} {#- Fail in the promise if bad boot-image-url-select is set -#}
......
...@@ -366,6 +366,7 @@ class EdgeMixin(object): ...@@ -366,6 +366,7 @@ class EdgeMixin(object):
class TestEdgeBasic(EdgeMixin, SlapOSInstanceTestCase): class TestEdgeBasic(EdgeMixin, SlapOSInstanceTestCase):
instance_max_retry = 40
surykatka_dict = {} surykatka_dict = {}
def assertConnectionParameterDict(self): def assertConnectionParameterDict(self):
......
...@@ -133,7 +133,7 @@ inline = ...@@ -133,7 +133,7 @@ inline =
exec "$basedir/bin/mysqld" --defaults-file='{{defaults_file}}' "$@" exec "$basedir/bin/mysqld" --defaults-file='{{defaults_file}}' "$@"
[versions] [versions]
coverage = 7.5.1 coverage = 7.6.4
ecdsa = 0.13 ecdsa = 0.13
mysqlclient = 2.0.1 mysqlclient = 2.0.1
PyMySQL = 0.10.1 PyMySQL = 0.10.1
......
...@@ -232,7 +232,7 @@ ...@@ -232,7 +232,7 @@
"type": "string" "type": "string"
}, },
"health-check-http-method": { "health-check-http-method": {
"title": "Health Check HTTP Metod", "title": "Health Check HTTP Method",
"description": "Selects method to do the active check. CONNECT means that connection will be enough for the check, otherwise it's HTTP method.", "description": "Selects method to do the active check. CONNECT means that connection will be enough for the check, otherwise it's HTTP method.",
"enum": [ "enum": [
"GET", "GET",
......
...@@ -2,15 +2,19 @@ ...@@ -2,15 +2,19 @@
extends = extends =
buildout.hash.cfg buildout.hash.cfg
../../stack/slapos.cfg ../../stack/slapos.cfg
../../component/coreutils/buildout.cfg
../../component/curl/buildout.cfg
../../component/dash/buildout.cfg ../../component/dash/buildout.cfg
../../component/findutils/buildout.cfg
../../component/gzip/buildout.cfg ../../component/gzip/buildout.cfg
../../component/haproxy/buildout.cfg
../../component/logrotate/buildout.cfg ../../component/logrotate/buildout.cfg
../../component/nginx/buildout.cfg
../../component/openssl/buildout.cfg
../../component/rsyslogd/buildout.cfg
../../component/trafficserver/buildout.cfg ../../component/trafficserver/buildout.cfg
../../component/xz-utils/buildout.cfg ../../component/xz-utils/buildout.cfg
../../component/rsyslogd/buildout.cfg
../../component/haproxy/buildout.cfg
../../component/nginx/buildout.cfg
../../component/findutils/buildout.cfg
../../stack/caucase/buildout.cfg ../../stack/caucase/buildout.cfg
......
...@@ -32,6 +32,9 @@ PATH = ${git:location}/bin:%(PATH)s ...@@ -32,6 +32,9 @@ PATH = ${git:location}/bin:%(PATH)s
recipe = zc.recipe.egg:develop recipe = zc.recipe.egg:develop
setup = ${re6stnet-repository:location} setup = ${re6stnet-repository:location}
environment = re6stnet-setup-env environment = re6stnet-setup-env
setup-eggs =
editables
hatchling
[re6stnet] [re6stnet]
recipe = zc.recipe.egg recipe = zc.recipe.egg
......
...@@ -43,6 +43,11 @@ setup = ${slapos-repository:location}/ ...@@ -43,6 +43,11 @@ setup = ${slapos-repository:location}/
egg = slapos.test.backupserver egg = slapos.test.backupserver
setup = ${slapos-repository:location}/software/backupserver/test/ setup = ${slapos-repository:location}/software/backupserver/test/
[slapos.test.backupserver-agent-setup]
<= setup-develop-egg
egg = slapos.test.backupserver-agent
setup = ${slapos-repository:location}/software/backupserver/agent/test/
[slapos.test.clammit-setup] [slapos.test.clammit-setup]
<= setup-develop-egg <= setup-develop-egg
egg = slapos.test.clammit egg = slapos.test.clammit
...@@ -378,6 +383,7 @@ eggs += ...@@ -378,6 +383,7 @@ eggs +=
supervisor supervisor
${slapos.cookbook-setup:egg} ${slapos.cookbook-setup:egg}
${slapos.test.backupserver-setup:egg} ${slapos.test.backupserver-setup:egg}
${slapos.test.backupserver-agent-setup:egg}
# ${slapos.test.beremiz-ide-setup:egg} # ${slapos.test.beremiz-ide-setup:egg}
${slapos.test.beremiz-runtime-setup:egg} ${slapos.test.beremiz-runtime-setup:egg}
${slapos.test.caucase-setup:egg} ${slapos.test.caucase-setup:egg}
...@@ -469,6 +475,7 @@ context = ...@@ -469,6 +475,7 @@ context =
tests = tests =
json-schemas ${slapos.cookbook-setup:setup} json-schemas ${slapos.cookbook-setup:setup}
backupserver ${slapos.test.backupserver-setup:setup} backupserver ${slapos.test.backupserver-setup:setup}
backupserver-agent ${slapos.test.backupserver-agent-setup:setup}
# beremiz-ide ${slapos.test.beremiz-ide-setup:setup} # beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
beremiz-runtime ${slapos.test.beremiz-runtime-setup:setup} beremiz-runtime ${slapos.test.beremiz-runtime-setup:setup}
caucase ${slapos.test.caucase-setup:setup} caucase ${slapos.test.caucase-setup:setup}
......
...@@ -136,6 +136,9 @@ setup = ${rubygemsrecipe-repository:location} ...@@ -136,6 +136,9 @@ setup = ${rubygemsrecipe-repository:location}
<= setup-develop-egg <= setup-develop-egg
egg = re6stnet[test] egg = re6stnet[test]
setup = ${re6stnet-repository:location} setup = ${re6stnet-repository:location}
setup-eggs =
editables
hatchling
[extra-eggs] [extra-eggs]
eggs = eggs =
......
...@@ -15,11 +15,11 @@ ...@@ -15,11 +15,11 @@
[instance-theia] [instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in _update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = 23b498618bce83a6eb8df0470417f59e md5sum = b4e87cff99a8521e6d0b911e3ef35b30
[instance] [instance]
_update_hash_filename_ = instance.cfg.in _update_hash_filename_ = instance.cfg.in
md5sum = 5aab12790cdb1981cb0caf00d389a227 md5sum = 837eb2786f185ddb5a28d29e271652f7
[instance-import] [instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in _update_hash_filename_ = instance-import.cfg.jinja.in
......
...@@ -53,7 +53,11 @@ pidfiles = $${:var}/run ...@@ -53,7 +53,11 @@ pidfiles = $${:var}/run
statefiles = $${:var}/state statefiles = $${:var}/state
services = $${:etc}/service services = $${:etc}/service
{% if parameter_dict['testing-short-embedded-instance-path'] %}
runner = $${:home}/r
{% else %}
runner = $${:srv}/runner runner = $${:srv}/runner
{% endif %}
backup = $${:srv}/backup/theia backup = $${:srv}/backup/theia
project = $${:srv}/project project = $${:srv}/project
...@@ -567,8 +571,13 @@ ipv4 = {{ ipv4_random }} ...@@ -567,8 +571,13 @@ ipv4 = {{ ipv4_random }}
ipv6 = {{ slap_resource.get('ipv6-range-network') or ipv6_theia }} ipv6 = {{ slap_resource.get('ipv6-range-network') or ipv6_theia }}
port = $${slapos-standalone-port:port} port = $${slapos-standalone-port:port}
base-directory = $${directory:runner} base-directory = $${directory:runner}
{% if parameter_dict['testing-short-embedded-instance-path'] %}
software-root = $${directory:runner}/s
instance-root = $${directory:runner}/i
{% else %}
software-root = $${directory:runner}/software software-root = $${directory:runner}/software
instance-root = $${directory:runner}/instance instance-root = $${directory:runner}/instance
{% endif %}
local-software-release-root = $${directory:home} local-software-release-root = $${directory:home}
slapos-bin = ${buildout:bin-directory}/slapos slapos-bin = ${buildout:bin-directory}/slapos
slapos-configuration = $${directory:runner}/etc/slapos.cfg slapos-configuration = $${directory:runner}/etc/slapos.cfg
......
...@@ -55,7 +55,8 @@ default-parameters = ...@@ -55,7 +55,8 @@ default-parameters =
"additional-frontend-sr": "$${:frontend-sr}", "additional-frontend-sr": "$${:frontend-sr}",
"additional-frontend-sr-type": "default", "additional-frontend-sr-type": "default",
"additional-frontend-guid": null, "additional-frontend-guid": null,
"monitor-httpd-port": 8386 "monitor-httpd-port": 8386,
"testing-short-embedded-instance-path": null
} }
frontend-sr = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg frontend-sr = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
......
...@@ -24,7 +24,6 @@ ...@@ -24,7 +24,6 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# #
############################################################################## ##############################################################################
from __future__ import unicode_literals
import gzip import gzip
import json import json
...@@ -32,13 +31,12 @@ import os ...@@ -32,13 +31,12 @@ import os
import re import re
import subprocess import subprocess
import time import time
import unittest
import shutil import shutil
import requests import requests
import tempfile import tempfile
from datetime import datetime, timedelta from datetime import datetime, timedelta
from six.moves.urllib.parse import urljoin from urllib.parse import urljoin
from mimetypes import guess_type from mimetypes import guess_type
from json.decoder import JSONDecodeError from json.decoder import JSONDecodeError
...@@ -67,6 +65,21 @@ def setUpModule(): ...@@ -67,6 +65,21 @@ def setUpModule():
) )
class TestTheiaResilienceWithShortPaths(test_resiliency.TestTheiaResilience):
"""TestTheiaResilience, but with shorter paths for embedded slapos, to
overcome OS limits on the length of unix socket paths and #! "shebang" lines.
"""
@classmethod
def getInstanceParameterDict(cls):
return dict(
super().getInstanceParameterDict(),
**{'testing-short-embedded-instance-path': 'true'})
@classmethod
def _getSlapos(cls, instance_type='export'):
return cls.getPartitionPath(instance_type, 'r', 'bin', 'slapos')
class ERP5Mixin(object): class ERP5Mixin(object):
_test_software_url = erp5_software_release_url _test_software_url = erp5_software_release_url
_connexion_parameters_regex = re.compile(r"{.*}", re.DOTALL) _connexion_parameters_regex = re.compile(r"{.*}", re.DOTALL)
...@@ -242,7 +255,8 @@ class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience): ...@@ -242,7 +255,8 @@ class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience):
out = subprocess.check_output((mysql_bin, 'erp5', '-e', query), universal_newlines=True) out = subprocess.check_output((mysql_bin, 'erp5', '-e', query), universal_newlines=True)
self.assertIn(self._erp5_new_title, out, 'Mariadb catalog is not properly restored') self.assertIn(self._erp5_new_title, out, 'Mariadb catalog is not properly restored')
class TestTheiaResiliencePeertube(test_resiliency.TestTheiaResilience):
class TestTheiaResiliencePeertube(TestTheiaResilienceWithShortPaths):
test_instance_max_retries = 12 test_instance_max_retries = 12
backup_max_tries = 480 backup_max_tries = 480
backup_wait_interval = 60 backup_wait_interval = 60
...@@ -446,10 +460,11 @@ class TestTheiaResiliencePeertube(test_resiliency.TestTheiaResilience): ...@@ -446,10 +460,11 @@ class TestTheiaResiliencePeertube(test_resiliency.TestTheiaResilience):
def _getPeertubePartitionPath(self, instance_type, servicename, *paths): def _getPeertubePartitionPath(self, instance_type, servicename, *paths):
partition = self._getPeertubePartition(servicename) partition = self._getPeertubePartition(servicename)
return self.getPartitionPath( return self.getPartitionPath(
instance_type, 'srv', 'runner', 'instance', partition, *paths) instance_type, 'r', 'i', partition, *paths)
class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
test_instance_max_retries = 12 class TestTheiaResilienceGitlab(TestTheiaResilienceWithShortPaths):
test_instance_max_retries = 50 # puma takes time to be ready
backup_max_tries = 480 backup_max_tries = 480
backup_wait_interval = 60 backup_wait_interval = 60
_connection_parameters_regex = re.compile(r"{.*}", re.DOTALL) _connection_parameters_regex = re.compile(r"{.*}", re.DOTALL)
...@@ -467,7 +482,7 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -467,7 +482,7 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
text=True, text=True,
) )
print(out) self.logger.info("_getGitlabConnectionParameters output: %s", out)
return json.loads(self._connection_parameters_regex.search(out).group(0).replace("'", '"')) return json.loads(self._connection_parameters_regex.search(out).group(0).replace("'", '"'))
def test_twice(self): def test_twice(self):
...@@ -499,7 +514,7 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -499,7 +514,7 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
# Create a new project # Create a new project
print("Gitlab create a project") print("Gitlab create a project")
path = '/api/v3/projects' path = '/api/v4/projects'
parameter_dict = {'name': 'sample-test', 'namespace': 'open'} parameter_dict = {'name': 'sample-test', 'namespace': 'open'}
# Token can be set manually # Token can be set manually
headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'} headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'}
...@@ -508,14 +523,14 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -508,14 +523,14 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
# Check that the project exists # Check that the project exists
print("Gitlab check project exists") print("Gitlab check project exists")
path = '/api/v3/projects' path = '/api/v4/projects'
response = requests.get(backend_url + path, headers=headers, verify=False) response = requests.get(backend_url + path, params={'search': 'sample-test'}, headers=headers, verify=False)
try: try:
projects = response.json() projects = response.json()
except JSONDecodeError: except JSONDecodeError:
self.fail("No json file returned! Maybe your Gitlab URL is incorrect.") self.fail("No json file returned! Maybe your Gitlab URL is incorrect.")
# Only one project exist # Only one project matches the search
self.assertEqual(len(projects), 1) self.assertEqual(len(projects), 1)
# The project name is sample-test, which we created above. # The project name is sample-test, which we created above.
self.assertIn("sample-test", projects[0]['name_with_namespace']) self.assertIn("sample-test", projects[0]['name_with_namespace'])
...@@ -543,12 +558,14 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -543,12 +558,14 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
output = subprocess.check_output(('git', 'push', 'origin'), cwd=repo_path, universal_newlines=True) output = subprocess.check_output(('git', 'push', 'origin'), cwd=repo_path, universal_newlines=True)
# Do a fake periodically update # Do a fake periodically update
# Compute backup date in the near future # Compute backup date in the future
soon = (datetime.now() + timedelta(minutes=4)) # During slapos node instance, the static assets are recompiled, which takes a lot
# of time, so we give it at least 20 minutes.
soon = (datetime.now() + timedelta(minutes=20))
frequency = "%d * * * *" % soon.minute frequency = "%d * * * *" % soon.minute
params = 'backup_frequency=%s' % frequency params = 'backup_frequency=%s' % frequency
# Update Peertube parameters # Update Gitlab parameters
print('Requesting Gitlab with parameters %s' % params) print('Requesting Gitlab with parameters %s' % params)
self.checkSlapos('request', 'test_instance', self._test_software_url, '--parameters', params) self.checkSlapos('request', 'test_instance', self._test_software_url, '--parameters', params)
...@@ -557,8 +574,8 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -557,8 +574,8 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
self.callSlapos('node', 'restart', 'all') self.callSlapos('node', 'restart', 'all')
# Wait until after the programmed backup date, and a bit more # Wait until after the programmed backup date, and a bit more
t = (soon - datetime.now()).total_seconds() t = ((soon - datetime.now()) + timedelta(minutes=10)).total_seconds()
time.sleep(t + 240) time.sleep(t)
self.callSlapos('node', 'status') self.callSlapos('node', 'status')
os.chdir(self.temp_clone_dir) os.chdir(self.temp_clone_dir)
...@@ -583,9 +600,9 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -583,9 +600,9 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
# Check that the project exists # Check that the project exists
print("Gitlab check project exists") print("Gitlab check project exists")
path = '/api/v3/projects' path = '/api/v4/projects'
headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'} headers = {"PRIVATE-TOKEN" : 'SLurtnxPscPsU-SDm4oN'}
response = requests.get(backend_url + path, headers=headers, verify=False) response = requests.get(backend_url + path, params={'search': 'sample-test'}, headers=headers, verify=False)
try: try:
projects = response.json() projects = response.json()
except JSONDecodeError: except JSONDecodeError:
...@@ -623,4 +640,4 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience): ...@@ -623,4 +640,4 @@ class TestTheiaResilienceGitlab(test_resiliency.TestTheiaResilience):
def _getGitlabPartitionPath(self, instance_type, servicename, *paths): def _getGitlabPartitionPath(self, instance_type, servicename, *paths):
partition = self._getGitlabPartition(servicename) partition = self._getGitlabPartition(servicename)
return self.getPartitionPath( return self.getPartitionPath(
instance_type, 'srv', 'runner', 'instance', partition, *paths) instance_type, 'r', 'i', partition, *paths)
...@@ -72,34 +72,42 @@ def setUpModule(): ...@@ -72,34 +72,42 @@ def setUpModule():
class ResilientTheiaTestCase(ResilientTheiaMixin, TheiaTestCase): class ResilientTheiaTestCase(ResilientTheiaMixin, TheiaTestCase):
@classmethod @classmethod
def _processEmbeddedInstance(cls, retries=0, instance_type='export'): def _processEmbeddedInstance(cls, retries=0, instance_type='export'):
for _ in range(retries): for retry in range(retries):
try: try:
output = cls.captureSlapos('node', 'instance', instance_type=instance_type, stderr=subprocess.STDOUT) output = cls.captureSlapos('node', 'instance', instance_type=instance_type, stderr=subprocess.STDOUT, text=True)
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
continue continue
print(output) cls.logger.info("_processEmbeddedInstance retry=%s output=%s", retry, output)
break break
else: else:
if retries: if retries:
# Sleep a bit as an attempt to work around monitoring bootstrap not being ready # Sleep a bit as an attempt to work around monitoring bootstrap not being ready
print("Wait before running slapos node instance one last time") print("Wait before running slapos node instance one last time")
time.sleep(120) time.sleep(120)
cls.checkSlapos('node', 'instance', instance_type=instance_type) try:
cls.checkSlapos('node', 'instance', instance_type=instance_type, text=True)
except subprocess.CalledProcessError as e:
cls.logger.error(e.output, exc_info=True)
raise
@classmethod @classmethod
def _processEmbeddedSoftware(cls, retries=0, instance_type='export'): def _processEmbeddedSoftware(cls, retries=0, instance_type='export'):
for _ in range(retries): for retry in range(retries):
try: try:
output = cls.captureSlapos('node', 'software', instance_type=instance_type, stderr=subprocess.STDOUT) output = cls.captureSlapos('node', 'software', instance_type=instance_type, stderr=subprocess.STDOUT, text=True)
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
continue continue
print(output) cls.logger.info("_processEmbeddedSoftware retry=%s output=%s", retry, output)
break break
else: else:
if retries: if retries:
print("Wait before running slapos node software one last time") print("Wait before running slapos node software one last time")
time.sleep(120) time.sleep(120)
cls.checkSlapos('node', 'software', instance_type=instance_type) try:
cls.checkSlapos('node', 'software', instance_type=instance_type, text=True)
except subprocess.CalledProcessError as e:
cls.logger.error(e.output, exc_info=True)
raise
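Both helpers lean on Python's for/else: the else branch runs only when no captureSlapos attempt succeeded (the loop never hit break, or retries was 0), and only then is the strict checkSlapos call made. A stripped-down sketch of the pattern, with hypothetical run_once/run_strict callables standing in for the slapos wrappers:

# Sketch of the retry pattern used by both helpers above; run_once and
# run_strict are hypothetical stand-ins for captureSlapos/checkSlapos.
import subprocess
import time

def retry_then_run_strictly(run_once, run_strict, retries=0, wait=120):
    for retry in range(retries):
        try:
            output = run_once()                 # tolerated attempt, output captured
        except subprocess.CalledProcessError:
            continue                            # swallow failures while retries remain
        print("attempt %d succeeded: %s" % (retry, output))
        break
    else:
        # Reached only when no attempt broke out of the loop (or retries == 0).
        if retries:
            time.sleep(wait)                    # let monitoring bootstrap settle
        run_strict()                            # final attempt; raises on failure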
@classmethod @classmethod
def _deployEmbeddedSoftware(cls, software_url, instance_name, retries=0, instance_type='export'): def _deployEmbeddedSoftware(cls, software_url, instance_name, retries=0, instance_type='export'):
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
extends = extends =
# versions pins from zope, vendored with: # versions pins from zope, vendored with:
# curl https://zopefoundation.github.io/Zope/releases/5.10/versions-prod.cfg > zope-versions.cfg # curl https://zopefoundation.github.io/Zope/releases/5.11.1/versions-prod.cfg > zope-versions.cfg
# When updating, keep in mind that some versions are defined in other places, # When updating, keep in mind that some versions are defined in other places,
# for example component/ZEO , component/ZODB and stack/slapos # for example component/ZEO , component/ZODB and stack/slapos
zope-versions.cfg zope-versions.cfg
...@@ -619,7 +619,7 @@ eggs = ...@@ -619,7 +619,7 @@ eggs =
Products.ZSQLMethods Products.ZSQLMethods
Products.ExternalMethod Products.ExternalMethod
Products.SiteErrorLog Products.SiteErrorLog
tempstorage Products.TemporaryFolder
Products.Sessions Products.Sessions
Products.ZODBMountPoint Products.ZODBMountPoint
Record Record
...@@ -662,16 +662,12 @@ SOAPpy-py3-patches = ${:_profile_base_location_}/../../component/egg-patch/SOAPp ...@@ -662,16 +662,12 @@ SOAPpy-py3-patches = ${:_profile_base_location_}/../../component/egg-patch/SOAPp
SOAPpy-py3-patch-options = -p1 SOAPpy-py3-patch-options = -p1
[eggs:python3] [eggs:python3]
AccessControl-patches = ${:_profile_base_location_}/../../component/egg-patch/AccessControl/157.patch#9b01341bd4271555c9caa66cb9d0f098
AccessControl-patch-options = -p1
interval-patches = ${:_profile_base_location_}/../../component/egg-patch/interval/0001-python3-support.patch#66ac345f0a6d73e0bd29e394b7646311 interval-patches = ${:_profile_base_location_}/../../component/egg-patch/interval/0001-python3-support.patch#66ac345f0a6d73e0bd29e394b7646311
interval-patch-options = -p1 interval-patch-options = -p1
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method-3.0.0.patch#4cc8607213b1ef08331366d9873becaa Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method-3.0.0.patch#4cc8607213b1ef08331366d9873becaa
Products.DCWorkflow-patch-options = -p1 Products.DCWorkflow-patch-options = -p1
Products.MimetypesRegistry-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.MimetypesRegistry/40.patch#1e85995d08747f73df5ea7353a41453d Products.MimetypesRegistry-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.MimetypesRegistry/40.patch#1e85995d08747f73df5ea7353a41453d
Products.MimetypesRegistry-patch-options = -p1 Products.MimetypesRegistry-patch-options = -p1
Products.PythonScripts-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.PythonScripts/65.patch#61bd90d4c1ead3669bfe7c959d957ab6
Products.PythonScripts-patch-options = -p1
[eggs:python2] [eggs:python2]
DateTime-patches = DateTime-patches =
...@@ -755,8 +751,7 @@ depends = ...@@ -755,8 +751,7 @@ depends =
# neoppod, mysqlclient, slapos.recipe.template # neoppod, mysqlclient, slapos.recipe.template
# patched eggs # patched eggs
AccessControl = 7.0+SlapOSPatched001 Acquisition = 6.1+SlapOSPatched001
Acquisition = 5.2+SlapOSPatched001
PyPDF2 = 1.26.0+SlapOSPatched002 PyPDF2 = 1.26.0+SlapOSPatched002
pysvn = 1.9.15+SlapOSPatched001 pysvn = 1.9.15+SlapOSPatched001
python-magic = 0.4.12+SlapOSPatched001 python-magic = 0.4.12+SlapOSPatched001
...@@ -773,7 +768,7 @@ dask = 0.18.1 ...@@ -773,7 +768,7 @@ dask = 0.18.1
deepdiff = 6.7.1 deepdiff = 6.7.1
dill = 0.3.8:whl dill = 0.3.8:whl
docutils = 0.17.1 docutils = 0.17.1
erp5-coverage-plugin = 0.0.1 erp5-coverage-plugin = 0.0.2
erp5diff = 0.8.1.9 erp5diff = 0.8.1.9
facebook-sdk = 2.0.0 facebook-sdk = 2.0.0
five.formlib = 1.0.4 five.formlib = 1.0.4
...@@ -818,10 +813,11 @@ Products.MailHost = 5.2 ...@@ -818,10 +813,11 @@ Products.MailHost = 5.2
Products.MimetypesRegistry = 3.0.1+SlapOSPatched001 Products.MimetypesRegistry = 3.0.1+SlapOSPatched001
Products.PluggableAuthService = 3.0 Products.PluggableAuthService = 3.0
Products.PluginRegistry = 2.0 Products.PluginRegistry = 2.0
Products.PythonScripts = 5.0+SlapOSPatched001 Products.PythonScripts = 5.1
Products.Sessions = 5.0 Products.Sessions = 5.0
Products.SiteErrorLog = 6.0 Products.SiteErrorLog = 6.0
Products.StandardCacheManagers = 5.0 Products.StandardCacheManagers = 5.0
Products.TemporaryFolder = 7.0
Products.ZCatalog = 7.0 Products.ZCatalog = 7.0
Products.ZODBMountPoint = 2.0 Products.ZODBMountPoint = 2.0
Products.ZSQLMethods = 4.1 Products.ZSQLMethods = 4.1
...@@ -890,10 +886,6 @@ zope.password = 4.4 ...@@ -890,10 +886,6 @@ zope.password = 4.4
zope.sendmail = 6.1 zope.sendmail = 6.1
zope.session = 4.5 zope.session = 4.5
# temporary versions, until updated in zope-versions.cfg
[versions]
DateTime = 5.5
[versions:python2] [versions:python2]
AccessControl = 4.4 AccessControl = 4.4
...@@ -905,6 +897,7 @@ Chameleon = 3.9.1 ...@@ -905,6 +897,7 @@ Chameleon = 3.9.1
DateTime = 4.9+SlapOSPatched004 DateTime = 4.9+SlapOSPatched004
deepdiff = 3.3.0 deepdiff = 3.3.0
DocumentTemplate = 3.4 DocumentTemplate = 3.4
erp5-coverage-plugin = 0.0.1
ExtensionClass = 4.9 ExtensionClass = 4.9
five.globalrequest = 99.1 five.globalrequest = 99.1
five.localsitemanager = 3.4 five.localsitemanager = 3.4
...@@ -939,6 +932,7 @@ Products.PythonScripts = 4.15 ...@@ -939,6 +932,7 @@ Products.PythonScripts = 4.15
Products.Sessions = 4.15 Products.Sessions = 4.15
Products.SiteErrorLog = 5.7 Products.SiteErrorLog = 5.7
Products.StandardCacheManagers = 4.2 Products.StandardCacheManagers = 4.2
Products.TemporaryFolder = 3.0
Products.TIDStorage = 5.5.0 Products.TIDStorage = 5.5.0
Products.ZCatalog = 5.4 Products.ZCatalog = 5.4
Products.ZODBMountPoint = 1.3 Products.ZODBMountPoint = 1.3
......
...@@ -94,7 +94,7 @@ md5sum = 9547bacad0635b0f64cac48f15c4e9ae ...@@ -94,7 +94,7 @@ md5sum = 9547bacad0635b0f64cac48f15c4e9ae
[template-balancer] [template-balancer]
filename = instance-balancer.cfg.in filename = instance-balancer.cfg.in
md5sum = 48b8b8b4b87973beaa1fd6299244ebd6 md5sum = 409a7505548576ebf0e4d5cc218e0753
[template-haproxy-cfg] [template-haproxy-cfg]
filename = haproxy.cfg.in filename = haproxy.cfg.in
......
...@@ -219,6 +219,7 @@ ca-cert = ${haproxy-conf-ssl:ca-cert} ...@@ -219,6 +219,7 @@ ca-cert = ${haproxy-conf-ssl:ca-cert}
crl = ${haproxy-conf-ssl:crl} crl = ${haproxy-conf-ssl:crl}
{% endif %} {% endif %}
stats-socket = ${directory:run}/ha.sock stats-socket = ${directory:run}/ha.sock
admin-socket = ${directory:run}/haa.sock
path-routing-list = {{ dumps(slapparameter_dict['path-routing-list']) }} path-routing-list = {{ dumps(slapparameter_dict['path-routing-list']) }}
family-path-routing-dict = {{ dumps(slapparameter_dict['family-path-routing-dict']) }} family-path-routing-dict = {{ dumps(slapparameter_dict['family-path-routing-dict']) }}
pidfile = ${directory:run}/haproxy.pid pidfile = ${directory:run}/haproxy.pid
...@@ -336,18 +337,60 @@ context = ...@@ -336,18 +337,60 @@ context =
extensions = jinja2.ext.do extensions = jinja2.ext.do
[haproxy-reload] [haproxy-reload]
recipe = collective.recipe.template recipe = slapos.recipe.template
output = ${directory:bin}/${:_buildout_section_name_} output = ${directory:bin}/${:_buildout_section_name_}
mode = 700 mode = 700
input = inline =
inline: #!${buildout:executable}
#!/bin/sh """Restarts haproxy and waits for all workers to have been restarted"""
kill -USR2 $(cat "${haproxy-cfg-parameter-dict:pidfile}") import errno
import contextlib
import socket
import sys
import time
ADMIN_SOCKET = '''${haproxy-cfg-parameter-dict:admin-socket}'''
def send_command(command, connect_retries=10):
  with contextlib.closing(socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)) as sock:
    while True:
      connect_retries = connect_retries - 1
      try:
        sock.connect(ADMIN_SOCKET)
      except OSError as e:
        if e.errno != errno.ECONNREFUSED:
          raise
        if not connect_retries:
          raise
        time.sleep(1)
      else:
        break
    sock.sendall((command + "\nquit\n").encode())
    response = b""
    while True:
      data = sock.recv(4096)
      if not data:
        break
      response += data
    return response.decode()
send_command("reload")
for _ in range(360):
  time.sleep(1)
  proc = send_command("show proc")
  if "old workers" not in proc:
    sys.exit(0)
  print(proc)
sys.exit(1)
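For context, this script drives haproxy's master CLI, which the wrapper below now exposes through the -S option: "reload" asks the master process to re-execute, and "show proc" keeps listing an "old workers" section while pre-reload workers are still draining. A minimal standalone sketch of the same socket exchange (example socket path, not the value rendered from the template):

# Ad-hoc query against the haproxy master CLI, mirroring send_command() above.
# The socket path is an example; in the template it comes from
# ${haproxy-cfg-parameter-dict:admin-socket}.
import contextlib
import socket

def master_cli(command, path='/tmp/haa.sock'):
    with contextlib.closing(socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)) as sock:
        sock.connect(path)
        sock.sendall((command + "\nquit\n").encode())
        chunks = []
        while True:
            data = sock.recv(4096)
            if not data:
                break
            chunks.append(data)
    return b"".join(chunks).decode()

# The reload is considered finished once "show proc" stops reporting old workers.
print(master_cli("show proc"))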
[{{ section('haproxy') }}] [{{ section('haproxy') }}]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services-on-watch}/haproxy wrapper-path = ${directory:services-on-watch}/haproxy
command-line = "{{ parameter_dict['haproxy'] }}/sbin/haproxy" -f "${haproxy-cfg:output}" command-line =
"{{ parameter_dict['haproxy'] }}/sbin/haproxy"
-S ${haproxy-cfg-parameter-dict:admin-socket},level,operator
-f "${haproxy-cfg:output}"
hash-files = ${haproxy-cfg:output} hash-files = ${haproxy-cfg:output}
[{{ section('haproxy-socat-stats')}}] [{{ section('haproxy-socat-stats')}}]
......
...@@ -2,43 +2,43 @@ ...@@ -2,43 +2,43 @@
# Version pins for required and commonly used dependencies. # Version pins for required and commonly used dependencies.
[versions] [versions]
Zope = 5.10 Zope = 5.11.1
Zope2 = 4.0 Zope2 = 4.0
AccessControl = 6.3 AccessControl = 7.2
Acquisition = 5.1 Acquisition = 6.1
AuthEncoding = 5.0 AuthEncoding = 5.0
BTrees = 5.1 BTrees = 6.1
Chameleon = 4.2.0 Chameleon = 4.4.4
DateTime = 5.3 DateTime = 5.5
DocumentTemplate = 4.6 DocumentTemplate = 4.6
ExtensionClass = 5.1 ExtensionClass = 6.0
MultiMapping = 5.0 MultiMapping = 5.0
Paste = 3.7.1 Paste = 3.10.1
PasteDeploy = 3.1.0 PasteDeploy = 3.1.0
Persistence = 4.1 Persistence = 5.1
RestrictedPython = 7.1 RestrictedPython = 7.4
WebTest = 3.0.0 WebTest = 3.0.1
WSGIProxy2 = 0.5.1 WSGIProxy2 = 0.5.1
WebOb = 1.8.7 WebOb = 1.8.9
ZConfig = 4.0 ZConfig = 4.1
ZODB = 5.8.1 ZODB = 6.0
beautifulsoup4 = 4.12.2 beautifulsoup4 = 4.12.3
cffi = 1.16.0 cffi = 1.17.1
multipart = 0.2.4 multipart = 0.2.5
persistent = 5.1 persistent = 6.1
pycparser = 2.21 pycparser = 2.22
python-gettext = 5.0 python-gettext = 5.0
pytz = 2023.3.post1 pytz = 2024.2
six = 1.16.0 six = 1.16.0
roman = 4.1 roman = 4.2
soupsieve = 2.5 soupsieve = 2.6
transaction = 4.0 transaction = 5.0
waitress = 2.1.2 waitress = 3.0.1
z3c.pt = 4.0 z3c.pt = 4.4
zExceptions = 5.0 zExceptions = 5.0
zc.lockfile = 3.0.post1 zc.lockfile = 3.0.post1
zc.recipe.egg = 2.0.7 zc.recipe.egg = 2.0.7
zodbpickle = 3.1 zodbpickle = 4.1.1
zope.annotation = 5.0 zope.annotation = 5.0
zope.browser = 3.0 zope.browser = 3.0
zope.browsermenu = 5.0 zope.browsermenu = 5.0
...@@ -46,47 +46,45 @@ zope.browserpage = 5.0 ...@@ -46,47 +46,45 @@ zope.browserpage = 5.0
zope.browserresource = 5.1 zope.browserresource = 5.1
zope.cachedescriptors = 5.0 zope.cachedescriptors = 5.0
zope.component = 6.0 zope.component = 6.0
zope.configuration = 5.0 zope.configuration = 5.0.1
zope.container = 5.2 zope.container = 6.1
zope.contentprovider = 5.0 zope.contentprovider = 6.0
zope.contenttype = 5.1 zope.contenttype = 5.1
zope.datetime = 5.0.0 zope.datetime = 5.0.0
zope.deferredimport = 5.0 zope.deferredimport = 5.0
zope.deprecation = 5.0 zope.deprecation = 5.0
zope.dottedname = 6.0 zope.dottedname = 6.0
zope.event = 5.0 zope.event = 5.0
zope.exceptions = 5.0.1 zope.exceptions = 5.2
zope.filerepresentation = 6.0 zope.filerepresentation = 6.0
zope.globalrequest = 2.0 zope.globalrequest = 2.0
zope.hookable = 6.0 zope.hookable = 7.0
zope.i18n = 5.1 zope.i18n = 5.2
zope.i18nmessageid = 6.1.0 zope.i18nmessageid = 7.0
zope.interface = 6.3 zope.interface = 7.1.1
zope.lifecycleevent = 5.0 zope.lifecycleevent = 5.0
zope.location = 5.0 zope.location = 5.0
zope.pagetemplate = 5.0 zope.pagetemplate = 5.1
zope.processlifetime = 3.0 zope.processlifetime = 3.0
zope.proxy = 5.1 zope.proxy = 6.1
zope.ptresource = 5.0 zope.ptresource = 5.0
zope.publisher = 7.0 zope.publisher = 7.1
zope.schema = 7.0.1 zope.schema = 7.0.1
zope.security = 6.2 zope.security = 7.3
zope.sequencesort = 5.0 zope.sequencesort = 5.0
zope.site = 5.0 zope.site = 5.0
zope.size = 5.0 zope.size = 5.0
zope.structuredtext = 5.0 zope.structuredtext = 5.0
zope.tal = 5.0.1 zope.tal = 5.0.1
zope.tales = 6.0 zope.tales = 6.0
zope.testbrowser = 6.0 zope.testbrowser = 7.0
zope.testing = 5.0.1 zope.testing = 5.0.1
zope.traversing = 5.0 zope.traversing = 5.0
zope.viewlet = 5.0 zope.viewlet = 5.0
## XXX our old buildout for bootstrap does not support `python37` ## XXX our old buildout for bootstrap does not support `python38`
## [versions:python37] ## [versions:python38]
## # PasteDeploy 3.x works on Python 3.7 but pulls tons of dependencies ## # Chameleon >= 4.5 requires Python 3.9
## PasteDeploy = 2.1.1 ## Chameleon = 4.4.4
## # SoupSieve 2.5 and up requires Python 3.8 ## # waitress >= 3.0.1 requires Python 3.9
## soupsieve = 2.4.1 ## waitress = 3.0.0
## # cffi 1.16.0 requires Python 3.8
## cffi = 1.15.1
...@@ -30,7 +30,7 @@ md5sum = 1b8645835f04081861266436505fd28f ...@@ -30,7 +30,7 @@ md5sum = 1b8645835f04081861266436505fd28f
[template-replicated] [template-replicated]
filename = template-replicated.cfg.in filename = template-replicated.cfg.in
md5sum = 67c863b15dbfa937babdbd620f95c1ff md5sum = 743012b9e8d318712187621867613bd4
[template-parts] [template-parts]
filename = template-parts.cfg.in filename = template-parts.cfg.in
......
...@@ -87,7 +87,7 @@ return = ssh-public-key resilient-ssh-url notification-url ip takeover-url takeo ...@@ -87,7 +87,7 @@ return = ssh-public-key resilient-ssh-url notification-url ip takeover-url takeo
pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-push pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-push
{% for extra_parameter_for_pseudo_replicating_instance in ["software-root", "buildout-shared-folder"] %} {% for extra_parameter_for_pseudo_replicating_instance in ["software-root", "buildout-shared-folder", "testing-short-embedded-instance-path"] %}
{% if slapparameter_dict.get(extra_parameter_for_pseudo_replicating_instance) %} {% if slapparameter_dict.get(extra_parameter_for_pseudo_replicating_instance) %}
config-{{ extra_parameter_for_pseudo_replicating_instance }} = {{ slapparameter_dict.get(extra_parameter_for_pseudo_replicating_instance) }} config-{{ extra_parameter_for_pseudo_replicating_instance }} = {{ slapparameter_dict.get(extra_parameter_for_pseudo_replicating_instance) }}
{% endif %} {% endif %}
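Each of these optional parameters is forwarded to the pseudo-replicating instance as a config- request parameter, but only when the requester actually set it. A small rendering sketch of the same pattern (hypothetical parameter value, plain jinja2 instead of the buildout recipe):

# Hypothetical illustration of the forwarding loop above.
import jinja2

template = jinja2.Template("""\
{% for name in ["software-root", "buildout-shared-folder", "testing-short-embedded-instance-path"] %}
{% if slapparameter_dict.get(name) %}
config-{{ name }} = {{ slapparameter_dict.get(name) }}
{% endif %}
{% endfor %}
""", trim_blocks=True, lstrip_blocks=True)

print(template.render(slapparameter_dict={'software-root': '/srv/example/soft'}))
# Only the parameter that was actually set is forwarded:
# config-software-root = /srv/example/soft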
......
...@@ -138,10 +138,10 @@ eggs = ...@@ -138,10 +138,10 @@ eggs =
# The last version of setuptools compatible with Python 3.7 # The last version of setuptools compatible with Python 3.7
setuptools = 67.8.0 setuptools = 67.8.0
# Use SlapOS patched zc.buildout # Use SlapOS patched zc.buildout
zc.buildout = 3.0.1+slapos005 zc.buildout = 3.0.1+slapos006
pip = 23.2.1 pip = 23.2.1
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2) # Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 2.0.8.dev0+slapos005 zc.recipe.egg = 2.0.8.dev0+slapos006
aiofiles = 23.1.0:whl aiofiles = 23.1.0:whl
aiohttp = 3.8.5:whl aiohttp = 3.8.5:whl
...@@ -200,6 +200,7 @@ decorator = 4.3.0 ...@@ -200,6 +200,7 @@ decorator = 4.3.0
defusedxml = 0.7.1 defusedxml = 0.7.1
distro = 1.7.0 distro = 1.7.0
dnspython = 1.16.0 dnspython = 1.16.0
editables = 0.5:whl
entrypoints = 0.3:whl entrypoints = 0.3:whl
enum34 = 1.1.10 enum34 = 1.1.10
erp5.util = 0.4.76 erp5.util = 0.4.76
...@@ -222,6 +223,7 @@ GitPython = 3.1.30 ...@@ -222,6 +223,7 @@ GitPython = 3.1.30
greenlet = 3.0.1 greenlet = 3.0.1
h11 = 0.14.0 h11 = 0.14.0
h5py = 3.11.0 h5py = 3.11.0
hatchling = 1.25.0:whl
httpcore = 1.0.4:whl httpcore = 1.0.4:whl
httplib2 = 0.22.0 httplib2 = 0.22.0
httpx = 0.27.0:whl httpx = 0.27.0:whl
...@@ -296,6 +298,7 @@ paramiko = 2.11.0 ...@@ -296,6 +298,7 @@ paramiko = 2.11.0
parso = 0.7.1 parso = 0.7.1
passlib = 1.7.4 passlib = 1.7.4
pathlib2 = 2.3.5 pathlib2 = 2.3.5
pathspec = 0.12.1:whl
patsy = 0.5.3 patsy = 0.5.3
pbr = 5.9.0 pbr = 5.9.0
pexpect = 4.8.0 pexpect = 4.8.0
...@@ -383,6 +386,7 @@ tornado = 6.4 ...@@ -383,6 +386,7 @@ tornado = 6.4
traitlets = 5.14.1:whl traitlets = 5.14.1:whl
trio = 0.22.0 trio = 0.22.0
trio-websocket = 0.9.2 trio-websocket = 0.9.2
trove-classifiers = 2024.10.21.16:whl
Twisted = 22.4.0:whl Twisted = 22.4.0:whl
txaio = 23.1.1 txaio = 23.1.1
typeguard = 3.0.2:whl typeguard = 3.0.2:whl
...@@ -413,10 +417,10 @@ zeroconf = 0.62.0:whl ...@@ -413,10 +417,10 @@ zeroconf = 0.62.0:whl
zipp = 3.12.0:whl zipp = 3.12.0:whl
zodburi = 2.5.0 zodburi = 2.5.0
zope.event = 5.0 zope.event = 5.0
zope.exceptions = 5.0.1 zope.exceptions = 5.2
zope.interface = 6.3 zope.interface = 7.1.1
zope.testing = 5.0.1 zope.testing = 5.0.1
zope.testrunner = 6.4 zope.testrunner = 6.6
[versions:sys.version_info < (3,10)] [versions:sys.version_info < (3,10)]
# keep old statsmodels by default until slapos.toolbox is updated # keep old statsmodels by default until slapos.toolbox is updated
......