Commit 9ef0ad81 authored by Thomas Gambier

Update Release Candidate

parents 9a301bf3 a053cefd
@@ -2,6 +2,8 @@
extends =
  ../pygolang/buildout.cfg
+parts =
+
[virtual-env-base]
recipe = slapos.recipe.build
name = ${:_buildout_section_name_}
@@ -10,64 +12,133 @@ init =
  from zc.buildout.easy_install import working_set
  import os
  name = options['name']
-  eggs = options['eggs']
-  try:
-    scripts = "scripts = " + options['scripts']
-  except KeyError:
-    scripts = ""
-  self.buildout.parse("""
+  eggs = options.get('eggs')
+  self.message = options.get('message')
+  self.chain = options.get('chain')
+  environment = options.get('environment')
+  scripts = options.get('scripts')
+  eggs_template = """
  [.%(name)s.install-eggs]
  recipe = zc.recipe.egg
-  eggs = %(eggs)s
+  eggs =
+    %(eggs)s
  %(scripts)s

  [.%(name)s.install-interpreter]
  <= python-interpreter
-  eggs += %(eggs)s
-  """ % locals())
+  eggs +=
+    %(eggs)s
+  """
+  instance_template = """
+  [.%(name)s.instance]
+  recipe = slapos.recipe.template
+  output = ${buildout:directory}/instance.cfg
+  depends = $%(cookbook)s
+  inline =
+    [buildout]
+    parts = publish
+    eggs-directory = ${buildout:eggs-directory}
+    develop-eggs-directory = ${buildout:develop-eggs-directory}
+    [publish]
+    recipe = slapos.cookbook:publish
+    activate-script = %(location)s
+  """
+  if eggs:
+    self.buildout.parse(eggs_template % {
+      "eggs": "\n ".join(e.strip() for e in eggs.splitlines()),
+      "name": name,
+      "scripts": "scripts = " + scripts if scripts else "",
+    })
  if is_true(options.get('default-instance')):
-    cookbook = "{slapos-cookbook:recipe}"
-    self.buildout.parse("""
-  [.%(name)s.instance]
-  recipe = slapos.recipe.template
-  output = ${buildout:directory}/instance.cfg
-  depends = $%(cookbook)s
-  inline =
-    [buildout]
-    parts = publish
-    eggs-directory = ${buildout:eggs-directory}
-    develop-eggs-directory = ${buildout:develop-eggs-directory}
-    [publish]
-    recipe = slapos.cookbook:publish
-    activate-script = %(location)s
-  """ % locals())
+    self.buildout.parse(instance_template % {
+      "cookbook": "{slapos-cookbook:recipe}",
+      "location": location,
+      "name": name,
+    })
+  env = {
+    "PATH": self.buildout['buildout']['bin-directory'] + ":\$PATH",
+    "PS1": "\"(" + self.name + ") \$PS1\"",
+  }
+  if environment:
+    for line in environment.splitlines():
+      key, value = line.split("=", 1)
+      env[key.strip()] = value.strip()
+  self.env = env
install =
+  message = ""
+  if self.message:
+    message = "echo " + "\n echo ".join(
+      "%r" % line for line in self.message.splitlines())
+    message += "\n echo \'\'"
+  chain = ""
+  if self.chain:
+    chain = "source " + "\n source ".join(
+      "%r" % line for line in self.chain.splitlines())
  with open(location, "w") as f:
    f.write(options['template'] % {
-      "path" : self.buildout['buildout']['bin-directory'],
-      "name" : self.name,
+      "env": " ".join("%s %s" % (k, v) for k, v in self.env.items()),
+      "message": message,
+      "chain": chain,
    })
# Template virtual env for bash shell in posix
[virtual-env-base:posix]
template =
-  deactivate () {
-    set PATH PS1
+  if type deactivate > /dev/null 2>&1
+  then
+    export _OLD_PARAM=( "$@")
+    set %(env)s
    while [ "$1" ]; do
-      eval "if [ \"\$_OLD_VENV_$1\" ]; then $1=\$_OLD_VENV_$1; else unset $1; fi; unset \$_OLD_VENV_$1"
+      if ! ( echo $_LIST_OLD_VENV | grep $1 ) > /dev/null 2>&1
+      then
+        eval "export _OLD_VENV_$1=\$$1"
+        eval "export _LIST_OLD_VENV=\"$1 \$_LIST_OLD_VENV\""
+      fi
+      eval "export $1=\"$2\""
+      shift
      shift
    done
-    unset -f deactivate
-  }
-  VENV_PATH=%(path)s
-  _OLD_VENV_PATH=$PATH
-  _OLD_VENV_PS1=$PS1
-  PATH=$VENV_PATH:$PATH
-  PS1='(%(name)s) '$PS1
+    if [[ -n "$_OLD_PARAM" ]]; then
+      set "$${_OLD_PARAM[@]}"
+    fi
+    unset _OLD_PARAM
+    %(chain)s
+    %(message)s
+  else
+    deactivate () {
+      set $_LIST_OLD_VENV
+      while [ "$1" ]; do
+        eval "if [ \"\$_OLD_VENV_$1\" ]; then $1=\$_OLD_VENV_$1; else unset $1; fi; unset \$_OLD_VENV_$1"
+        shift
+      done
+      unset -f deactivate
+      unset _LIST_OLD_VENV
+    }
+    export _OLD_PARAM=( "$@" )
+    set %(env)s
+    while [ "$1" ]; do
+      eval "_OLD_VENV_$1=\$$1"
+      eval "export $1=\"$2\""
+      eval "export _LIST_OLD_VENV=\"$1 \$_LIST_OLD_VENV\""
+      shift
+      shift
+    done
+    if [[ -n "$_OLD_PARAM" ]]; then
+      set "$${_OLD_PARAM[@]}"
+    fi
+    unset _OLD_PARAM
+    %(chain)s
+    %(message)s
+  fi
Virtual environment
===================

Introduction
------------

The virtual environment macro allows you to quickly create a development environment.
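
A typical use from a software profile looks roughly like the sketch below (the section name ``my-dev-env`` and the ``extends`` path are illustrative, not something provided by the macro)::

  [buildout]
  extends =
    path/to/the/virtual-env/buildout.cfg
  parts =
    my-dev-env

  [my-dev-env]
  # Inherit everything from the macro; by default the section name
  # becomes the displayed environment name.
  <= virtual-env-base
  location = ${buildout:directory}/activate
  eggs =
    numpy

Once the profile has been built, ``source activate`` enters the environment; the options below customize it.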

Options
-------

Several options are available to customize your virtual environment:

name
~~~~

The ``name`` option is the name displayed when the environment is activated. For example::

  name = virtual-env

gives::

  >> source activate
  ( virtual-env ) >>

**Note:** By default, ``name`` is the name of the Buildout section that uses the macro.

location
~~~~~~~~

The ``location`` option is where the script to be sourced is stored. For example::

  location = project/activate

gives::

  >> source project/activate
  ( virtual-env ) >>

**Note:** Don't forget to include the name of the script in the path.

eggs
~~~~

This option should not be used to install eggs during instantiation (in an ``instance`` file).
It works the same way as in ``zc.recipe.egg``: list on the option the eggs to download for use in the virtual environment.
A custom Python interpreter with the chosen eggs is then generated. For example::

  eggs = numpy

gives::

  ( virtual-env ) >> python
  >>> import numpy

scripts
~~~~~~~

This option should not be used to install scripts during instantiation (in an ``instance`` file).
It works the same way as in ``zc.recipe.egg``: list on the option the scripts to generate for use in the virtual environment. For example::

  eggs = Django
  scripts = django-admin

gives::

  ( virtual-env ) >> django-admin

**Note:** By default, if the option is not used, all scripts are installed.

default-instance
~~~~~~~~~~~~~~~~

The ``default-instance`` option takes the value ``true`` or ``false``.
If set to ``true``, a minimal instance is created that publishes the path of the script to be sourced.
If set to ``false``, you can create your own custom instance.

**Note:** If you want to use the macro in an ``instance`` file, set this option to ``false``.
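
For software profiles that define their own instance, a minimal sketch (the section name ``my-dev-env`` is illustrative) looks like::

  [my-dev-env]
  <= virtual-env-base
  location = ${buildout:directory}/activate
  default-instance = false

The ``instance`` file is then free to publish the activation script (or anything else) itself, instead of relying on the generated minimal instance.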

environment
~~~~~~~~~~~

The ``environment`` option allows you to choose the values of the environment variables of the virtual environment.
They are written one per line, in the form ``VAR = value``. For example::

  environment =
    VAR1 = value1
    VAR2 = value2

gives::

  ( virtual-env ) >> echo $VAR1
  value1

**Note:** If you want to keep the previous value as well, as is usual for ``PATH``, write::

  PATH = new_val:$PATH

message
~~~~~~~

The ``message`` option displays a message when the virtual environment is sourced.
The message is treated as a plain string. For example::

  message =
    You are in a virtual environment.

gives::

  >> source activate
  You are in a virtual environment.
  ( virtual-env ) >>

chain
~~~~~

The ``chain`` option allows you to chain several scripts created by the macro as if they were one. This can be useful when one script is generated in a ``software`` file and another in an ``instance`` file.
When deactivating, the environment returns to its initial state.
To use this option, simply specify the script to be sourced when running the script. For example::

  chain = project/another_activate
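
As a sketch of how the two scripts fit together (section and path names are illustrative), an instance-level environment chaining the software-level one could be declared as::

  [instance-env]
  <= virtual-env-base
  location = ${buildout:directory}/project/another_activate
  chain = ${buildout:directory}/activate
  message =
    Instance environment loaded on top of the software one.

Sourcing ``project/another_activate`` then also sources ``activate``, and a single ``deactivate`` later restores the initial shell environment.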

Deactivate
----------

To exit the virtual environment, simply run the ``deactivate`` function::

  ( virtual-env ) >> deactivate
  >>
@@ -19,7 +19,7 @@ parts =
    python3

[python3]
-<= python3.7
+<= python3.8

[python3-common]
recipe = slapos.recipe.cmmi
@@ -61,7 +61,32 @@ md5sum = 986078f11b39074be22a199e56491d98
[python3.7]
<= python3-common
version = 3.7
-package_version = 3.7.7
-md5sum = 172c650156f7bea68ce31b2fd01fa766
+package_version = 3.7.9
+md5sum = 389d3ed26b4d97c741d9e5423da1f43b
+patch-options =
+patches =
+
+[python3.8]
+<= python3-common
+version = 3.8
+package_version = 3.8.9
+md5sum = 51b5bbf2ab447e66d15af4883db1c133
+patch-options =
+patches =
+
+[python3.9]
+<= python3-common
+version = 3.9
+package_version = 3.9.13
+md5sum = 5e2411217b0060828d5f923eb422a3b8
+patch-options =
+patches =
+
+[python3.10]
+<= python3-common
+version = 3.10
+package_version = 3.10.5
+md5sum = f05727cb3489aa93cd57eb561c16747b
patch-options =
patches =
@@ -32,6 +32,7 @@ md5sum = bfb5b09a0d1f887c8c42a6d5f26971ab
patches =
  https://gitlab.com/redhat/centos-stream/src/qemu-kvm/-/merge_requests/87.diff#ad41b138aa6f330f95811c9a83637b85
patch-options = -p1
+patch-binary = ${patch:location}/bin/patch
pre-configure =
  sed -i '/^libmigration\b/s/$/ dependencies: [zlib],/' meson.build
  sed -i 's/\bsnappy,/zlib, \0/' dump/meson.build
@@ -59,7 +60,7 @@ configure-options =
environment =
  CFLAGS=-I${librbd:location}/include/ -I${gettext:location}/include -I${libaio:location}/include -I${liburing:location}/include -I${libcap-ng:location}/include
  LDFLAGS=-L${librbd:location}/lib -Wl,-rpath=${librbd:location}/lib -L${gettext:location}/lib -L${libaio:location}/lib -L${libcap-ng:location}/lib -Wl,-rpath=${libcap-ng:location}/lib -Wl,-rpath=${glib:location}/lib -Wl,-rpath=${gnutls:location}/lib -Wl,-rpath=${nettle:location}/lib -Wl,-rpath=${pixman:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${libpng:location}/lib -Wl,-rpath=${libaio:location}/lib -Wl,-rpath=${liburing:location}/lib -Wl,-rpath=${libcap-ng:location}/lib
-  PATH=${patch:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:%(PATH)s
+  PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:%(PATH)s
  PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${gnutls:pkg-config-path}:${libpng:location}/lib/pkgconfig:${liburing:location}/lib/pkgconfig:${ncurses:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig:${librbd:location}/lib/pkgconfig

[qemu:sys.version_info < (3,6)]
...
@@ -15,6 +15,11 @@ extends =
[systemd-markupsafe-download]
recipe = slapos.recipe.build:download
shared = true
+url = https://files.pythonhosted.org/packages/bf/10/ff66fea6d1788c458663a84d88787bae15d45daa16f6b3ef33322a51fc7e/${:filename}
+filename = MarkupSafe-2.0.1.tar.gz
+md5sum = 892e0fefa3c488387e5cc0cad2daa523
+
+[systemd-markupsafe-download:sys.version_info < (3,8)]
url = https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/${:filename}
filename = MarkupSafe-1.0.tar.gz
md5sum = 2fcedc9284d50e577b5192e8e3578355
...
@@ -86,9 +86,9 @@ eggs +=
[beremiz]
recipe = slapos.recipe.build:download-unpacked
-# download beremiz at revision 8171447dc479012a58fae0f2ffd233ade7d28d6a
-url = https://github.com/beremiz/beremiz/archive/8171447dc479012a58fae0f2ffd233ade7d28d6a.tar.gz
-md5sum = 48070804b00b633d79dfc4bae3a73646
+# download beremiz at revision c9b7db300a25806ccaa9d5a844d1e0fd281acb4b
+url = https://github.com/beremiz/beremiz/archive/c9b7db300a25806ccaa9d5a844d1e0fd281acb4b.tar.gz
+md5sum = ed28b53deaaa9a10e6160b10e9dad1a8

[beremiz-setup]
recipe = zc.recipe.egg:develop
...
@@ -3,6 +3,7 @@ extends =
  buildout.hash.cfg
  ../../component/git/buildout.cfg
  ../../component/matiec/buildout.cfg
+  ../../component/open62541/buildout.cfg
  ../../stack/monitor/buildout.cfg
  ../../stack/slapos.cfg
@@ -12,11 +13,20 @@ parts =
  instance-profile
  python-interpreter
  matiec
+  open62541
+
+[open62541]
+# Beremiz need it to be in folder parts/open62541
+# as Beremiz search for open62541 to BEREMIZ_PATH/../open62541
+shared = false
+post-install =
+  mkdir -p @@LOCATION@@/build/bin
+  ln -sf @@LOCATION@@/lib/libopen62541.a @@LOCATION@@/build/bin/libopen62541.a

[beremiz-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/beremiz/beremiz.git
-revision = 8171447dc479012a58fae0f2ffd233ade7d28d6a
+revision = c9b7db300a25806ccaa9d5a844d1e0fd281acb4b
git-executable = ${git:location}/bin/git

[beremiz]
...
@@ -14,7 +14,7 @@
# not need these here).
[template]
filename = instance.cfg.in
-md5sum = 051ae51b86f9aba169a6777fa2239901
+md5sum = f1f04e7f27bc6e40a655dd4badb2a8af

[profile-common]
filename = instance-common.cfg.in
@@ -22,19 +22,19 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in
-md5sum = 1e912fb970401a4b7670b25ba8284a5b
+md5sum = 874133120f3a4eda1d0505b8608b280f

[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
-md5sum = 57388e76c7e61b3d7213df8aac0b407d
+md5sum = 02a10d92d2b0e270454998cf865b6895

[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
-md5sum = 964a7f673f441f3a3e90c88ab03e3351
+md5sum = 268a945e5c7a52c8766b54a817215c4c

[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
-md5sum = be54431846fe7f3cee65260eefc83d62
+md5sum = b3422f3624054f57b78d0e50a0de399a

[profile-caddy-frontend-configuration]
_update_hash_filename_ = templates/Caddyfile.in
@@ -98,11 +98,11 @@ md5sum = f6f72d03af7d9dc29fb4d4fef1062e73
[caddyprofiledeps-dummy]
filename = caddyprofiledummy.py
-md5sum = b41b8de115ad815d0b0db306ad650365
+md5sum = 1c866272ec0ea0c161f0c0d80cb6e584

[profile-kedifa]
filename = instance-kedifa.cfg.in
-md5sum = b5426129668f39ace55f14012c4a2fd2
+md5sum = 2f1c9cc9a3d2f4c6ac59eba5a99d4983

[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
@@ -110,7 +110,7 @@ md5sum = 3336d554661b138dcef97b1d1866803c
[template-slave-introspection-httpd-nginx]
_update_hash_filename_ = templates/slave-introspection-httpd-nginx.conf.in
-md5sum = 3067e6ba6c6901821d57d2109517d39c
+md5sum = b79addf01b6fb93c2f3d018e83eff766

[template-expose-csr-nginx-conf]
_update_hash_filename_ = templates/expose-csr-nginx.conf.in
...
+from __future__ import print_function
import caucase.client
import caucase.utils
import os
import ssl
import sys
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+import urllib.parse
from cryptography import x509
from cryptography.hazmat.primitives import serialization
@@ -24,7 +24,7 @@ class Recipe(object):
def validate_netloc(netloc):
  # a bit crazy way to validate that the passed parameter is haproxy
  # compatible server netloc
-  parsed = urlparse.urlparse('scheme://'+netloc)
+  parsed = urllib.parse.urlparse('scheme://'+netloc)
  if ':' in parsed.hostname:
    hostname = '[%s]' % parsed.hostname
  else:
@@ -33,7 +33,7 @@ def validate_netloc(netloc):
def _check_certificate(url, certificate):
-  parsed = urlparse.urlparse(url)
+  parsed = urllib.parse.urlparse(url)
  got_certificate = ssl.get_server_certificate((parsed.hostname, parsed.port))
  if certificate.strip() != got_certificate.strip():
    raise ValueError('Certificate for %s does not match expected one' % (url,))
@@ -44,7 +44,7 @@ def _get_exposed_csr(url, certificate):
  self_signed = ssl.create_default_context()
  self_signed.check_hostname = False
  self_signed.verify_mode = ssl.CERT_NONE
-  return urllib.urlopen(url, context=self_signed).read()
+  return urllib.request.urlopen(url, context=self_signed).read().decode()

def _get_caucase_client(ca_url, ca_crt, user_key):
@@ -72,7 +72,7 @@ def _csr_match(*csr_list):
  number_list = set([])
  for csr in csr_list:
    number_list.add(
-      x509.load_pem_x509_csr(str(csr)).public_key().public_numbers())
+      x509.load_pem_x509_csr(csr.encode()).public_key().public_numbers())
  return len(number_list) == 1
...
@@ -99,7 +99,7 @@ hash-salt = ${frontend-node-private-salt:value}
init =
  import hashlib
  import base64
-  options['value'] = base64.urlsafe_b64encode(hashlib.md5(''.join([options['software-release-url'].strip(), options['hash-salt']])).digest())
+  options['value'] = base64.urlsafe_b64encode(hashlib.md5(''.join([options['software-release-url'].strip(), options['hash-salt']]).encode()).digest()).decode()

[frontend-node-information]
recipe = slapos.recipe.build
@@ -359,9 +359,9 @@ partition_ipv6 = ${slap-configuration:ipv6-random}
extra-context =
  key caddy_configuration_directory caddy-directory:slave-configuration
  key backend_client_caucase_url :backend-client-caucase-url
-  import urlparse_module urlparse
  import furl_module furl
  import urllib_module urllib
+  import operator_module operator
  key master_key_download_url :master_key_download_url
  key autocert caddy-directory:autocert
  key caddy_log_directory caddy-directory:slave-log
@@ -475,9 +475,14 @@ slave-introspection-graceful-command = ${slave-introspection-validate:output} &&
# BBB: SlapOS Master non-zero knowledge BEGIN
[get-self-signed-fallback-access]
-recipe = collective.recipe.shelloutput
-commands =
-  certificate = cat ${self-signed-fallback-access:certificate}
+recipe = slapos.recipe.build
+certificate-file = ${self-signed-fallback-access:certificate}
+init =
+  import os
+  options['certificate'] = ''
+  if os.path.exists(options['certificate-file']):
+    with open(options['certificate-file'], 'r') as fh:
+      options['certificate'] = fh.read()

[apache-certificate]
recipe = slapos.recipe.template:jinja2
@@ -1066,7 +1071,7 @@ config-command =
  ${logrotate:wrapper-path} -d

[configuration]
-{%- for key, value in instance_parameter_dict.iteritems() -%}
+{%- for key, value in instance_parameter_dict.items() -%}
{%- if key.startswith('configuration.') %}
{{ key.replace('configuration.', '') }} = {{ dumps(value) }}
{%- endif -%}
@@ -1076,13 +1081,13 @@ config-command =
{#- There are dangerous keys like recipe, etc #}
{#- XXX: Some other approach would be useful #}
{%- set DROP_KEY_LIST = ['recipe', '__buildout_signature__', 'computer', 'partition', 'url', 'key', 'cert'] %}
-{%- for key, value in instance_parameter_dict.iteritems() -%}
+{%- for key, value in instance_parameter_dict.items() -%}
{%- if not key.startswith('configuration.') and key not in DROP_KEY_LIST %}
{{ key }} = {{ dumps(value) }}
{%- endif -%}
{%- endfor %}

[software-parameter-section]
-{%- for key, value in software_parameter_dict.iteritems() %}
+{%- for key, value in software_parameter_dict.items() %}
{{ key }} = {{ dumps(value) }}
{%- endfor %}
@@ -129,7 +129,7 @@ context =
{% set config_key = "-frontend-config-%s-" % i %}
{% set config_key_length = config_key | length %}
{% set config_dict = {} %}
-{% for key in slapparameter_dict.keys() %}
+{% for key in list(slapparameter_dict.keys()) %}
{% if key.startswith(sla_key) %}
{% do sla_dict.__setitem__(key[sla_key_length:], slapparameter_dict.pop(key)) %}
# We check for specific configuration regarding the frontend
@@ -164,7 +164,7 @@ context =
{% set critical_rejected_slave_dict = {} %}
{% set warning_slave_dict = {} %}
{% set used_host_list = [] %}
-{% for slave in sorted(instance_parameter_dict['slave-instance-list']) %}
+{% for slave in sorted(instance_parameter_dict['slave-instance-list'], key=operator_module.itemgetter('slave_reference')) %}
{% set slave_error_list = [] %}
{% set slave_critical_error_list = [] %}
{% set slave_warning_list = [] %}
@@ -278,7 +278,7 @@ context =
{% if k in slave %}
{% set crt = slave.get(k, '') %}
{% set check_popen = popen([software_parameter_dict['openssl'], 'x509', '-noout']) %}
-{% do check_popen.communicate(crt) %}
+{% do check_popen.communicate(crt.encode()) %}
{% if check_popen.returncode != 0 %}
{% do slave_error_list.append('%s is invalid' % (k,)) %}
{% endif %}
@@ -296,8 +296,8 @@ context =
{% if slave.get('ssl_key') and slave.get('ssl_crt') %}
{% set key_popen = popen([software_parameter_dict['openssl'], 'rsa', '-noout', '-modulus']) %}
{% set crt_popen = popen([software_parameter_dict['openssl'], 'x509', '-noout', '-modulus']) %}
-{% set key_modulus = key_popen.communicate(slave['ssl_key'])[0] | trim %}
-{% set crt_modulus = crt_popen.communicate(slave['ssl_crt'])[0] | trim %}
+{% set key_modulus = key_popen.communicate(slave['ssl_key'].encode())[0] | trim %}
+{% set crt_modulus = crt_popen.communicate(slave['ssl_crt'].encode())[0] | trim %}
{% if not key_modulus or key_modulus != crt_modulus %}
{% do slave_error_list.append('slave ssl_key and ssl_crt does not match') %}
{% endif %}
@@ -334,7 +334,7 @@ context =
{% do warning_slave_dict.__setitem__(slave.get('slave_reference'), sorted(slave_warning_list)) %}
{% endif %}
{% endfor %}
-{% do authorized_slave_list.sort() %}
+{% do authorized_slave_list.sort(key=operator_module.itemgetter('slave_reference')) %}

[monitor-instance-parameter]
monitor-httpd-port = {{ master_partition_monitor_monitor_httpd_port }}
@@ -356,7 +356,7 @@ return = slave-instance-information-list monitor-base-url backend-client-csr-url
{%- do base_node_configuration_dict.__setitem__(key, slapparameter_dict[key]) %}
{%- endif %}
{%- endfor %}
-{% for section, frontend_request in request_dict.iteritems() %}
+{% for section, frontend_request in request_dict.items() %}
{% set state = frontend_request.get('state', '') %}
[{{section}}]
<= replicate
@@ -377,14 +377,14 @@ config-cluster-identification = {{ instance_parameter_dict['root-instance-title'] }}
{# sort_keys are important in order to avoid shuffling parameters on each run #}
{% do node_configuration_dict.__setitem__(slave_list_name, json_module.dumps(authorized_slave_list, sort_keys=True)) %}
{% do node_configuration_dict.__setitem__("frontend-name", frontend_request.get('name')) %}
-{%- for config_key, config_value in node_configuration_dict.iteritems() %}
+{%- for config_key, config_value in node_configuration_dict.items() %}
config-{{ config_key }} = {{ dumps(config_value) }}
{% endfor -%}
-{%- for config_key, config_value in base_node_configuration_dict.iteritems() %}
+{%- for config_key, config_value in base_node_configuration_dict.items() %}
config-{{ config_key }} = {{ dumps(config_value) }}
{% endfor -%}
{% if frontend_request.get('sla') %}
-{% for parameter, value in frontend_request.get('sla').iteritems() %}
+{% for parameter, value in frontend_request.get('sla').items() %}
sla-{{ parameter }} = {{ value }}
{% endfor %}
{% endif %}
@@ -489,7 +489,7 @@ config-slave-list = {{ dumps(authorized_slave_list) }}
config-cluster-identification = {{ instance_parameter_dict['root-instance-title'] }}
{% set software_url_key = "-kedifa-software-release-url" %}
-{% if slapparameter_dict.has_key(software_url_key) %}
+{% if software_url_key in slapparameter_dict %}
software-url = {{ slapparameter_dict.pop(software_url_key) }}
{% else %}
software-url = ${slap-connection:software-release-url}
@@ -499,7 +499,7 @@ name = kedifa
return = slave-kedifa-information master-key-generate-auth-url master-key-upload-url master-key-download-url caucase-url kedifa-csr-url csr-certificate monitor-base-url
{% set sla_kedifa_key = "-sla-kedifa-" %}
{% set sla_kedifa_key_length = sla_kedifa_key | length %}
-{% for key in slapparameter_dict.keys() %}
+{% for key in list(slapparameter_dict.keys()) %}
{% if key.startswith(sla_kedifa_key) %}
sla-{{ key[sla_kedifa_key_length:] }} = {{ slapparameter_dict.pop(key) }}
{% endif %}
...
@@ -171,9 +171,14 @@ wrapper-path = ${directory:service}/expose-csr
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg

[expose-csr-certificate-get]
-recipe = collective.recipe.shelloutput
-commands =
-  certificate = cat ${expose-csr-certificate:certificate}
+recipe = slapos.recipe.build
+certificate-file = ${expose-csr-certificate:certificate}
+init =
+  import os
+  options['certificate'] = ''
+  if os.path.exists(options['certificate-file']):
+    with open(options['certificate-file'], 'r') as fh:
+      options['certificate'] = fh.read()

[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
@@ -259,10 +264,8 @@ command =
update-command = ${:command}

[{{ slave_reference }}-auth-random]
-recipe = collective.recipe.shelloutput
+<= auth-random
file = {{ '${' + slave_reference }}-auth-random-generate:file}
-commands =
-  passwd = cat ${:file} 2>/dev/null || echo "NotReadyYet"
{% endfor %}
@@ -273,11 +276,18 @@ command =
  [ ! -f ${:file} ] && {{ software_parameter_dict['curl'] }}/bin/curl -s -g -X POST https://[${kedifa-config:ip}]:${kedifa-config:port}/reserve-id --cert ${kedifa-config:certificate} --cacert ${kedifa-config:ca-certificate} > ${:file}.tmp && mv ${:file}.tmp ${:file}
update-command = ${:command}

+[auth-random]
+recipe = slapos.recipe.build
+init =
+  import os
+  options['passwd'] = 'NotReadyYet'
+  if os.path.exists(options['file']):
+    with open(options['file'], 'r') as fh:
+      options['passwd'] = fh.read()
+
[master-auth-random]
-recipe = collective.recipe.shelloutput
+<= auth-random
file = ${master-auth-random-generate:file}
-commands =
-  passwd = cat ${:file} 2>/dev/null || echo "NotReadyYet"

[slave-kedifa-information]
recipe = slapos.cookbook:publish.serialised
...
@@ -34,7 +34,7 @@ replicate = dynamic-profile-caddy-replicate:output
kedifa = dynamic-profile-kedifa:output

[software-parameter-section]
-{% for key,value in software_parameter_dict.iteritems() %}
+{% for key,value in software_parameter_dict.items() %}
{{ key }} = {{ dumps(value) }}
{% endfor -%}
@@ -54,6 +54,7 @@ filename = instance-caddy-replicate.cfg
extra-context =
  import subprocess_module subprocess
  import functools_module functools
+  import operator_module operator
  import validators validators
  import caddyprofiledummy caddyprofiledummy
# Must match the key id in [switch-softwaretype] which uses this section.
...
@@ -22,6 +22,9 @@ parts +=
  caddyprofiledeps
  kedifa

+[python]
+part = python3
+
[kedifa]
recipe = zc.recipe.egg
eggs =
@@ -57,7 +60,6 @@ recipe = zc.recipe.egg
eggs =
  caddyprofiledeps
  websockify
-  collective.recipe.shelloutput

[profile-common]
recipe = slapos.recipe.template:jinja2
...
@@ -52,13 +52,13 @@ context =
{#- * setup defaults to simplify other profiles #}
{#- * stabilise values for backend #}
{%- for key, prefix in [('url', 'http_backend'), ('https-url', 'https_backend')] %}
-{%- set parsed = urlparse_module.urlparse(slave_instance.get(key, '').strip()) %}
+{%- set parsed = urllib_module.parse.urlparse(slave_instance.get(key, '').strip()) %}
{%- set info_dict = {'scheme': parsed.scheme, 'hostname': parsed.hostname, 'port': parsed.port or DEFAULT_PORT[parsed.scheme], 'path': parsed.path, 'fragment': parsed.fragment, 'query': parsed.query, 'netloc-list': slave_instance.get(key + '-netloc-list', '').split() } %}
{%- do slave_instance.__setitem__(prefix, info_dict) %}
{%- endfor %}
{%- do slave_instance.__setitem__('ssl_proxy_verify', ('' ~ slave_instance.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES) %}
{%- for key, prefix in [('health-check-failover-url', 'http_backend'), ('health-check-failover-https-url', 'https_backend')] %}
-{%- set parsed = urlparse_module.urlparse(slave_instance.get(key, '').strip()) %}
+{%- set parsed = urllib_module.parse.urlparse(slave_instance.get(key, '').strip()) %}
{%- set info_dict = slave_instance[prefix] %}
{%- do info_dict.__setitem__('health-check-failover-scheme', parsed.scheme) %}
{%- do info_dict.__setitem__('health-check-failover-hostname', parsed.hostname) %}
@@ -189,7 +189,7 @@ context =
{%- do furled.set(password = '${'+ slave_password_section +':passwd}') %}
{%- do furled.set(path = slave_reference + '/') %}
{#- We unquote, as furl quotes automatically, but there is buildout value on purpose like ${...:...} in the passwod #}
-{%- set slave_log_access_url = urlparse_module.unquote(furled.tostr()) %}
+{%- set slave_log_access_url = urllib_module.parse.unquote(furled.tostr()) %}
{%- do slave_publish_dict.__setitem__('log-access', slave_log_access_url) %}
{%- do slave_publish_dict.__setitem__('slave-reference', slave_reference) %}
{%- do slave_publish_dict.__setitem__('backend-client-caucase-url', backend_client_caucase_url) %}
@@ -212,7 +212,7 @@ context =
{%- for websocket_path in slave_instance.get('websocket-path-list', '').split() %}
{%- set websocket_path = websocket_path.strip('/') %}
{#- Unquote the path, so %20 and similar can be represented correctly #}
-{%- set websocket_path = urllib_module.unquote(websocket_path.strip()) %}
+{%- set websocket_path = urllib_module.parse.unquote(websocket_path.strip()) %}
{%- if websocket_path %}
{%- do websocket_path_list.append(websocket_path) %}
{%- endif %}
@@ -332,7 +332,7 @@ http_port = {{ dumps('' ~ configuration['plain_http_port']) }}
local_ipv4 = {{ dumps('' ~ instance_parameter_dict['ipv4-random']) }}
version-hash = {{ version_hash }}
node-id = {{ node_id }}
-{%- for key, value in slave_instance.iteritems() %}
+{%- for key, value in slave_instance.items() %}
{%- if value is not none %}
{{ key }} = {{ dumps(value) }}
{%- endif %}
@@ -383,7 +383,7 @@ config-frequency = 720
{%- do part_list.append(publish_section_title) %}
[{{ publish_section_title }}]
recipe = slapos.cookbook:publish
-{%- for key, value in slave_publish_dict.iteritems() %}
+{%- for key, value in slave_publish_dict.items() %}
{{ key }} = {{ value }}
{%- endfor %}
{%- else %}
@@ -463,7 +463,7 @@ csr-certificate = ${expose-csr-certificate-get:certificate}
{%- do furled.set(password = backend_haproxy_configuration['statistic-password']) %}
{%- do furled.set(path = '/') %}
{#- We unquote, as furl quotes automatically, but there is buildout value on purpose like ${...:...} in the passwod #}
-{%- set statistic_url = urlparse_module.unquote(furled.tostr()) %}
+{%- set statistic_url = urllib_module.parse.unquote(furled.tostr()) %}
backend-haproxy-statistic-url = {{ statistic_url }}
{#- sort_keys are important in order to avoid shuffling parameters on each run #}
node-information-json = {{ json_module.dumps(node_information, sort_keys=True) }}
@@ -503,7 +503,7 @@ output = ${:file}
< = jinja2-template-base
url = {{ template_backend_haproxy_configuration }}
output = ${backend-haproxy-config:file}
-backend_slave_list = {{ dumps(sorted(backend_slave_list)) }}
+backend_slave_list = {{ dumps(sorted(backend_slave_list, key=operator_module.itemgetter('slave_reference'))) }}
extra-context =
  key backend_slave_list :backend_slave_list
  section configuration backend-haproxy-config
@@ -611,9 +611,14 @@ wrapper-path = {{ directory['service'] }}/expose-csr
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg

[expose-csr-certificate-get]
-recipe = collective.recipe.shelloutput
-commands =
-  certificate = cat ${expose-csr-certificate:certificate}
+recipe = slapos.recipe.build
+certificate-file = ${expose-csr-certificate:certificate}
+init =
+  import os
+  options['certificate'] = ''
+  if os.path.exists(options['certificate-file']):
+    with open(options['certificate-file'], 'r') as fh:
+      options['certificate'] = fh.read()

[promise-logrotate-setup]
<= monitor-promise-base
...
@@ -2,7 +2,7 @@
{% set slave_information_dict = {} %}

# regroup slave information from all frontends
-{% for frontend, slave_list_raw in slave_information.iteritems() %}
+{% for frontend, slave_list_raw in slave_information.items() %}
{% if slave_list_raw %}
{% set slave_list = json_module.loads(slave_list_raw) %}
{% else %}
@@ -27,21 +27,21 @@
{% endfor %}
{% endfor %}

-{% for slave_reference, rejected_info_list in rejected_slave_information['rejected-slave-dict'].iteritems() %}
+{% for slave_reference, rejected_info_list in rejected_slave_information['rejected-slave-dict'].items() %}
{% if slave_reference not in slave_information_dict %}
{% do slave_information_dict.__setitem__(slave_reference, {}) %}
{% endif %}
{% do slave_information_dict[slave_reference].__setitem__('request-error-list', json_module.dumps(rejected_info_list)) %}
{% endfor %}

-{% for slave_reference, warning_info_list in warning_slave_information['warning-slave-dict'].iteritems() %}
+{% for slave_reference, warning_info_list in warning_slave_information['warning-slave-dict'].items() %}
{% if slave_reference not in slave_information_dict %}
{% do slave_information_dict.__setitem__(slave_reference, {}) %}
{% endif %}
{% do slave_information_dict[slave_reference].__setitem__('warning-list', json_module.dumps(warning_info_list)) %}
{% endfor %}

-{% for slave_reference, kedifa_dict in json_module.loads(slave_kedifa_information).iteritems() %}
+{% for slave_reference, kedifa_dict in json_module.loads(slave_kedifa_information).items() %}
{% if slave_reference not in rejected_slave_information['rejected-slave-dict'] %}
{% if slave_reference not in slave_information_dict %}
{% do slave_information_dict.__setitem__(slave_reference, {}) %}
@@ -54,7 +54,7 @@
# Publish information for each slave
{% set active_slave_instance_list = json_module.loads(active_slave_instance_dict['active-slave-instance-list']) %}
-{% for slave_reference, slave_information in slave_information_dict.iteritems() %}
+{% for slave_reference, slave_information in slave_information_dict.items() %}
{# Filter out destroyed, so not existing anymore, slaves #}
{# Note: This functionality is not yet covered by tests, please modify with care #}
{% if slave_reference in active_slave_instance_list %}
@@ -68,11 +68,11 @@ recipe = slapos.cookbook:publish
{# sort_keys are important in order to avoid shuffling parameters on each run #}
log-access-url = {{ dumps(json_module.dumps(log_access_url, sort_keys=True)) }}
{% endif %}
-{% for key, value in slave_information.iteritems() %}
+{% for key, value in slave_information.items() %}
{{ key }} = {{ dumps(value) }}
{% endfor %}
{% endif %}
-{% for frontend_key, frontend_value in frontend_information.iteritems() %}
+{% for frontend_key, frontend_value in frontend_information.items() %}
{{ frontend_key }} = {{ frontend_value }}
{% endfor %}
{% endfor %}
...
@@ -23,7 +23,7 @@ http {
  fastcgi_temp_path {{ parameter_dict['var'] }} 1 2;
  uwsgi_temp_path {{ parameter_dict['var'] }} 1 2;
  scgi_temp_path {{ parameter_dict['var'] }} 1 2;
-  {% for slave, directory in slave_log_directory.iteritems() %}
+  {% for slave, directory in slave_log_directory.items() %}
  location /{{ slave }} {
    alias {{ directory }};
    autoindex on;
...
...@@ -28,26 +28,26 @@ ...@@ -28,26 +28,26 @@
import glob import glob
import os import os
import requests import requests
import httplib import http.client
from requests_toolbelt.adapters import source from requests_toolbelt.adapters import source
import json import json
import multiprocessing import multiprocessing
import subprocess import subprocess
from unittest import skip from unittest import skip
import ssl import ssl
from BaseHTTPServer import HTTPServer from http.server import HTTPServer
from BaseHTTPServer import BaseHTTPRequestHandler from http.server import BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn from socketserver import ThreadingMixIn
import time import time
import tempfile import tempfile
import ipaddress import ipaddress
import StringIO import io
import gzip import gzip
import base64 import base64
import re import re
from slapos.recipe.librecipe import generateHashFromFiles from slapos.recipe.librecipe import generateHashFromFiles
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
import urlparse import urllib.parse
import socket import socket
import sys import sys
import logging import logging
...@@ -130,7 +130,7 @@ def patch_broken_pipe_error(): ...@@ -130,7 +130,7 @@ def patch_broken_pipe_error():
"""Monkey Patch BaseServer.handle_error to not write """Monkey Patch BaseServer.handle_error to not write
a stacktrace to stderr on broken pipe. a stacktrace to stderr on broken pipe.
https://stackoverflow.com/a/7913160""" https://stackoverflow.com/a/7913160"""
from SocketServer import BaseServer from socketserver import BaseServer
handle_error = BaseServer.handle_error handle_error = BaseServer.handle_error
...@@ -162,10 +162,10 @@ def createKey(): ...@@ -162,10 +162,10 @@ def createKey():
def createSelfSignedCertificate(name_list): def createSelfSignedCertificate(name_list):
key, key_pem = createKey() key, key_pem = createKey()
subject_alternative_name_list = x509.SubjectAlternativeName( subject_alternative_name_list = x509.SubjectAlternativeName(
[x509.DNSName(unicode(q)) for q in name_list] [x509.DNSName(str(q)) for q in name_list]
) )
subject = issuer = x509.Name([ subject = issuer = x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, u'Test Self Signed Certificate'), x509.NameAttribute(NameOID.COMMON_NAME, 'Test Self Signed Certificate'),
]) ])
certificate = x509.CertificateBuilder().subject_name( certificate = x509.CertificateBuilder().subject_name(
subject subject
...@@ -192,10 +192,10 @@ def createCSR(common_name, ip=None): ...@@ -192,10 +192,10 @@ def createCSR(common_name, ip=None):
subject_alternative_name_list = [] subject_alternative_name_list = []
if ip is not None: if ip is not None:
subject_alternative_name_list.append( subject_alternative_name_list.append(
x509.IPAddress(ipaddress.ip_address(unicode(ip))) x509.IPAddress(ipaddress.ip_address(str(ip)))
) )
csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([ csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)), x509.NameAttribute(NameOID.COMMON_NAME, str(common_name)),
])) ]))
if len(subject_alternative_name_list): if len(subject_alternative_name_list):
...@@ -219,10 +219,10 @@ class CertificateAuthority(object): ...@@ -219,10 +219,10 @@ class CertificateAuthority(object):
public_key = self.key.public_key() public_key = self.key.public_key()
builder = x509.CertificateBuilder() builder = x509.CertificateBuilder()
builder = builder.subject_name(x509.Name([ builder = builder.subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)), x509.NameAttribute(NameOID.COMMON_NAME, str(common_name)),
])) ]))
builder = builder.issuer_name(x509.Name([ builder = builder.issuer_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)), x509.NameAttribute(NameOID.COMMON_NAME, str(common_name)),
])) ]))
builder = builder.not_valid_before( builder = builder.not_valid_before(
datetime.datetime.utcnow() - datetime.timedelta(days=2)) datetime.datetime.utcnow() - datetime.timedelta(days=2))
...@@ -283,7 +283,7 @@ def isHTTP2(domain): ...@@ -283,7 +283,7 @@ def isHTTP2(domain):
out, err = prc.communicate() out, err = prc.communicate()
assert prc.returncode == 0, "Problem running %r. Output:\n%s\nError:\n%s" % ( assert prc.returncode == 0, "Problem running %r. Output:\n%s\nError:\n%s" % (
curl_command, out, err) curl_command, out, err)
return 'Using HTTP2, server supports' in err return 'Using HTTP2, server supports'.encode() in err
class TestDataMixin(object): class TestDataMixin(object):
...@@ -305,7 +305,7 @@ class TestDataMixin(object): ...@@ -305,7 +305,7 @@ class TestDataMixin(object):
except IOError: except IOError:
test_data = '' test_data = ''
for hash_type, hash_value in hash_value_dict.items(): for hash_type, hash_value in list(hash_value_dict.items()):
runtime_data = runtime_data.replace(hash_value, '{hash-%s}' % ( runtime_data = runtime_data.replace(hash_value, '{hash-%s}' % (
hash_type),) hash_type),)
...@@ -321,7 +321,8 @@ class TestDataMixin(object): ...@@ -321,7 +321,8 @@ class TestDataMixin(object):
) )
except AssertionError: except AssertionError:
if os.environ.get('SAVE_TEST_DATA', '0') == '1': if os.environ.get('SAVE_TEST_DATA', '0') == '1':
open(test_data_file, 'w').write(runtime_data.strip() + '\n') with open(test_data_file, 'w') as fh:
fh.write(runtime_data.strip() + '\n')
raise raise
finally: finally:
self.maxDiff = maxDiff self.maxDiff = maxDiff
...@@ -510,26 +511,34 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -510,26 +511,34 @@ class TestHandler(BaseHTTPRequestHandler):
self.wfile.write(json.dumps({self.path: config}, indent=2)) self.wfile.write(json.dumps({self.path: config}, indent=2))
def do_PUT(self): def do_PUT(self):
incoming_config = {}
for key, value in list(self.headers.items()):
if key.startswith('X-'):
incoming_config[key] = value
config = { config = {
'status_code': self.headers.dict.get('x-reply-status-code', '200') 'status_code': incoming_config.pop('X-Reply-Status-Code', '200')
} }
prefix = 'x-reply-header-' prefix = 'X-Reply-Header-'
length = len(prefix) length = len(prefix)
for key, value in self.headers.dict.items(): for key in list(incoming_config.keys()):
if key.startswith(prefix): if key.startswith(prefix):
header = '-'.join([q.capitalize() for q in key[length:].split('-')]) header = '-'.join([q.capitalize() for q in key[length:].split('-')])
config[header] = value.strip() config[header] = incoming_config.pop(key)
if 'x-reply-body' in self.headers.dict: if 'X-Reply-Body' in incoming_config:
config['Body'] = base64.b64decode(self.headers.dict['x-reply-body']) config['Body'] = base64.b64decode(
incoming_config.pop('X-Reply-Body')).decode()
config['X-Drop-Header'] = self.headers.dict.get('x-drop-header') config['X-Drop-Header'] = incoming_config.pop('X-Drop-Header', None)
self.configuration[self.path] = config self.configuration[self.path] = config
self.send_response(201) self.send_response(201)
self.send_header("Content-Type", "application/json") self.send_header("Content-Type", "application/json")
self.end_headers() self.end_headers()
self.wfile.write(json.dumps({self.path: config}, indent=2)) reply = {self.path: config}
if incoming_config:
reply['unknown_config'] = incoming_config
self.wfile.write(json.dumps(reply, indent=2).encode())
def do_POST(self): def do_POST(self):
return self.do_GET() return self.do_GET()
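In the rewritten do_PUT, self.headers is an email.message.Message on Python 3 (the old mimetools .dict attribute is gone), so the handler collects the X-* headers into incoming_config, pops the ones it understands and reports the rest back as unknown_config. A hedged sketch of just that header-picking pattern; demo_headers and pick_config are illustrative names, not part of the test:

from email.message import Message

demo_headers = Message()  # stand-in for a handler's self.headers
demo_headers['X-Reply-Status-Code'] = '201'
demo_headers['X-Reply-Header-Content-Type'] = 'text/plain'
demo_headers['Host'] = 'example.com'

def pick_config(headers):
    # keep only the X-* headers, mirroring the incoming_config loop in do_PUT
    config = {k: v for k, v in headers.items() if k.startswith('X-')}
    status_code = config.pop('X-Reply-Status-Code', '200')
    return status_code, config

print(pick_config(demo_headers))  # ('201', {'X-Reply-Header-Content-Type': 'text/plain'})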
...@@ -548,33 +557,33 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -548,33 +557,33 @@ class TestHandler(BaseHTTPRequestHandler):
header_dict = config header_dict = config
else: else:
drop_header_list = [] drop_header_list = []
for header in (self.headers.dict.get('x-drop-header') or '').split(): for header in (self.headers.get('x-drop-header') or '').split():
drop_header_list.append(header) drop_header_list.append(header)
response = None response = None
status_code = 200 status_code = 200
timeout = int(self.headers.dict.get('timeout', '0')) timeout = int(self.headers.get('timeout', '0'))
if 'x-maximum-timeout' in self.headers.dict: if 'x-maximum-timeout' in self.headers:
maximum_timeout = int(self.headers.dict['x-maximum-timeout']) maximum_timeout = int(self.headers['x-maximum-timeout'])
timeout = random.randrange(maximum_timeout) timeout = random.randrange(maximum_timeout)
if 'x-response-size' in self.headers.dict: if 'x-response-size' in self.headers:
min_response, max_response = [ min_response, max_response = [
int(q) for q in self.headers.dict['x-response-size'].split(' ')] int(q) for q in self.headers['x-response-size'].split(' ')]
response_size = random.randrange(min_response, max_response) response_size = random.randrange(min_response, max_response)
response = ''.join( response = ''.join(
random.choice(string.lowercase) for x in range(response_size)) random.choice(string.lowercase) for x in range(response_size))
compress = int(self.headers.dict.get('compress', '0')) compress = int(self.headers.get('compress', '0'))
header_dict = {} header_dict = {}
prefix = 'x-reply-header-' prefix = 'x-reply-header-'
length = len(prefix) length = len(prefix)
for key, value in self.headers.dict.items(): for key, value in list(self.headers.items()):
if key.startswith(prefix): if key.startswith(prefix):
header = '-'.join([q.capitalize() for q in key[length:].split('-')]) header = '-'.join([q.capitalize() for q in key[length:].split('-')])
header_dict[header] = value.strip() header_dict[header] = value.strip()
if response is None: if response is None:
if 'x-reply-body' not in self.headers.dict: if 'x-reply-body' not in self.headers:
headers_dict = dict() headers_dict = dict()
for header in self.headers.keys(): for header in list(self.headers.keys()):
content = self.headers.getheaders(header) content = self.headers.get_all(header)
if len(content) == 0: if len(content) == 0:
headers_dict[header] = None headers_dict[header] = None
elif len(content) == 1: elif len(content) == 1:
...@@ -587,12 +596,12 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -587,12 +596,12 @@ class TestHandler(BaseHTTPRequestHandler):
} }
response = json.dumps(response, indent=2) response = json.dumps(response, indent=2)
else: else:
response = base64.b64decode(self.headers.dict['x-reply-body']) response = base64.b64decode(self.headers['x-reply-body'])
time.sleep(timeout) time.sleep(timeout)
self.send_response(status_code) self.send_response(status_code)
for key, value in header_dict.items(): for key, value in list(header_dict.items()):
self.send_header(key, value) self.send_header(key, value)
if self.identification is not None: if self.identification is not None:
...@@ -608,16 +617,18 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -608,16 +617,18 @@ class TestHandler(BaseHTTPRequestHandler):
self.send_header('Via', 'http/1.1 backendvia') self.send_header('Via', 'http/1.1 backendvia')
if compress: if compress:
self.send_header('Content-Encoding', 'gzip') self.send_header('Content-Encoding', 'gzip')
out = StringIO.StringIO() out = io.BytesIO()
# compress with level 0, to find out if in the middle something would # compress with level 0, to find out if in the middle something would
# like to alter the compression # like to alter the compression
with gzip.GzipFile(fileobj=out, mode="w", compresslevel=0) as f: with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=0) as f:
f.write(response) f.write(response.encode())
response = out.getvalue() response = out.getvalue()
self.send_header('Backend-Content-Length', len(response)) self.send_header('Backend-Content-Length', len(response))
if 'Content-Length' not in drop_header_list: if 'Content-Length' not in drop_header_list:
self.send_header('Content-Length', len(response)) self.send_header('Content-Length', len(response))
self.end_headers() self.end_headers()
if getattr(response, 'encode', None) is not None:
response = response.encode()
self.wfile.write(response) self.wfile.write(response)
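The compression branch above switches from StringIO to io.BytesIO because GzipFile only accepts bytes on Python 3; the response is encoded before writing and the final wfile.write gets bytes as well. A minimal standalone round-trip under the same assumption:

import gzip
import io

response = '{"Path": "/test"}'
out = io.BytesIO()
# compresslevel=0 mirrors the handler: store, do not really compress
with gzip.GzipFile(fileobj=out, mode='wb', compresslevel=0) as f:
    f.write(response.encode())  # GzipFile needs bytes on Python 3
compressed = out.getvalue()
print(len(compressed), gzip.decompress(compressed).decode())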
...@@ -717,7 +728,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -717,7 +728,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
master_parameter_dict = self.parseConnectionParameterDict() master_parameter_dict = self.parseConnectionParameterDict()
caucase_url = master_parameter_dict['backend-client-caucase-url'] caucase_url = master_parameter_dict['backend-client-caucase-url']
ca_certificate = requests.get(caucase_url + '/cas/crt/ca.crt.pem') ca_certificate = requests.get(caucase_url + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK assert ca_certificate.status_code == http.client.OK
ca_certificate_file = os.path.join( ca_certificate_file = os.path.join(
self.working_directory, 'ca-backend-client.crt.pem') self.working_directory, 'ca-backend-client.crt.pem')
with open(ca_certificate_file, 'w') as fh: with open(ca_certificate_file, 'w') as fh:
...@@ -759,7 +770,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -759,7 +770,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
def _fetchKedifaCaucaseCaCertificateFile(cls, parameter_dict): def _fetchKedifaCaucaseCaCertificateFile(cls, parameter_dict):
ca_certificate = requests.get( ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem') parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK assert ca_certificate.status_code == http.client.OK
cls.kedifa_caucase_ca_certificate_file = os.path.join( cls.kedifa_caucase_ca_certificate_file = os.path.join(
cls.working_directory, 'kedifa-caucase.ca.crt.pem') cls.working_directory, 'kedifa-caucase.ca.crt.pem')
open(cls.kedifa_caucase_ca_certificate_file, 'w').write( open(cls.kedifa_caucase_ca_certificate_file, 'w').write(
...@@ -769,7 +780,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -769,7 +780,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
def _fetchBackendClientCaCertificateFile(cls, parameter_dict): def _fetchBackendClientCaCertificateFile(cls, parameter_dict):
ca_certificate = requests.get( ca_certificate = requests.get(
parameter_dict['backend-client-caucase-url'] + '/cas/crt/ca.crt.pem') parameter_dict['backend-client-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK assert ca_certificate.status_code == http.client.OK
cls.backend_client_caucase_ca_certificate_file = os.path.join( cls.backend_client_caucase_ca_certificate_file = os.path.join(
cls.working_directory, 'backend-client-caucase.ca.crt.pem') cls.working_directory, 'backend-client-caucase.ca.crt.pem')
open(cls.backend_client_caucase_ca_certificate_file, 'w').write( open(cls.backend_client_caucase_ca_certificate_file, 'w').write(
...@@ -785,12 +796,12 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -785,12 +796,12 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
auth = requests.get( auth = requests.get(
parameter_dict['master-key-generate-auth-url'], parameter_dict['master-key-generate-auth-url'],
verify=cls.kedifa_caucase_ca_certificate_file) verify=cls.kedifa_caucase_ca_certificate_file)
assert auth.status_code == httplib.CREATED assert auth.status_code == http.client.CREATED
upload = requests.put( upload = requests.put(
parameter_dict['master-key-upload-url'] + auth.text, parameter_dict['master-key-upload-url'] + auth.text,
data=cls.key_pem + cls.certificate_pem, data=cls.key_pem + cls.certificate_pem,
verify=cls.kedifa_caucase_ca_certificate_file) verify=cls.kedifa_caucase_ca_certificate_file)
assert upload.status_code == httplib.CREATED assert upload.status_code == http.client.CREATED
cls.runKedifaUpdater() cls.runKedifaUpdater()
@classmethod @classmethod
...@@ -891,7 +902,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -891,7 +902,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
via_id = '%s-%s' % ( via_id = '%s-%s' % (
self.node_information_dict['node-id'], self.node_information_dict['node-id'],
self.node_information_dict['version-hash-history'].keys()[0]) list(self.node_information_dict['version-hash-history'].keys())[0])
if via: if via:
self.assertIn('Via', headers) self.assertIn('Via', headers)
if cached: if cached:
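The via_id change above wraps dict.keys() in list() because Python 3 returns a non-indexable view. A tiny sketch with an invented version-hash-history mapping:

version_hash_history = {'abc123': 'software-release-1'}  # illustrative content only
first_hash = list(version_hash_history.keys())[0]        # keys() view is not subscriptable
assert first_hash == next(iter(version_hash_history))    # equivalent without the copy
print(first_hash)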
...@@ -925,7 +936,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -925,7 +936,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
frontend, url = entry frontend, url = entry
result = requests.get(url, verify=False) result = requests.get(url, verify=False)
self.assertEqual( self.assertEqual(
httplib.OK, http.client.OK,
result.status_code, result.status_code,
'While accessing %r of %r the status code was %r' % ( 'While accessing %r of %r the status code was %r' % (
url, frontend, result.status_code)) url, frontend, result.status_code))
...@@ -939,11 +950,11 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -939,11 +950,11 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
sorted([q['name'] for q in result.json()]), sorted([q['name'] for q in result.json()]),
['access.log', 'backend.log', 'error.log']) ['access.log', 'backend.log', 'error.log'])
self.assertEqual( self.assertEqual(
httplib.OK, http.client.OK,
requests.get(url + 'access.log', verify=False).status_code requests.get(url + 'access.log', verify=False).status_code
) )
self.assertEqual( self.assertEqual(
httplib.OK, http.client.OK,
requests.get(url + 'error.log', verify=False).status_code requests.get(url + 'error.log', verify=False).status_code
) )
# assert only for a few tests, as backend log is not available for many of # assert only for a few tests, as backend log is not available for many of
...@@ -952,7 +963,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -952,7 +963,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
'test_url', 'test_auth_to_backend', 'test_compressed_result']: 'test_url', 'test_auth_to_backend', 'test_compressed_result']:
if self.id().endswith(test_name): if self.id().endswith(test_name):
self.assertEqual( self.assertEqual(
httplib.OK, http.client.OK,
requests.get(url + 'backend.log', verify=False).status_code requests.get(url + 'backend.log', verify=False).status_code
) )
...@@ -963,11 +974,11 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -963,11 +974,11 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
kedifa_ipv6_base = 'https://[%s]:%s' % (self._ipv6_address, KEDIFA_PORT) kedifa_ipv6_base = 'https://[%s]:%s' % (self._ipv6_address, KEDIFA_PORT)
base = '^' + kedifa_ipv6_base.replace( base = '^' + kedifa_ipv6_base.replace(
'[', r'\[').replace(']', r'\]') + '/.{32}' '[', r'\[').replace(']', r'\]') + '/.{32}'
self.assertRegexpMatches( self.assertRegex(
generate_auth_url, generate_auth_url,
base + r'\/generateauth$' base + r'\/generateauth$'
) )
self.assertRegexpMatches( self.assertRegex(
upload_url, upload_url,
base + r'\?auth=$' base + r'\?auth=$'
) )
...@@ -983,13 +994,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -983,13 +994,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
def assertNodeInformationWithPop(self, parameter_dict): def assertNodeInformationWithPop(self, parameter_dict):
key = 'caddy-frontend-1-node-information-json' key = 'caddy-frontend-1-node-information-json'
node_information_json_dict = {} node_information_json_dict = {}
for k in parameter_dict.keys(): for k in list(parameter_dict.keys()):
if k.startswith('caddy-frontend') and k.endswith( if k.startswith('caddy-frontend') and k.endswith(
'node-information-json'): 'node-information-json'):
node_information_json_dict[k] = parameter_dict.pop(k) node_information_json_dict[k] = parameter_dict.pop(k)
self.assertEqual( self.assertEqual(
[key], [key],
node_information_json_dict.keys() list(node_information_json_dict.keys())
) )
node_information_dict = json.loads(node_information_json_dict[key]) node_information_dict = json.loads(node_information_json_dict[key])
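assertNodeInformationWithPop iterates over list(parameter_dict.keys()) because it pops entries while walking the dict; iterating the live view would raise RuntimeError on Python 3. A self-contained sketch with invented parameter names:

parameter_dict = {
    'caddy-frontend-1-node-information-json': '{"node-id": "demo"}',
    'domain': 'example.com',
}
node_information_json_dict = {}
for k in list(parameter_dict.keys()):  # snapshot the keys before popping
    if k.startswith('caddy-frontend') and k.endswith('node-information-json'):
        node_information_json_dict[k] = parameter_dict.pop(k)
print(sorted(node_information_json_dict), sorted(parameter_dict))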
...@@ -1000,13 +1011,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1000,13 +1011,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
def assertBackendHaproxyStatisticUrl(self, parameter_dict): def assertBackendHaproxyStatisticUrl(self, parameter_dict):
url_key = 'caddy-frontend-1-backend-haproxy-statistic-url' url_key = 'caddy-frontend-1-backend-haproxy-statistic-url'
backend_haproxy_statistic_url_dict = {} backend_haproxy_statistic_url_dict = {}
for key in parameter_dict.keys(): for key in list(parameter_dict.keys()):
if key.startswith('caddy-frontend') and key.endswith( if key.startswith('caddy-frontend') and key.endswith(
'backend-haproxy-statistic-url'): 'backend-haproxy-statistic-url'):
backend_haproxy_statistic_url_dict[key] = parameter_dict.pop(key) backend_haproxy_statistic_url_dict[key] = parameter_dict.pop(key)
self.assertEqual( self.assertEqual(
[url_key], [url_key],
backend_haproxy_statistic_url_dict.keys() list(backend_haproxy_statistic_url_dict.keys())
) )
backend_haproxy_statistic_url = backend_haproxy_statistic_url_dict[url_key] backend_haproxy_statistic_url = backend_haproxy_statistic_url_dict[url_key]
...@@ -1014,7 +1025,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1014,7 +1025,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
backend_haproxy_statistic_url, backend_haproxy_statistic_url,
verify=False, verify=False,
) )
self.assertEqual(httplib.OK, result.status_code) self.assertEqual(http.client.OK, result.status_code)
self.assertIn('testing partition 0', result.text) self.assertIn('testing partition 0', result.text)
self.assertIn('Statistics Report for HAProxy', result.text) self.assertIn('Statistics Report for HAProxy', result.text)
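All the status-code assertions in this file move from httplib to http.client, which is the Python 3 name for the same module; the constants keep their names and integer values. A two-line check of that assumption:

import http.client

assert http.client.OK == 200 and http.client.CREATED == 201
assert http.client.NOT_FOUND == 404 and http.client.SERVICE_UNAVAILABLE == 503
print('httplib constants are available unchanged under http.client')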
...@@ -1075,7 +1086,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1075,7 +1086,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
def parseParameterDict(self, parameter_dict): def parseParameterDict(self, parameter_dict):
parsed_parameter_dict = {} parsed_parameter_dict = {}
for key, value in parameter_dict.items(): for key, value in list(parameter_dict.items()):
if key in [ if key in [
'rejected-slave-dict', 'rejected-slave-dict',
'warning-slave-dict', 'warning-slave-dict',
...@@ -1218,8 +1229,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1218,8 +1229,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
@classmethod @classmethod
def requestSlaves(cls): def requestSlaves(cls):
for slave_reference, partition_parameter_kw in cls\ for slave_reference, partition_parameter_kw in list(
.getSlaveParameterDictDict().items(): cls.getSlaveParameterDictDict().items()):
software_url = cls.getSoftwareURL() software_url = cls.getSoftwareURL()
software_type = cls.getInstanceSoftwareType() software_type = cls.getInstanceSoftwareType()
cls.logger.debug( cls.logger.debug(
...@@ -1265,8 +1276,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1265,8 +1276,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
def getSlaveConnectionParameterDictList(cls): def getSlaveConnectionParameterDictList(cls):
parameter_dict_list = [] parameter_dict_list = []
for slave_reference, partition_parameter_kw in cls\ for slave_reference, partition_parameter_kw in list(
.getSlaveParameterDictDict().items(): cls.getSlaveParameterDictDict().items()):
parameter_dict_list.append(cls.requestSlaveInstance( parameter_dict_list.append(cls.requestSlaveInstance(
partition_reference=slave_reference, partition_reference=slave_reference,
partition_parameter_kw=partition_parameter_kw, partition_parameter_kw=partition_parameter_kw,
...@@ -1303,8 +1314,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1303,8 +1314,8 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
def updateSlaveConnectionParameterDictDict(cls): def updateSlaveConnectionParameterDictDict(cls):
cls.slave_connection_parameter_dict_dict = {} cls.slave_connection_parameter_dict_dict = {}
# run partition for slaves to be setup # run partition for slaves to be setup
for slave_reference, partition_parameter_kw in cls\ for slave_reference, partition_parameter_kw in list(
.getSlaveParameterDictDict().items(): cls.getSlaveParameterDictDict().items()):
slave_instance = cls.requestSlaveInstance( slave_instance = cls.requestSlaveInstance(
partition_reference=slave_reference, partition_reference=slave_reference,
partition_parameter_kw=partition_parameter_kw, partition_parameter_kw=partition_parameter_kw,
...@@ -1329,7 +1340,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1329,7 +1340,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
self.assertKedifaKeysWithPop(parameter_dict, '') self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertNodeInformationWithPop(parameter_dict) self.assertNodeInformationWithPop(parameter_dict)
if hostname is None: if hostname is None:
hostname = reference.translate(None, '_-').lower() hostname = reference.replace('_', '').replace('-', '').lower()
expected_parameter_dict.update(**{ expected_parameter_dict.update(**{
'domain': '%s.example.com' % (hostname,), 'domain': '%s.example.com' % (hostname,),
'replication_number': '1', 'replication_number': '1',
...@@ -1351,7 +1362,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1351,7 +1362,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
self.instance_path, '*', 'var', 'log', 'httpd', log_name self.instance_path, '*', 'var', 'log', 'httpd', log_name
))[0] ))[0]
self.assertRegexpMatches( self.assertRegex(
open(log_file, 'r').readlines()[-1], open(log_file, 'r').readlines()[-1],
log_regexp) log_regexp)
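The hostname normalisation above replaces Python 2's str.translate(None, '_-') (which deleted characters) with chained replace() calls. A short sketch showing both spellings agree; the reference value is made up:

reference = 'Test_Slave-1'  # hypothetical slave reference
via_replace = reference.replace('_', '').replace('-', '').lower()
via_translate = reference.translate(str.maketrans('', '', '_-')).lower()  # Python 3 deletion form
assert via_replace == via_translate == 'testslave1'
print(via_replace)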
...@@ -1477,11 +1488,11 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest( ...@@ -1477,11 +1488,11 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
backend_client_caucase_url, backend_client_ca_pem, backend_client_caucase_url, backend_client_ca_pem,
backend_client_csr_pem) backend_client_csr_pem)
kedifa_key_file = os.path.join(cls.working_directory, 'kedifa-key.pem') kedifa_key_file = os.path.join(cls.working_directory, 'kedifa-key.pem')
with open(kedifa_key_file, 'w') as fh: with open(kedifa_key_file, 'wb') as fh:
fh.write(kedifa_crt_pem + kedifa_key_pem) fh.write(kedifa_crt_pem + kedifa_key_pem)
backend_client_key_file = os.path.join( backend_client_key_file = os.path.join(
cls.working_directory, 'backend-client-key.pem') cls.working_directory, 'backend-client-key.pem')
with open(backend_client_key_file, 'w') as fh: with open(backend_client_key_file, 'wb') as fh:
fh.write(backend_client_crt_pem + backend_client_key_pem) fh.write(backend_client_crt_pem + backend_client_key_pem)
# Simulate human: create service keys # Simulate human: create service keys
...@@ -1804,7 +1815,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -1804,7 +1815,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
}, },
'disabled-cookie-list': { 'disabled-cookie-list': {
'url': cls.backend_url, 'url': cls.backend_url,
'disabled-cookie-list': 'Chocolate Vanilia', # Note: Do not reorder the entries below, see comments in
# test_disabled_cookie_list
'disabled-cookie-list': 'Coconut Chocolate Vanilia',
}, },
'monitor-ipv4-test': { 'monitor-ipv4-test': {
'monitor-ipv4-test': 'monitor-ipv4-test', 'monitor-ipv4-test': 'monitor-ipv4-test',
...@@ -1949,13 +1962,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -1949,13 +1962,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
with lzma.open( with lzma.open(
os.path.join(ats_logrotate_dir, old_file_name + '.xz')) as fh: os.path.join(ats_logrotate_dir, old_file_name + '.xz')) as fh:
self.assertEqual( self.assertEqual(
'old', 'old'.encode(),
fh.read() fh.read()
) )
with lzma.open( with lzma.open(
os.path.join(ats_logrotate_dir, older_file_name + '.xz')) as fh: os.path.join(ats_logrotate_dir, older_file_name + '.xz')) as fh:
self.assertEqual( self.assertEqual(
'older', 'older'.encode(),
fh.read() fh.read()
) )
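The logrotate assertions above compare against encoded literals because lzma.open yields bytes in binary mode. A standalone in-memory round-trip, assuming nothing beyond the standard library:

import io
import lzma

buf = io.BytesIO()
with lzma.open(buf, 'w') as fh:          # 'w' is binary write, so bytes in
    fh.write('old'.encode())
buf.seek(0)
with lzma.open(buf) as fh:               # default mode 'rb', so bytes out
    assert fh.read() == b'old'
print('xz content round-trips as bytes')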
...@@ -2072,12 +2085,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2072,12 +2085,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -2089,7 +2102,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2089,7 +2102,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
# check that 404 is as configured # check that 404 is as configured
result_missing = fakeHTTPSResult( result_missing = fakeHTTPSResult(
'forsuredoesnotexists.example.com', '') 'forsuredoesnotexists.example.com', '')
self.assertEqual(httplib.NOT_FOUND, result_missing.status_code) self.assertEqual(http.client.NOT_FOUND, result_missing.status_code)
self.assertEqual( self.assertEqual(
"""<html> """<html>
<head> <head>
...@@ -2150,7 +2163,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2150,7 +2163,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
) )
via_id = '%s-%s' % ( via_id = '%s-%s' % (
self.node_information_dict['node-id'], self.node_information_dict['node-id'],
self.node_information_dict['version-hash-history'].keys()[0]) list(self.node_information_dict['version-hash-history'].keys())[0])
if cached: if cached:
self.assertEqual( self.assertEqual(
[ [
...@@ -2245,7 +2258,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2245,7 +2258,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -2366,7 +2379,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2366,7 +2379,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqual( self.assertEqual(
result.status_code, result.status_code,
httplib.BAD_GATEWAY http.client.BAD_GATEWAY
) )
finally: finally:
self.stopAuthenticatedServerProcess() self.stopAuthenticatedServerProcess()
...@@ -2408,7 +2421,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2408,7 +2421,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -2551,7 +2564,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2551,7 +2564,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.MOVED_PERMANENTLY, http.client.MOVED_PERMANENTLY,
result.status_code result.status_code
) )
...@@ -2707,7 +2720,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2707,7 +2720,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = self.customdomain_ca_certificate_pem + \ data = self.customdomain_ca_certificate_pem + \
self.customdomain_ca_key_pem + \ self.customdomain_ca_key_pem + \
...@@ -2717,7 +2730,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2717,7 +2730,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -2734,7 +2747,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2734,7 +2747,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'_custom_domain_ssl_crt_ssl_key_ssl_ca_crt.pem')) '_custom_domain_ssl_crt_ssl_key_ssl_ca_crt.pem'))
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file, 'rb') as out:
self.assertEqual(data, out.read()) self.assertEqual(data, out.read())
def test_ssl_ca_crt_only(self): def test_ssl_ca_crt_only(self):
...@@ -2743,7 +2756,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2743,7 +2756,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = self.ca.certificate_pem data = self.ca.certificate_pem
...@@ -2752,7 +2765,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2752,7 +2765,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.UNPROCESSABLE_ENTITY, upload.status_code) self.assertEqual(http.client.UNPROCESSABLE_ENTITY, upload.status_code)
self.assertEqual('Key incorrect', upload.text) self.assertEqual('Key incorrect', upload.text)
def test_ssl_ca_crt_garbage(self): def test_ssl_ca_crt_garbage(self):
...@@ -2762,19 +2775,19 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2762,19 +2775,19 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
_, ca_key_pem, csr, _ = createCSR( _, ca_key_pem, csr, _ = createCSR(
parameter_dict['domain']) parameter_dict['domain'])
_, ca_certificate_pem = self.ca.signCSR(csr) _, ca_certificate_pem = self.ca.signCSR(csr)
data = ca_certificate_pem + ca_key_pem + 'some garbage' data = ca_certificate_pem + ca_key_pem + 'some garbage'.encode()
upload = requests.put( upload = requests.put(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -2792,7 +2805,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2792,7 +2805,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'_ssl_ca_crt_garbage.pem')) '_ssl_ca_crt_garbage.pem'))
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file, 'rb') as out:
self.assertEqual(data, out.read()) self.assertEqual(data, out.read())
def test_ssl_ca_crt_does_not_match(self): def test_ssl_ca_crt_does_not_match(self):
...@@ -2801,7 +2814,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2801,7 +2814,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = self.certificate_pem + self.key_pem + self.ca.certificate_pem data = self.certificate_pem + self.key_pem + self.ca.certificate_pem
...@@ -2810,7 +2823,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2810,7 +2823,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -2827,7 +2840,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2827,7 +2840,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'_ssl_ca_crt_does_not_match.pem')) '_ssl_ca_crt_does_not_match.pem'))
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file, 'rb') as out:
self.assertEqual(data, out.read()) self.assertEqual(data, out.read())
def test_https_only(self): def test_https_only(self):
...@@ -2906,14 +2919,14 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2906,14 +2919,14 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = self.customdomain_certificate_pem + \ data = self.customdomain_certificate_pem + \
self.customdomain_key_pem self.customdomain_key_pem
upload = requests.put( upload = requests.put(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -2954,7 +2967,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -2954,7 +2967,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3073,7 +3086,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3073,7 +3086,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3114,7 +3127,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3114,7 +3127,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
headers={'Accept-Encoding': 'gzip, deflate'}) headers={'Accept-Encoding': 'gzip, deflate'})
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3234,7 +3247,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3234,7 +3247,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json() j = result.json()
except Exception: except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,)) raise ValueError('JSON decode problem in:\n%s' % (result.text,))
parsed = urlparse.urlparse(self.backend_url) parsed = urllib.parse.urlparse(self.backend_url)
self.assertBackendHeaders( self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc', j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host']) ignore_header_list=['Host'])
...@@ -3340,7 +3353,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3340,7 +3353,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json() j = result.json()
except Exception: except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,)) raise ValueError('JSON decode problem in:\n%s' % (result.text,))
parsed = urlparse.urlparse(self.backend_url) parsed = urllib.parse.urlparse(self.backend_url)
self.assertBackendHeaders( self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc', j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host']) ignore_header_list=['Host'])
...@@ -3406,7 +3419,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3406,7 +3419,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3428,7 +3441,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3428,7 +3441,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3449,7 +3462,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3449,7 +3462,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.SERVICE_UNAVAILABLE, http.client.SERVICE_UNAVAILABLE,
result.status_code result.status_code
) )
...@@ -3457,7 +3470,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3457,7 +3470,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -3496,7 +3509,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3496,7 +3509,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -3517,7 +3530,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3517,7 +3530,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.SERVICE_UNAVAILABLE, http.client.SERVICE_UNAVAILABLE,
result.status_code result.status_code
) )
...@@ -3531,12 +3544,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3531,12 +3544,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -3568,12 +3581,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3568,12 +3581,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -3606,13 +3619,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3606,13 +3619,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -3694,7 +3707,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3694,7 +3707,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-' 'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-'
'revalidate=3600, stale-if-error=3600'}) 'revalidate=3600, stale-if-error=3600'})
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -3733,7 +3746,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3733,7 +3746,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-' 'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-'
'revalidate=3600, stale-if-error=3600'}) 'revalidate=3600, stale-if-error=3600'})
self.assertEqual(httplib.OK, result.status_code) self.assertEqual(http.client.OK, result.status_code)
self.assertEqualResultJson(result, 'Path', '/HTTPS/test') self.assertEqualResultJson(result, 'Path', '/HTTPS/test')
self.assertResponseHeaders(result, cached=True) self.assertResponseHeaders(result, cached=True)
...@@ -3798,7 +3811,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3798,7 +3811,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
with open(ats_log_file) as fh: with open(ats_log_file) as fh:
ats_log = fh.read() ats_log = fh.read()
self.assertRegexpMatches(ats_log, direct_pattern) self.assertRegex(ats_log, direct_pattern)
# END: Check that squid.log is correctly filled in # END: Check that squid.log is correctly filled in
def _hack_ats(self, max_stale_age): def _hack_ats(self, max_stale_age):
...@@ -3862,10 +3875,10 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3862,10 +3875,10 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
max_age = int(max_stale_age / 2.) max_age = int(max_stale_age / 2.)
# body_200 is big enough to trigger # body_200 is big enough to trigger
# https://github.com/apache/trafficserver/issues/7880 # https://github.com/apache/trafficserver/issues/7880
body_200 = b'Body 200' * 500 body_200 = 'Body 200' * 500
body_502 = b'Body 502' body_502 = 'Body 502'
body_502_new = b'Body 502 new' body_502_new = 'Body 502 new'
body_200_new = b'Body 200 new' body_200_new = 'Body 200 new'
self.addCleanup(self._unhack_ats) self.addCleanup(self._unhack_ats)
self._hack_ats(max_stale_age) self._hack_ats(max_stale_age)
...@@ -3875,12 +3888,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3875,12 +3888,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
result = requests.put(backend_url + path, headers={ result = requests.put(backend_url + path, headers={
'X-Reply-Header-Cache-Control': 'max-age=%s, public' % (max_age,), 'X-Reply-Header-Cache-Control': 'max-age=%s, public' % (max_age,),
'X-Reply-Status-Code': status_code, 'X-Reply-Status-Code': status_code,
'X-Reply-Body': base64.b64encode(body), 'X-Reply-Body': base64.b64encode(body.encode()),
# drop Content-Length header to ensure # drop Content-Length header to ensure
# https://github.com/apache/trafficserver/issues/7880 # https://github.com/apache/trafficserver/issues/7880
'X-Drop-Header': 'Content-Length', 'X-Drop-Header': 'Content-Length',
}) })
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
def checkResult(status_code, body): def checkResult(status_code, body):
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -3892,39 +3905,39 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3892,39 +3905,39 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
# backend returns something correctly # backend returns something correctly
configureResult('200', body_200) configureResult('200', body_200)
checkResult(httplib.OK, body_200) checkResult(http.client.OK, body_200)
configureResult('502', body_502) configureResult('502', body_502)
time.sleep(1) time.sleep(1)
# even if backend returns 502, ATS gives cached result # even if backend returns 502, ATS gives cached result
checkResult(httplib.OK, body_200) checkResult(http.client.OK, body_200)
# interesting moment, time is between max_age and max_stale_age, triggers # interesting moment, time is between max_age and max_stale_age, triggers
# https://github.com/apache/trafficserver/issues/7880 # https://github.com/apache/trafficserver/issues/7880
time.sleep(max_age + 1) time.sleep(max_age + 1)
checkResult(httplib.OK, body_200) checkResult(http.client.OK, body_200)
# max_stale_age passed, time to return 502 from the backend # max_stale_age passed, time to return 502 from the backend
time.sleep(max_stale_age + 2) time.sleep(max_stale_age + 2)
checkResult(httplib.BAD_GATEWAY, body_502) checkResult(http.client.BAD_GATEWAY, body_502)
configureResult('502', body_502_new) configureResult('502', body_502_new)
time.sleep(1) time.sleep(1)
# even if there is a new negative response on the backend, the old one is # even if there is a new negative response on the backend, the old one is
# served from the cache # served from the cache
checkResult(httplib.BAD_GATEWAY, body_502) checkResult(http.client.BAD_GATEWAY, body_502)
time.sleep(max_age + 2) time.sleep(max_age + 2)
# now as max-age of negative response passed, the new one is served # now as max-age of negative response passed, the new one is served
checkResult(httplib.BAD_GATEWAY, body_502_new) checkResult(http.client.BAD_GATEWAY, body_502_new)
configureResult('200', body_200_new) configureResult('200', body_200_new)
time.sleep(1) time.sleep(1)
checkResult(httplib.BAD_GATEWAY, body_502_new) checkResult(http.client.BAD_GATEWAY, body_502_new)
time.sleep(max_age + 2) time.sleep(max_age + 2)
# backend is back to normal, as soon as negative response max-age passed # backend is back to normal, as soon as negative response max-age passed
# the new response is served # the new response is served
checkResult(httplib.OK, body_200_new) checkResult(http.client.OK, body_200_new)
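configureResult above now base64-encodes the reply body before putting it into the X-Reply-Body header, since base64.b64encode only accepts bytes on Python 3; the backend decodes it back to text in do_PUT. A minimal standalone round-trip of that encoding:

import base64

body = 'Body 200' * 500
header_value = base64.b64encode(body.encode())        # bytes, safe to carry in a header
round_trip = base64.b64decode(header_value).decode()  # what the backend stores as config['Body']
assert round_trip == body
print(len(header_value))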
@skip('Feature postponed') @skip('Feature postponed')
def test_enable_cache_stale_if_error_respected(self): def test_enable_cache_stale_if_error_respected(self):
...@@ -3976,7 +3989,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3976,7 +3989,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
}, },
source_ip=source_ip source_ip=source_ip
) )
self.assertEqual(result.status_code, httplib.BAD_GATEWAY) self.assertEqual(result.status_code, http.client.BAD_GATEWAY)
finally: finally:
self.startServerProcess() self.startServerProcess()
# END: check stale-if-error support # END: check stale-if-error support
...@@ -3994,7 +4007,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3994,7 +4007,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
# ATS timed out # ATS timed out
self.assertEqual( self.assertEqual(
httplib.GATEWAY_TIMEOUT, http.client.GATEWAY_TIMEOUT,
result.status_code result.status_code
) )
...@@ -4094,7 +4107,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4094,7 +4107,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json() j = result.json()
except Exception: except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,)) raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('pragma' in j['Incoming Headers'].keys()) self.assertFalse('pragma' in list(j['Incoming Headers'].keys()))
def test_enable_cache_disable_via_header(self): def test_enable_cache_disable_via_header(self):
parameter_dict = self.assertSlaveBase('enable_cache-disable-via-header') parameter_dict = self.assertSlaveBase('enable_cache-disable-via-header')
...@@ -4313,7 +4326,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4313,7 +4326,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
headers={'Accept-Encoding': 'gzip, deflate'}) headers={'Accept-Encoding': 'gzip, deflate'})
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -4328,7 +4341,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4328,7 +4341,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
headers={'Accept-Encoding': 'deflate'}) headers={'Accept-Encoding': 'deflate'})
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -4342,7 +4355,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4342,7 +4355,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -4356,7 +4369,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4356,7 +4369,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result.status_code result.status_code
) )
...@@ -4368,24 +4381,34 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4368,24 +4381,34 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
def test_disabled_cookie_list(self): def test_disabled_cookie_list(self):
parameter_dict = self.assertSlaveBase('disabled-cookie-list') parameter_dict = self.assertSlaveBase('disabled-cookie-list')
result = fakeHTTPSResult( replacement_dict = dict(
parameter_dict['domain'], 'test-path', domain=parameter_dict['domain'], ip=TEST_IP, port=HTTPS_PORT)
cookies=dict( curl_command = [
Chocolate='absent', 'curl', '-v', '-k',
Vanilia='absent', '-H', 'Host: %(domain)s' % replacement_dict,
Coffee='present' '--resolve', '%(domain)s:%(port)s:%(ip)s' % replacement_dict,
)) '--cookie',
# Note: Cookie order is extremely important here, do not change
# or the test will start to pass incorrectly
'Coconut=absent; Chocolate=absent; Coffee=present; Vanilia=absent',
'https://%(domain)s:%(port)s/' % replacement_dict,
]
prc = subprocess.Popen(
curl_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
out, err = prc.communicate()
self.assertEqual( self.assertEqual(
self.certificate_pem, prc.returncode, 0,
der2pem(result.peercert)) "Problem running %r. Output:\n%s\nError:\n%s" % (
curl_command, out, err))
self.assertEqualResultJson(result, 'Path', '/test-path') # self check - were the cookies sent in required order?
self.assertIn(
self.assertBackendHeaders( 'ookie: Coconut=absent; Chocolate=absent; Coffee=present; '
result.json()['Incoming Headers'], parameter_dict['domain']) 'Vanilia=absent',
err.decode())
# real test - all configured cookies are dropped
self.assertEqual( self.assertEqual(
'Coffee=present', result.json()['Incoming Headers']['cookie']) 'Coffee=present', json.loads(out)['Incoming Headers']['cookie'])
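The rewritten cookie test drives curl directly so the cookie order is preserved exactly, pinning the test domain to the frontend with --resolve and reading the backend's JSON echo from stdout. A hedged sketch of that invocation; domain, ip and port are placeholders for parameter_dict['domain'], TEST_IP and HTTPS_PORT:

import json
import subprocess

domain, ip, port = 'disabledcookielist.example.com', '127.0.0.1', 4443  # placeholders
curl_command = [
    'curl', '-v', '-k',
    '-H', 'Host: %s' % domain,
    '--resolve', '%s:%s:%s' % (domain, port, ip),  # keep Host/SNI while hitting the frontend IP
    '--cookie', 'Coconut=absent; Chocolate=absent; Coffee=present; Vanilia=absent',
    'https://%s:%s/' % (domain, port),
]
prc = subprocess.Popen(curl_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = prc.communicate()
if prc.returncode == 0:
    # the test backend echoes its incoming headers as JSON on stdout
    print(json.loads(out)['Incoming Headers'].get('cookie'))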
def test_https_url(self): def test_https_url(self):
parameter_dict = self.assertSlaveBase('url_https-url') parameter_dict = self.assertSlaveBase('url_https-url')
...@@ -4409,7 +4432,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4409,7 +4432,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'test-path/deep/.././deeper') 'test-path/deep/.././deeper')
self.assertEqual( self.assertEqual(
httplib.FOUND, http.client.FOUND,
result_http.status_code result_http.status_code
) )
...@@ -4498,13 +4521,13 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4498,13 +4521,13 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'caddy-frontend-2-node-information-json' 'caddy-frontend-2-node-information-json'
] ]
node_information_json_dict = {} node_information_json_dict = {}
for k in parameter_dict.keys(): for k in list(parameter_dict.keys()):
if k.startswith('caddy-frontend') and k.endswith( if k.startswith('caddy-frontend') and k.endswith(
'node-information-json'): 'node-information-json'):
node_information_json_dict[k] = parameter_dict.pop(k) node_information_json_dict[k] = parameter_dict.pop(k)
self.assertEqual( self.assertEqual(
key_list, key_list,
node_information_json_dict.keys() list(node_information_json_dict.keys())
) )
node_information_dict = json.loads(node_information_json_dict[key_list[0]]) node_information_dict = json.loads(node_information_json_dict[key_list[0]])
...@@ -4534,7 +4557,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4534,7 +4557,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin):
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual(httplib.FOUND, result_http.status_code) self.assertEqual(http.client.FOUND, result_http.status_code)
# prove 2nd frontend by inspection of the instance # prove 2nd frontend by inspection of the instance
slave_configuration_name = '_replicate.conf' slave_configuration_name = '_replicate.conf'
...@@ -5107,51 +5130,51 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5107,51 +5130,51 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
'rejected-slave-dict': { 'rejected-slave-dict': {
}, },
'warning-list': [ 'warning-list': [
u'apache-certificate is obsolete, please use master-key-upload-url', 'apache-certificate is obsolete, please use master-key-upload-url',
u'apache-key is obsolete, please use master-key-upload-url', 'apache-key is obsolete, please use master-key-upload-url',
], ],
'warning-slave-dict': { 'warning-slave-dict': {
u'_custom_domain_ssl_crt_ssl_key': [ '_custom_domain_ssl_crt_ssl_key': [
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url' 'ssl_key is obsolete, please use key-upload-url'
], ],
u'_custom_domain_ssl_crt_ssl_key_ssl_ca_crt': [ '_custom_domain_ssl_crt_ssl_key_ssl_ca_crt': [
u'ssl_ca_crt is obsolete, please use key-upload-url', 'ssl_ca_crt is obsolete, please use key-upload-url',
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url' 'ssl_key is obsolete, please use key-upload-url'
], ],
u'_ssl_ca_crt_does_not_match': [ '_ssl_ca_crt_does_not_match': [
u'ssl_ca_crt is obsolete, please use key-upload-url', 'ssl_ca_crt is obsolete, please use key-upload-url',
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
u'_ssl_ca_crt_garbage': [ '_ssl_ca_crt_garbage': [
u'ssl_ca_crt is obsolete, please use key-upload-url', 'ssl_ca_crt is obsolete, please use key-upload-url',
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
# u'_ssl_ca_crt_only': [ # u'_ssl_ca_crt_only': [
# u'ssl_ca_crt is obsolete, please use key-upload-url', # u'ssl_ca_crt is obsolete, please use key-upload-url',
# ], # ],
u'_ssl_from_slave': [ '_ssl_from_slave': [
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
u'_ssl_from_slave_kedifa_overrides': [ '_ssl_from_slave_kedifa_overrides': [
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
# u'_ssl_key-ssl_crt-unsafe': [ # u'_ssl_key-ssl_crt-unsafe': [
# u'ssl_key is obsolete, please use key-upload-url', # u'ssl_key is obsolete, please use key-upload-url',
# u'ssl_crt is obsolete, please use key-upload-url', # u'ssl_crt is obsolete, please use key-upload-url',
# ], # ],
u'_type-notebook-ssl_from_slave': [ '_type-notebook-ssl_from_slave': [
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
u'_type-notebook-ssl_from_slave_kedifa_overrides': [ '_type-notebook-ssl_from_slave_kedifa_overrides': [
u'ssl_crt is obsolete, please use key-upload-url', 'ssl_crt is obsolete, please use key-upload-url',
u'ssl_key is obsolete, please use key-upload-url', 'ssl_key is obsolete, please use key-upload-url',
], ],
} }
} }
...@@ -5192,7 +5215,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5192,7 +5215,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = certificate_pem + key_pem data = certificate_pem + key_pem
...@@ -5200,7 +5223,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5200,7 +5223,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
result = fakeHTTPSResult( result = fakeHTTPSResult(
...@@ -5255,7 +5278,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5255,7 +5278,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = certificate_pem + key_pem data = certificate_pem + key_pem
...@@ -5263,7 +5286,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5263,7 +5286,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
...@@ -5310,7 +5333,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5310,7 +5333,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = certificate_pem + key_pem data = certificate_pem + key_pem
...@@ -5318,7 +5341,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5318,7 +5341,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
...@@ -5377,7 +5400,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5377,7 +5400,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
auth = requests.get( auth = requests.get(
self.current_generate_auth, self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code) self.assertEqual(http.client.CREATED, auth.status_code)
data = certificate_pem + key_pem data = certificate_pem + key_pem
...@@ -5385,7 +5408,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5385,7 +5408,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.current_upload_url + auth.text, self.current_upload_url + auth.text,
data=data, data=data,
verify=self.kedifa_caucase_ca_certificate_file) verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code) self.assertEqual(http.client.CREATED, upload.status_code)
self.runKedifaUpdater() self.runKedifaUpdater()
...@@ -5443,8 +5466,10 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5443,8 +5466,10 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file) as out:
expected = self.customdomain_ca_certificate_pem + '\n' + \ expected = \
self.ca.certificate_pem + '\n' + self.customdomain_ca_key_pem self.customdomain_ca_certificate_pem.decode() + '\n' + \
self.ca.certificate_pem.decode() + '\n' + \
self.customdomain_ca_key_pem.decode()
self.assertEqual( self.assertEqual(
expected, expected,
out.read() out.read()
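The expected certificate bundle is now assembled from decoded values: under Python 3 the PEM material held by the test is bytes, while open(certificate_file) without 'b' returns text, so both sides of the comparison must be str. A minimal sketch of the pattern with placeholder PEM bytes (the real values come from the test's CA):

certificate_pem = b"-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n"
ca_certificate_pem = b"-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n"
key_pem = b"-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n"

# On Python 3, bytes + '\n' raises TypeError, hence the explicit .decode() calls.
expected = certificate_pem.decode() + '\n' + \
    ca_certificate_pem.decode() + '\n' + \
    key_pem.decode()
assert isinstance(expected, str)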
...@@ -5487,8 +5512,9 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5487,8 +5512,9 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file) as out:
expected = customdomain_ca_certificate_pem + '\n' + ca.certificate_pem \ expected = customdomain_ca_certificate_pem.decode() + '\n' + \
+ '\n' + customdomain_ca_key_pem ca.certificate_pem.decode() + '\n' + \
customdomain_ca_key_pem.decode()
self.assertEqual( self.assertEqual(
expected, expected,
out.read() out.read()
...@@ -5538,8 +5564,9 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5538,8 +5564,9 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqual(1, len(certificate_file_list)) self.assertEqual(1, len(certificate_file_list))
certificate_file = certificate_file_list[0] certificate_file = certificate_file_list[0]
with open(certificate_file) as out: with open(certificate_file) as out:
expected = self.certificate_pem + '\n' + self.ca.certificate_pem + \ expected = self.certificate_pem.decode() + '\n' + \
'\n' + self.key_pem self.ca.certificate_pem.decode() + '\n' + \
self.key_pem.decode()
self.assertEqual( self.assertEqual(
expected, expected,
out.read() out.read()
...@@ -5602,8 +5629,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate( ...@@ -5602,8 +5629,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
'rejected-slave-dict': {}, 'rejected-slave-dict': {},
'slave-amount': '1', 'slave-amount': '1',
'warning-list': [ 'warning-list': [
u'apache-certificate is obsolete, please use master-key-upload-url', 'apache-certificate is obsolete, please use master-key-upload-url',
u'apache-key is obsolete, please use master-key-upload-url', 'apache-key is obsolete, please use master-key-upload-url',
], ],
} }
...@@ -5710,11 +5737,11 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -5710,11 +5737,11 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.OK, result.status_code) self.assertEqual(http.client.OK, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual(httplib.FOUND, result_http.status_code) self.assertEqual(http.client.FOUND, result_http.status_code)
configuration_file = glob.glob( configuration_file = glob.glob(
os.path.join( os.path.join(
...@@ -5736,11 +5763,11 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -5736,11 +5763,11 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.OK, result.status_code) self.assertEqual(http.client.OK, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual(httplib.FOUND, result_http.status_code) self.assertEqual(http.client.FOUND, result_http.status_code)
configuration_file = glob.glob( configuration_file = glob.glob(
os.path.join( os.path.join(
...@@ -5935,9 +5962,9 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -5935,9 +5962,9 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
result_json = result.json() result_json = result.json()
self.assertEqual( self.assertEqual(
{ {
u'_SITE_4': [u"custom_domain 'duplicate.example.com' clashes"], '_SITE_4': ["custom_domain 'duplicate.example.com' clashes"],
u'_SITE_2': [u"custom_domain 'duplicate.example.com' clashes"], '_SITE_2': ["custom_domain 'duplicate.example.com' clashes"],
u'_SITE_3': [u"server-alias 'duplicate.example.com' clashes"] '_SITE_3': ["server-alias 'duplicate.example.com' clashes"]
}, },
result_json result_json
) )
...@@ -5964,7 +5991,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -5964,7 +5991,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
'rejected-slave-dict': { 'rejected-slave-dict': {
'_HTTPS-URL': ['slave https-url "https://[fd46::c2ae]:!py!u\'123123\'"' '_HTTPS-URL': ['slave https-url "https://[fd46::c2ae]:!py!u\'123123\'"'
' invalid'], ' invalid'],
'_URL': [u'slave url "https://[fd46::c2ae]:!py!u\'123123\'" invalid'], '_URL': ['slave url "https://[fd46::c2ae]:!py!u\'123123\'" invalid'],
'_SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_DAMAGED': [ '_SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_DAMAGED': [
'ssl_proxy_ca_crt is invalid' 'ssl_proxy_ca_crt is invalid'
], ],
...@@ -6204,7 +6231,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6204,7 +6231,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual( self.assertEqual(
httplib.MOVED_PERMANENTLY, http.client.MOVED_PERMANENTLY,
result.status_code result.status_code
) )
...@@ -6224,11 +6251,11 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6224,11 +6251,11 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual(httplib.FOUND, result_http.status_code) self.assertEqual(http.client.FOUND, result_http.status_code)
monitor_file = glob.glob( monitor_file = glob.glob(
os.path.join( os.path.join(
...@@ -6255,11 +6282,11 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6255,11 +6282,11 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
self.certificate_pem, self.certificate_pem,
der2pem(result.peercert)) der2pem(result.peercert))
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code) self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult( result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path') parameter_dict['domain'], 'test-path')
self.assertEqual(httplib.FOUND, result_http.status_code) self.assertEqual(http.client.FOUND, result_http.status_code)
monitor_file = glob.glob( monitor_file = glob.glob(
os.path.join( os.path.join(
...@@ -6548,98 +6575,98 @@ class TestPassedRequestParameter(HttpFrontendTestCase): ...@@ -6548,98 +6575,98 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
'kedifa'].pop('monitor-password') 'kedifa'].pop('monitor-password')
) )
backend_client_caucase_url = u'http://[%s]:8990' % (self._ipv6_address,) backend_client_caucase_url = 'http://[%s]:8990' % (self._ipv6_address,)
kedifa_caucase_url = u'http://[%s]:15090' % (self._ipv6_address,) kedifa_caucase_url = 'http://[%s]:15090' % (self._ipv6_address,)
expected_partition_parameter_dict_dict = { expected_partition_parameter_dict_dict = {
'caddy-frontend-1': { 'caddy-frontend-1': {
'X-software_release_url': base_software_url, 'X-software_release_url': base_software_url,
u'apache-certificate': unicode(self.certificate_pem), 'apache-certificate': self.certificate_pem.decode(),
u'apache-key': unicode(self.key_pem), 'apache-key': self.key_pem.decode(),
u'authenticate-to-backend': u'True', 'authenticate-to-backend': 'True',
u'backend-client-caucase-url': backend_client_caucase_url, 'backend-client-caucase-url': backend_client_caucase_url,
u'backend-connect-retries': u'1', 'backend-connect-retries': '1',
u'backend-connect-timeout': u'2', 'backend-connect-timeout': '2',
u'ciphers': u'ciphers', 'ciphers': 'ciphers',
u'cluster-identification': u'testing partition 0', 'cluster-identification': 'testing partition 0',
u'domain': u'example.com', 'domain': 'example.com',
u'enable-http2-by-default': u'True', 'enable-http2-by-default': 'True',
u'extra_slave_instance_list': u'[]', 'extra_slave_instance_list': '[]',
u'frontend-name': u'caddy-frontend-1', 'frontend-name': 'caddy-frontend-1',
u'global-disable-http2': u'True', 'global-disable-http2': 'True',
u'kedifa-caucase-url': kedifa_caucase_url, 'kedifa-caucase-url': kedifa_caucase_url,
u'monitor-cors-domains': u'monitor.app.officejs.com', 'monitor-cors-domains': 'monitor.app.officejs.com',
u'monitor-httpd-port': 8411, 'monitor-httpd-port': 8411,
u'monitor-username': u'admin', 'monitor-username': 'admin',
u'mpm-graceful-shutdown-timeout': u'2', 'mpm-graceful-shutdown-timeout': '2',
u'plain_http_port': '11080', 'plain_http_port': '11080',
u'port': '11443', 'port': '11443',
u'ram-cache-size': u'512K', 'ram-cache-size': '512K',
u're6st-verification-url': u're6st-verification-url', 're6st-verification-url': 're6st-verification-url',
u'request-timeout': u'100', 'request-timeout': '100',
u'slave-kedifa-information': u'{}' 'slave-kedifa-information': '{}'
}, },
'caddy-frontend-2': { 'caddy-frontend-2': {
'X-software_release_url': self.frontend_2_sr, 'X-software_release_url': self.frontend_2_sr,
u'apache-certificate': unicode(self.certificate_pem), 'apache-certificate': self.certificate_pem.decode(),
u'apache-key': unicode(self.key_pem), 'apache-key': self.key_pem.decode(),
u'authenticate-to-backend': u'True', 'authenticate-to-backend': 'True',
u'backend-client-caucase-url': backend_client_caucase_url, 'backend-client-caucase-url': backend_client_caucase_url,
u'backend-connect-retries': u'1', 'backend-connect-retries': '1',
u'backend-connect-timeout': u'2', 'backend-connect-timeout': '2',
u'ciphers': u'ciphers', 'ciphers': 'ciphers',
u'cluster-identification': u'testing partition 0', 'cluster-identification': 'testing partition 0',
u'domain': u'example.com', 'domain': 'example.com',
u'enable-http2-by-default': u'True', 'enable-http2-by-default': 'True',
u'extra_slave_instance_list': u'[]', 'extra_slave_instance_list': '[]',
u'frontend-name': u'caddy-frontend-2', 'frontend-name': 'caddy-frontend-2',
u'global-disable-http2': u'True', 'global-disable-http2': 'True',
u'kedifa-caucase-url': kedifa_caucase_url, 'kedifa-caucase-url': kedifa_caucase_url,
u'monitor-cors-domains': u'monitor.app.officejs.com', 'monitor-cors-domains': 'monitor.app.officejs.com',
u'monitor-httpd-port': 8412, 'monitor-httpd-port': 8412,
u'monitor-username': u'admin', 'monitor-username': 'admin',
u'mpm-graceful-shutdown-timeout': u'2', 'mpm-graceful-shutdown-timeout': '2',
u'plain_http_port': u'11080', 'plain_http_port': '11080',
u'port': u'11443', 'port': '11443',
u'ram-cache-size': u'256K', 'ram-cache-size': '256K',
u're6st-verification-url': u're6st-verification-url', 're6st-verification-url': 're6st-verification-url',
u'request-timeout': u'100', 'request-timeout': '100',
u'slave-kedifa-information': u'{}' 'slave-kedifa-information': '{}'
}, },
'caddy-frontend-3': { 'caddy-frontend-3': {
'X-software_release_url': self.frontend_3_sr, 'X-software_release_url': self.frontend_3_sr,
u'apache-certificate': unicode(self.certificate_pem), 'apache-certificate': self.certificate_pem.decode(),
u'apache-key': unicode(self.key_pem), 'apache-key': self.key_pem.decode(),
u'authenticate-to-backend': u'True', 'authenticate-to-backend': 'True',
u'backend-client-caucase-url': backend_client_caucase_url, 'backend-client-caucase-url': backend_client_caucase_url,
u'backend-connect-retries': u'1', 'backend-connect-retries': '1',
u'backend-connect-timeout': u'2', 'backend-connect-timeout': '2',
u'ciphers': u'ciphers', 'ciphers': 'ciphers',
u'cluster-identification': u'testing partition 0', 'cluster-identification': 'testing partition 0',
u'domain': u'example.com', 'domain': 'example.com',
u'enable-http2-by-default': u'True', 'enable-http2-by-default': 'True',
u'extra_slave_instance_list': u'[]', 'extra_slave_instance_list': '[]',
u'frontend-name': u'caddy-frontend-3', 'frontend-name': 'caddy-frontend-3',
u'global-disable-http2': u'True', 'global-disable-http2': 'True',
u'kedifa-caucase-url': kedifa_caucase_url, 'kedifa-caucase-url': kedifa_caucase_url,
u'monitor-cors-domains': u'monitor.app.officejs.com', 'monitor-cors-domains': 'monitor.app.officejs.com',
u'monitor-httpd-port': 8413, 'monitor-httpd-port': 8413,
u'monitor-username': u'admin', 'monitor-username': 'admin',
u'mpm-graceful-shutdown-timeout': u'2', 'mpm-graceful-shutdown-timeout': '2',
u'plain_http_port': u'11080', 'plain_http_port': '11080',
u'port': u'11443', 'port': '11443',
u're6st-verification-url': u're6st-verification-url', 're6st-verification-url': 're6st-verification-url',
u'request-timeout': u'100', 'request-timeout': '100',
u'slave-kedifa-information': u'{}' 'slave-kedifa-information': '{}'
}, },
'kedifa': { 'kedifa': {
'X-software_release_url': self.kedifa_sr, 'X-software_release_url': self.kedifa_sr,
u'caucase_port': u'15090', 'caucase_port': '15090',
u'cluster-identification': u'testing partition 0', 'cluster-identification': 'testing partition 0',
u'kedifa_port': u'15080', 'kedifa_port': '15080',
u'monitor-cors-domains': u'monitor.app.officejs.com', 'monitor-cors-domains': 'monitor.app.officejs.com',
u'monitor-httpd-port': u'8402', 'monitor-httpd-port': '8402',
u'monitor-username': u'admin', 'monitor-username': 'admin',
u'slave-list': [] 'slave-list': []
}, },
'testing partition 0': { 'testing partition 0': {
'-frontend-2-software-release-url': self.frontend_2_sr, '-frontend-2-software-release-url': self.frontend_2_sr,
...@@ -6653,8 +6680,8 @@ class TestPassedRequestParameter(HttpFrontendTestCase): ...@@ -6653,8 +6680,8 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
'-sla-2-computer_guid': 'local', '-sla-2-computer_guid': 'local',
'-sla-3-computer_guid': 'local', '-sla-3-computer_guid': 'local',
'X-software_release_url': base_software_url, 'X-software_release_url': base_software_url,
'apache-certificate': unicode(self.certificate_pem), 'apache-certificate': self.certificate_pem.decode(),
'apache-key': unicode(self.key_pem), 'apache-key': self.key_pem.decode(),
'authenticate-to-backend': 'True', 'authenticate-to-backend': 'True',
'automatic-internal-backend-client-caucase-csr': 'False', 'automatic-internal-backend-client-caucase-csr': 'False',
'automatic-internal-kedifa-caucase-csr': 'False', 'automatic-internal-kedifa-caucase-csr': 'False',
...@@ -6809,7 +6836,7 @@ class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -6809,7 +6836,7 @@ class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin):
@classmethod @classmethod
def setUpAssertionDict(cls): def setUpAssertionDict(cls):
backend = urlparse.urlparse(cls.backend_url).netloc backend = urllib.parse.urlparse(cls.backend_url).netloc
cls.assertion_dict = { cls.assertion_dict = {
'health-check-disabled': """\ 'health-check-disabled': """\
backend _health-check-disabled-http backend _health-check-disabled-http
...@@ -6894,14 +6921,14 @@ backend _health-check-default-http ...@@ -6894,14 +6921,14 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
def restoreBackend(): def restoreBackend():
result = requests.put( result = requests.put(
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={}) headers={})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
self.addCleanup(restoreBackend) self.addCleanup(restoreBackend)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -6951,15 +6978,15 @@ backend _health-check-default-http ...@@ -6951,15 +6978,15 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
def restoreBackend(): def restoreBackend():
result = requests.put( result = requests.put(
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={}) headers={})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
self.addCleanup(restoreBackend) self.addCleanup(restoreBackend)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -7001,7 +7028,7 @@ backend _health-check-default-http ...@@ -7001,7 +7028,7 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -7034,7 +7061,7 @@ backend _health-check-default-http ...@@ -7034,7 +7061,7 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -7063,7 +7090,7 @@ backend _health-check-default-http ...@@ -7063,7 +7090,7 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -7075,7 +7102,7 @@ backend _health-check-default-http ...@@ -7075,7 +7102,7 @@ backend _health-check-default-http
der2pem(result.peercert)) der2pem(result.peercert))
# as ssl proxy verification failed, service is unavailable # as ssl proxy verification failed, service is unavailable
self.assertEqual(result.status_code, httplib.SERVICE_UNAVAILABLE) self.assertEqual(result.status_code, http.client.SERVICE_UNAVAILABLE)
def test_health_check_failover_url_ssl_proxy_missing(self): def test_health_check_failover_url_ssl_proxy_missing(self):
parameter_dict = self.assertSlaveBase( parameter_dict = self.assertSlaveBase(
...@@ -7093,7 +7120,7 @@ backend _health-check-default-http ...@@ -7093,7 +7120,7 @@ backend _health-check-default-http
self.backend_url + slave_parameter_dict[ self.backend_url + slave_parameter_dict[
'health-check-http-path'].strip('/'), 'health-check-http-path'].strip('/'),
headers={'X-Reply-Status-Code': '502'}) headers={'X-Reply-Status-Code': '502'})
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, http.client.CREATED)
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
...@@ -7105,7 +7132,7 @@ backend _health-check-default-http ...@@ -7105,7 +7132,7 @@ backend _health-check-default-http
der2pem(result.peercert)) der2pem(result.peercert))
# as ssl proxy verification failed, service is unavailable # as ssl proxy verification failed, service is unavailable
self.assertEqual(result.status_code, httplib.SERVICE_UNAVAILABLE) self.assertEqual(result.status_code, http.client.SERVICE_UNAVAILABLE)
if __name__ == '__main__': if __name__ == '__main__':
...@@ -7120,5 +7147,5 @@ if __name__ == '__main__': ...@@ -7120,5 +7147,5 @@ if __name__ == '__main__':
url_template = 'http://%s:%s/' url_template = 'http://%s:%s/'
server = klass((ip, port), TestHandler) server = klass((ip, port), TestHandler)
print url_template % server.server_address[:2] print((url_template % server.server_address[:2]))
server.serve_forever() server.serve_forever()
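The remaining test-file hunks complete the same Python 2 to 3 port: urlparse moves to urllib.parse and the print statement becomes a function call. A short sketch of both idioms with placeholder values (the tests use the helper backend they start themselves):

import urllib.parse

backend_url = "http://[::1]:8080/"            # placeholder, not the test backend
backend = urllib.parse.urlparse(backend_url).netloc

url_template = 'http://%s:%s/'
server_address = ('::1', 8080)                # placeholder address tuple
print(url_template % server_address[:2])      # print() is a function on Python 3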
[buildout] [buildout]
extends = extends =
../../stack/slapos.cfg ../../stack/slapos.cfg
../../component/macros/virtual-env.cfg ../../component/macros/virtual-env.cfg
parts = parts =
django-env django-env
[django-env] [django-env]
<= virtual-env-base <= virtual-env-base
...@@ -15,8 +15,6 @@ eggs = Django ...@@ -15,8 +15,6 @@ eggs = Django
part = python3 part = python3
[versions] [versions]
Django = 3.2.12 Django = 4.0.6
sqlparse = 0.4.2 sqlparse = 0.4.2
pytz = 2021.3 asgiref = 3.5.2
asgiref = 3.3.2
typing-extensions = 4.1.1:whl
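The django-env profile moves from Django 3.2.12 to 4.0.6 and keeps only the pins that release still needs (sqlparse, asgiref); pytz and typing-extensions are dropped. A hedged sketch, not part of the profile, for checking what the generated environment actually resolved:

from importlib.metadata import PackageNotFoundError, version

# Compare the pins above with what is importable inside the virtual env.
for name, pinned in (("Django", "4.0.6"),
                     ("sqlparse", "0.4.2"),
                     ("asgiref", "3.5.2")):
    try:
        resolved = version(name)
    except PackageNotFoundError:
        resolved = "not installed"
    print("%s: pinned %s, resolved %s" % (name, pinned, resolved))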
...@@ -13,6 +13,7 @@ extra-eggs += ...@@ -13,6 +13,7 @@ extra-eggs +=
[template] [template]
extra = extra =
# The following list is for SR whose buildout runs only with Python 3. # The following list is for SR whose buildout runs only with Python 3.
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup} erp5testnode ${slapos.test.erp5testnode-setup:setup}
galene ${slapos.test.galene-setup:setup} galene ${slapos.test.galene-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup} headless-chromium ${slapos.test.headless-chromium-setup:setup}
......
...@@ -359,7 +359,6 @@ extra = ...@@ -359,7 +359,6 @@ extra =
# You should not add more lines here. # You should not add more lines here.
backupserver ${slapos.test.backupserver-setup:setup} backupserver ${slapos.test.backupserver-setup:setup}
beremiz-ide ${slapos.test.beremiz-ide-setup:setup} beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
caucase ${slapos.test.caucase-setup:setup} caucase ${slapos.test.caucase-setup:setup}
cloudooo ${slapos.test.cloudooo-setup:setup} cloudooo ${slapos.test.cloudooo-setup:setup}
dream ${slapos.test.dream-setup:setup} dream ${slapos.test.dream-setup:setup}
......
...@@ -66,7 +66,7 @@ md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0 ...@@ -66,7 +66,7 @@ md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0
[template-run-zelenium] [template-run-zelenium]
filename = run-zelenium-test.py.in filename = run-zelenium-test.py.in
md5sum = 38653020296e43f84a93b99cb35aaef6 md5sum = b95084ae9eed95a68eada45e28ef0c04
[template] [template]
filename = instance.cfg.in filename = instance.cfg.in
......
...@@ -77,6 +77,23 @@ def main(): ...@@ -77,6 +77,23 @@ def main():
traceback.print_exc() traceback.print_exc()
time.sleep(600) time.sleep(600)
# Unsubscribe activity
  # Launching Zuite_waitForActivities while activities are subscribed may create conflicts # Launching Zuite_waitForActivities while activities are subscribed may create conflicts
activity_unsubscribe_url = "%s/erp5/portal_activities/unsubscribe" \
"?__ac_name=%s" \
"&__ac_password=%s" % (parser_configuration['remote-access-url'], {{ repr(user) }}, {{ repr(password) }})
  print(activity_unsubscribe_url) print(activity_unsubscribe_url)
try:
response = urlopen(activity_unsubscribe_url)
try:
if response.code != 200:
sys.exit(-1)
finally:
response.close()
except Exception:
traceback.print_exc()
tool = taskdistribution.TaskDistributor(portal_url=args.master_url) tool = taskdistribution.TaskDistributor(portal_url=args.master_url)
browser = webdriver.Remote(parser_configuration['server-url'] , parser_configuration['desired-capabilities'] ) browser = webdriver.Remote(parser_configuration['server-url'] , parser_configuration['desired-capabilities'] )
......
...@@ -139,7 +139,7 @@ zc.recipe.egg = 2.0.3+slapos003 ...@@ -139,7 +139,7 @@ zc.recipe.egg = 2.0.3+slapos003
traitlets = 4.3.3 traitlets = 4.3.3
Jinja2 = 2.11.3 Jinja2 = 2.11.3
Importing = 1.10 Importing = 1.10
MarkupSafe = 1.0 MarkupSafe = 2.0.1
PyYAML = 5.4.1 PyYAML = 5.4.1
Werkzeug = 2.0.2 Werkzeug = 2.0.2
ZConfig = 2.9.3 ZConfig = 2.9.3
...@@ -195,7 +195,7 @@ setuptools-dso = 1.7 ...@@ -195,7 +195,7 @@ setuptools-dso = 1.7
rubygemsrecipe = 0.4.3 rubygemsrecipe = 0.4.3
six = 1.12.0 six = 1.12.0
slapos.cookbook = 1.0.253 slapos.cookbook = 1.0.253
slapos.core = 1.7.10 slapos.core = 1.7.11
slapos.extension.strip = 0.4 slapos.extension.strip = 0.4
slapos.extension.shared = 1.0 slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25 slapos.libnetworkcache = 0.25
...@@ -203,7 +203,7 @@ slapos.rebootstrap = 4.5 ...@@ -203,7 +203,7 @@ slapos.rebootstrap = 4.5
slapos.recipe.build = 0.55 slapos.recipe.build = 0.55
slapos.recipe.cmmi = 0.19 slapos.recipe.cmmi = 0.19
slapos.recipe.template = 5.0 slapos.recipe.template = 5.0
slapos.toolbox = 0.127 slapos.toolbox = 0.128
stevedore = 1.21.0:whl stevedore = 1.21.0:whl
subprocess32 = 3.5.4 subprocess32 = 3.5.4
unicodecsv = 0.14.1 unicodecsv = 0.14.1
...@@ -262,6 +262,9 @@ click = 6.7 ...@@ -262,6 +262,9 @@ click = 6.7
distro = 1.6.0 distro = 1.6.0
Werkzeug = 1.0.1 Werkzeug = 1.0.1
[versions:sys.version_info < (3,8)]
MarkupSafe = 1.0
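The default MarkupSafe pin moves to 2.0.1 while the new conditional section keeps 1.0 for interpreters older than 3.8. A rough Python equivalent of that selection, written as plain code rather than buildout's conditional-section syntax:

import sys

# Default pin plus an override that only applies on older interpreters,
# mirroring the [versions] / [versions:sys.version_info < (3,8)] pair above.
versions = {"MarkupSafe": "2.0.1"}
if sys.version_info < (3, 8):
    versions["MarkupSafe"] = "1.0"
print("MarkupSafe =", versions["MarkupSafe"])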
[networkcache] [networkcache]
download-cache-url = http://shacache.nxdcdn.com download-cache-url = http://shacache.nxdcdn.com
......