Commit fb80f4a4 authored by Kirill Smelkov

Merge branch 'master' into master+ZODB4-wc2

* master: (57 commits)
  software/tsn-demo: add TSN demo SR
  component/tsn-rt-measures: add TSN measurement programs
  component/linuxptp: add linuxptp 3.1.1
  component/util-linux: version up 2.37.1
  stack/caucase: Fix md5 change detection
  software/slapos-sr-testing: enable test for hugo
  software/hugo: add test for hugo
  software/hugo: add Hugo software in slapos
  component/hugo: add hugo component into slapos
  stack/slapos.cfg: version up rubygemsrecipe to 0.4.2
  software/caddy-frontend: Use instance-caddy-replicate.cfg to replace buildout-switch-softwaretype.cfg
  software/caddy-frontend: switch to switch-softwaretype
  version up: Debian 10/11 netinst
  stack/slapos: version up slapos.core 1.6.18
  software/theia: use vscode/git instead of theia/git
  component/tesseract: Workaround for slaprunner paths with double slashes
  software/theia: drop workaround for tasks issue
  software/theia: set warnOnPotentiallyInsecureHostPattern
  software/theia: version up 1.16.0
  component/libsecret: new component
  ...
parents a3042c41 0d3332a6
......@@ -13,8 +13,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.haproxy.org/download/2.0/src/haproxy-2.0.22.tar.gz
md5sum = 4d6d5debca0d1bcf51293fb58914f1a3
url = http://www.haproxy.org/download/2.0/src/haproxy-2.0.24.tar.gz
md5sum = 493011d81eea0f3700543fc6a80acc03
configure-command = true
# for Linux kernel 2.6.28 and above, we use "linux-glibc" as the TARGET,
# otherwise use "generic".
......
[buildout]
extends =
../golang/buildout.cfg
../../stack/slapos.cfg
parts =
slapos-cookbook
gowork
[hugo-get]
<= go-git-package
go.importpath = github.com/gohugoio/hugo
repository = https://github.com/gohugoio/hugo.git
revision = f6821b88abbc1237af0e28cefbc624e6cda3305a
[gowork]
install =
${hugo-get:location}:./...
environment =
CGO_ENABLED = 1
buildflags = --tags extended
[gowork.goinstall]
depends_gitfetch =
${hugo-get:recipe}
command = set -e
. ${gowork:env.sh}
cd ${hugo-get:location}
go build ${gowork:buildflags} -o ${gowork:bin}/hugo
[hugo]
<= gowork.goinstall
[buildout]
parts = libsecret
extends =
../gnutls/buildout.cfg
../libxslt/buildout.cfg
../pkgconfig/buildout.cfg
../glib/buildout.cfg
../gettext/buildout.cfg
../libuuid/buildout.cfg
../xz-utils/buildout.cfg
[libsecret]
recipe = slapos.recipe.cmmi
url = https://download.gnome.org/sources/libsecret/0.20/libsecret-0.20.4.tar.xz
md5sum = bf92f48afab2891f644f311e0f37683f
shared = true
configure-options =
--with-libgcrypt-prefix=${libgcrypt:location}
--with-libintl-prefix=${gettext:location}
--disable-gtk-doc-html
--disable-manpages
--disable-vala
environment =
PATH=${xz-utils:location}/bin/:${pkgconfig:location}/bin:${libxslt:location}/bin:${glib:location}/bin:${libgcrypt:location}/bin/:%(PATH)s
PKG_CONFIG_PATH=${:pkg-config-path-depends}
LDFLAGS=-Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${libgcrypt:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${libuuid:location}/lib
pkg-config-path-depends = ${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${libgcrypt:location}/lib/pkgconfig:${libuuid:location}/lib/pkgconfig
pkg-config-path = @@LOCATION@@/lib/pkgconfig:${:pkg-config-path-depends}
# Implementation of the Precision Time Protocol (PTP) for linux http://linuxptp.sourceforge.net/
[linuxptp]
recipe = slapos.recipe.cmmi
configure-command = true
url = https://sourceforge.net/projects/linuxptp/files/v3.1/linuxptp-3.1.1.tgz
md5sum = 3b79ab5e77c5b5cf06bc1c8350d405bb
......@@ -86,7 +86,7 @@ revision = 3d3a204177d3a7ab8a2858e04e792a6d11bf133f
git-executable = ${git:location}/bin/git
[nginx-push-stream]
<= nginx
<= nginx-common
configure-options=
--with-ipv6
--with-http_ssl_module
......
......@@ -91,11 +91,11 @@ md5sum = 6097fdb9cbab47c96471274b9044e983
# XXX: This is not the latest version because
# Debian does not provide a stable URL for it.
<= debian-amd64-netinst-base
version = 10.8.0
md5sum = e221f43f4fdd409250908fc4305727d4
version = 10.10.0
md5sum = c7d0e562e589e853b5d00563b4311720
[debian-amd64-testing-netinst.iso]
<= debian-amd64-netinst-base
release = bullseye_di_alpha3
version = bullseye-DI-alpha3
md5sum = bff147077791586fa7c102267da9f2d2
release = bullseye_di_rc3
version = bullseye-DI-rc3
md5sum = 405917de7062c58357a3673c9901f0c4
......@@ -26,6 +26,9 @@ md5sum = 51fe2bcbff1bbce77a25d180fd247f7d
pkg_config_depends = ${leptonica:location}/lib/pkgconfig:${fontconfig:location}/lib/pkgconfig:${fontconfig:pkg_config_depends}:${lcms2:location}/lib/pkgconfig:${xz-utils:location}/lib/pkgconfig
pre-configure =
autoreconf -ivf -I${pkgconfig:location}/share/aclocal -I${libtool:location}/share/aclocal -Wno-portability
# XXX workaround path on slaprunner with a double slash
# https://github.com/tesseract-ocr/tesseract/issues/3527
configure-options = --prefix=$(python -c 'print("""@@LOCATION@@""".replace("//", "/"))')
environment =
PATH=${pkgconfig:location}/bin:${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:${m4:location}/bin:${patch:location}/bin:%(PATH)s
......
# Various programs to do TSN measurements
[buildout]
extends =
../git/buildout.cfg
[tsn-rt-measures-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/tsn-rt-measures.git
branch = master
revision = 376a7d063efac807e9f4715d9dda84cf1f2aa24c
git-executable = ${git:location}/bin/git
[tsn-rt-measures]
recipe = slapos.recipe.cmmi
path = ${tsn-rt-measures-repository:location}
configure-command =
echo "No configure.."
......@@ -7,8 +7,8 @@ extends =
[util-linux]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.kernel.org/pub/linux/utils/util-linux/v2.31/util-linux-2.31.1.tar.xz
md5sum = 7733b583dcb51518944d42aa62ef19ea
url = https://www.kernel.org/pub/linux/utils/util-linux/v2.37/util-linux-2.37.1.tar.xz
md5sum = 6d244f0f59247e9109f47d6e5dd0556b
configure-options =
--disable-static
--enable-libuuid
......
......@@ -14,7 +14,7 @@
# not need these here).
[template]
filename = instance.cfg.in
md5sum = 04015a7a552285984d091293ef573fb9
md5sum = 1dfbd20c77fb3c1f01005a8a920d2ed9
[profile-common]
filename = instance-common.cfg.in
......@@ -22,15 +22,15 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 8507a2ace2f789b92c522cc62ca5aace
md5sum = 51087ac7615bd7cc01e60eb23701f625
[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 8beb438d06bbb0f917d13e182fb12d17
md5sum = b6fc5a004a1235ffad3af0b4cb0e661f
[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = 613f777a08373088cbaf7f51fd18ea70
md5sum = 9bb51f663f69d66b5b3708bf892dd3e6
[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
......@@ -72,10 +72,6 @@ md5sum = d022455a8610bac2dd51101edb035987
_update_hash_filename_ = templates/trafficserver/logging.yaml.jinja2
md5sum = 368b271215a92594ca9e2fa3102d484f
[template-nginx-eventsource-slave-virtualhost]
_update_hash_filename_ = templates/nginx-eventsource-slave.conf.in
md5sum = 217a6c801b8330b0b825f7b8b4c77184
[template-caddy-lazy-script-call]
_update_hash_filename_ = templates/apache-lazy-script-call.sh.in
md5sum = 77d60840591de67b64ab3572e46273a0
......
{%- if instance_parameter_dict['slap-software-type'] == software_type -%}
{% import "caucase" as caucase with context %}
{%- set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
[buildout]
......@@ -11,6 +10,7 @@ parts =
directory
logrotate-entry-caddy
caddy-frontend
caddyprofiledeps
switch-caddy-softwaretype
caucase-updater
caucase-updater-promise
......@@ -97,9 +97,11 @@ expose-csr_id-var = ${:var}/expose-csr_id
slave-introspection-var = ${:var}/slave-introspection
[switch-caddy-softwaretype]
recipe = slapos.cookbook:softwaretype
single-default = ${dynamic-custom-personal-profile-slave-list:rendered}
single-custom-personal = ${dynamic-custom-personal-profile-slave-list:rendered}
recipe = slapos.cookbook:switch-softwaretype
default = dynamic-custom-personal-profile-slave-list:rendered
RootSoftwareInstance = ${:default}
single-default = dynamic-custom-personal-profile-slave-list:rendered
single-custom-personal = dynamic-custom-personal-profile-slave-list:rendered
[frontend-configuration]
ip-access-certificate = ${self-signed-ip-access:certificate}
......@@ -114,7 +116,7 @@ slave-introspection-domain = ${slave-introspection-frontend:connection-domain}
# Self Signed certificate for HTTPS IP accesses to the frontend
recipe = plone.recipe.command
update-command = ${:command}
ipv6 = ${slap-network-information:global-ipv6}
ipv6 = ${slap-configuration:ipv6-random}
ipv4 = {{instance_parameter_dict['ipv4-random']}}
certificate = ${caddy-directory:master-autocert-dir}/ip-access-${:ipv6}-${:ipv4}.crt
{#- Can be stopped on error, as does not rely on self provided service #}
......@@ -138,7 +140,7 @@ command =
# Self Signed certificate for HTTPS access to the frontend with fallback certificate
recipe = plone.recipe.command
update-command = ${:command}
ipv6 = ${slap-network-information:global-ipv6}
ipv6 = ${slap-configuration:ipv6-random}
ipv4 = {{instance_parameter_dict['ipv4-random']}}
certificate = ${caddy-directory:master-autocert-dir}/fallback-access.crt
{#- Can be stopped on error, as does not rely on self provided service #}
......@@ -282,6 +284,7 @@ software_type = single-custom-personal
organization = {{ slapparameter_dict['cluster-identification'] }}
organizational-unit = {{ instance_parameter_dict['configuration.frontend-name'] }}
backend-client-caucase-url = {{ slapparameter_dict['backend-client-caucase-url'] }}
partition_ipv6 = ${slap-configuration:ipv6-random}
extra-context =
key caddy_configuration_directory caddy-directory:slave-configuration
key backend_client_caucase_url :backend-client-caucase-url
......@@ -293,7 +296,7 @@ extra-context =
key caddy_log_directory caddy-directory:slave-log
key expose_csr_id_organization :organization
key expose_csr_id_organizational_unit :organizational-unit
key global_ipv6 slap-network-information:global-ipv6
key global_ipv6 slap-configuration:ipv6-random
key empty_template software-release-path:template-empty
key template_default_slave_configuration software-release-path:template-default-slave-virtualhost
key software_type :software_type
......@@ -328,8 +331,8 @@ extra-context =
section frontend_configuration frontend-configuration
key http_port configuration:plain_http_port
key https_port configuration:port
key global_ipv6 slap-configuration:ipv6-random
key local_ipv4 :local_ipv4
key global_ipv6 slap-network-information:global-ipv6
key error_log caddy-configuration:error-log
key not_found_file caddy-configuration:not-found-file
key username monitor-instance-parameter:username
......@@ -904,7 +907,7 @@ recipe = slapos.cookbook:requestoptional
name = Slave Introspection Frontend {{ instance_parameter_dict['configuration.frontend-name'] }}
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = https://[${slap-network-information:global-ipv6}]:{{ instance_parameter_dict['configuration.slave-introspection-https-port'] }}/
config-url = https://[${slap-configuration:ipv6-random}]:{{ instance_parameter_dict['configuration.slave-introspection-https-port'] }}/
config-https-only = true
return = domain secure_access
......@@ -914,7 +917,7 @@ recipe = slapos.cookbook:requestoptional
name = Backend Haproxy Statistic Frontend {{ instance_parameter_dict['configuration.frontend-name'] }}
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = https://[${slap-network-information:global-ipv6}]:{{ instance_parameter_dict['configuration.backend-haproxy-statistic-port'] }}/
config-url = https://[${slap-configuration:ipv6-random}]:{{ instance_parameter_dict['configuration.backend-haproxy-statistic-port'] }}/
config-https-only = true
return = domain secure_access
......@@ -1022,5 +1025,3 @@ config-command =
{%- for key, value in software_parameter_dict.iteritems() %}
{{ key }} = {{ dumps(value) }}
{%- endfor %}
{%- endif -%} {# if instance_parameter_dict['slap-software-type'] == software_type #}
{% if instance_parameter_dict['slap-software-type'] in software_type %}
{% set aibcc_enabled = True %}
{% import "caucase" as caucase with context %}
{#- SERVER_POLLUTED_KEY_LIST is a list of keys which come from various SlapOS Master implementations, which mix request and publish keys in each slave's information -#}
......@@ -44,13 +43,7 @@ context =
{% set popen = functools_module.partial(subprocess_module.Popen, stdout=subprocess_module.PIPE, stderr=subprocess_module.STDOUT, stdin=subprocess_module.PIPE) %}
{% set part_list = [] %}
{% set single_type_key = 'single-' %}
{% if instance_parameter_dict['slap-software-type'] == "replicate" %}
{% set frontend_type = slapparameter_dict.pop('-frontend-type', 'single-default') %}
{% elif instance_parameter_dict['slap-software-type'] in ['default', 'RootSoftwareInstance'] %}
{% set frontend_type = "%s%s" % (single_type_key, 'custom-personal') %}
{% else %}
{% set frontend_type = "%s%s" % (single_type_key, instance_parameter_dict['slap-software-type']) %}
{% endif %}
{% set frontend_type = "%s%s" % (single_type_key, 'custom-personal') %}
{% set frontend_quantity = slapparameter_dict.pop('-frontend-quantity', '1') | int %}
{% set slave_list_name = 'extra_slave_instance_list' %}
{% set frontend_list = [] %}
......@@ -117,9 +110,7 @@ context =
{% set slave_warning_list = [] %}
{% set slave_server_alias_unclashed = [] %}
{% set slave_type = slave.get('type') %}
{% if slave_type == 'eventsource' %}
{% do slave_error_list.append('type:eventsource is not implemented') %}
{% elif slave_type not in [None, '', 'default', 'zope', 'redirect', 'notebook', 'websocket'] %}
{% if slave_type not in [None, '', 'default', 'zope', 'redirect', 'notebook', 'websocket'] %}
{% do slave_error_list.append('type:%s is not supported' % (slave_type,)) %}
{% endif %}
{# Check health-check-* #}
......@@ -392,12 +383,12 @@ config-url =
#--
#-- Publish slave information
[publish-slave-information]
recipe = slapos.cookbook:softwaretype
default = ${dynamic-publish-slave-information:rendered}
RootSoftwareInstance = ${dynamic-publish-slave-information:rendered}
replicate = ${dynamic-publish-slave-information:rendered}
custom-personal = ${dynamic-publish-slave-information:rendered}
custom-group = ${dynamic-publish-slave-information:rendered}
recipe = slapos.cookbook:switch-softwaretype
default = dynamic-publish-slave-information:rendered
RootSoftwareInstance = ${:default}
replicate = dynamic-publish-slave-information:rendered
custom-personal = dynamic-publish-slave-information:rendered
custom-group = dynamic-publish-slave-information:rendered
[request-kedifa]
<= slap-connection
......@@ -909,6 +900,4 @@ parts =
caucased-backend-client-promise
{% for part in part_list %}
{{ ' %s' % part }}
{% endfor %}
# publish-information
{% endif %}
{% endfor %}
\ No newline at end of file
......@@ -15,14 +15,13 @@
},
"type": {
"default": "",
"description": "Type of slave. If redirect, the slave will redirect to the given URL. If zope, the rewrite rules will be compatible with Virtual Host Monster. Implemented are default, zope, redirect, notebook and websocket, not implemneted is eventsource.",
"description": "Type of slave. If redirect, the slave will redirect to the given URL. If zope, the rewrite rules will be compatible with Virtual Host Monster.",
"enum": [
"",
"zope",
"redirect",
"notebook",
"websocket",
"eventsource"
"websocket"
],
"title": "Backend Type",
"type": "string"
......
......@@ -9,7 +9,7 @@
},
"type": {
"default": "",
"description": "Type of slave. If redirect, the slave will redirect to the given url. If zope, the rewrite rules will be compatible with Virtual Host Monster. Implemented are default, zope, redirect, notebook and websocket, not implemneted is eventsource.",
"description": "Type of slave. If redirect, the slave will redirect to the given url. If zope, the rewrite rules will be compatible with Virtual Host Monster.",
"enum": [
"",
"zope"
......
......@@ -2,6 +2,7 @@
extends = {{ software_parameter_dict['profile_common'] }}
parts =
caddyprofiledeps
switch-softwaretype
[caddyprofiledeps]
......@@ -14,8 +15,8 @@ extensions = jinja2.ext.do
extra-context =
context =
import json_module json
key slapparameter_dict instance-parameter:configuration
section instance_parameter_dict instance-parameter
key slapparameter_dict slap-configuration:configuration
section instance_parameter_dict slap-configuration
section software_parameter_dict software-parameter-section
${:extra-context}
caucase-jinja2-library = {{ software_parameter_dict['caucase_jinja2_library'] }}
......@@ -23,14 +24,14 @@ import-list =
file caucase :caucase-jinja2-library
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${dynamic-profile-caddy-replicate:rendered}
RootSoftwareInstance = ${dynamic-profile-caddy-replicate:rendered}
custom-personal = ${dynamic-profile-caddy-replicate:rendered}
single-default = ${dynamic-profile-caddy-frontend:rendered}
single-custom-personal = ${dynamic-profile-caddy-frontend:rendered}
replicate = ${dynamic-profile-caddy-replicate:rendered}
kedifa = ${dynamic-profile-kedifa:rendered}
recipe = slapos.cookbook:switch-softwaretype
default = dynamic-profile-caddy-replicate:rendered
RootSoftwareInstance = ${:default}
custom-personal = dynamic-profile-caddy-replicate:rendered
single-default = dynamic-profile-caddy-frontend:rendered
single-custom-personal = dynamic-profile-caddy-frontend:rendered
replicate = dynamic-profile-caddy-replicate:rendered
kedifa = dynamic-profile-kedifa:rendered
[software-parameter-section]
{% for key,value in software_parameter_dict.iteritems() %}
......@@ -64,7 +65,7 @@ filename = instance-kedifa.cfg
extra-context =
raw software_type kedifa
[instance-parameter]
[slap-configuration]
# Fetches parameters defined in SlapOS Master for this instance.
# Always the same.
recipe = slapos.cookbook:slapconfiguration.serialised
......
{%- if software_type == slap_software_type %}
{%- set kedifa_updater_mapping = [] %}
{%- set cached_server_dict = {} %}
{%- set backend_slave_list = [] %}
......@@ -187,7 +186,7 @@ context =
{%- set furled = furl_module.furl(frontend_configuration['slave-introspection-secure_access']) %}
{%- do furled.set(username = slave_reference.lower()) %}
{%- do furled.set(password = '${'+ slave_password_section +':passwd}') %}
{%- do furled.set(path = slave_reference.lower() + '/') %}
{%- do furled.set(path = slave_reference + '/') %}
{#- We unquote, as furl quotes automatically, but there is a buildout value on purpose, like ${...:...}, in the password #}
{%- set slave_log_access_url = urlparse_module.unquote(furled.tostr()) %}
{%- do slave_publish_dict.__setitem__('log-access', slave_log_access_url) %}
......@@ -237,7 +236,7 @@ context =
{#- Set slave logrotate entry #}
[{{slave_log_directory_section}}]
recipe = slapos.cookbook:mkdirectory
log-directory = {{ '${slave-log-directory-dict:' + slave_reference.lower() + '}' }}
log-directory = {{ '${slave-log-directory-dict:' + slave_reference + '}' }}
[{{slave_logrotate_section}}]
<= logrotate-entry-base
......@@ -399,8 +398,8 @@ recipe = slapos.cookbook:publish
{#- Define IPv6 to IPV4 tunneling #}
[tunnel-6to4-base]
recipe = slapos.cookbook:wrapper
ipv4 = ${slap-network-information:local-ipv4}
ipv6 = ${slap-network-information:global-ipv6}
ipv4 = ${slap-configuration:ipv4-random}
ipv6 = ${slap-configuration:ipv6-random}
wrapper-path = {{ directory['service'] }}/6tunnel-${:ipv6-port}
command-line = {{ software_parameter_dict['sixtunnel'] }}/bin/6tunnel -6 -4 -d -l ${:ipv6} ${:ipv6-port} ${:ipv4} ${:ipv4-port}
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
......@@ -509,7 +508,7 @@ extra-context =
{{ key }} = {{ value }}
{%- endfor %}
local-ipv4 = {{ dumps('' ~ instance_parameter_dict['ipv4-random']) }}
global-ipv6 = ${slap-network-information:global-ipv6}
global-ipv6 = ${slap-configuration:ipv6-random}
request-timeout = {{ dumps('' ~ configuration['request-timeout']) }}
backend-connect-timeout = {{ dumps('' ~ configuration['backend-connect-timeout']) }}
backend-connect-retries = {{ dumps('' ~ configuration['backend-connect-retries']) }}
......@@ -583,12 +582,12 @@ update-command = ${:command}
command =
if ! [ -f ${:key} ] && ! [ -f ${:certificate} ] ; then
openssl req -new -newkey rsa:2048 -sha256 -subj \
"/O={{ expose_csr_id_organization }}/OU={{ expose_csr_id_organizational_unit }}/CN=${slap-network-information:global-ipv6}" \
"/O={{ expose_csr_id_organization }}/OU={{ expose_csr_id_organizational_unit }}/CN=${slap-configuration:ipv6-random}" \
-days 5 -nodes -x509 -keyout ${:key} -out ${:certificate}
fi
[expose-csr_id-configuration]
ip = ${slap-network-information:global-ipv6}
ip = ${slap-configuration:ipv6-random}
port = 17001
key = ${certificate-csr_id:key}
certificate = ${certificate-csr_id:certificate}
......@@ -659,5 +658,4 @@ commands =
module = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
{%- endif %} {# if software_type == slap_software_type #}
${logrotate:wrapper-path} -d
\ No newline at end of file
{% set url = slave_parameter.get('url') %}
{% set https_url = slave_parameter.get('https-url', url) %}
{% if url.startswith("http://") or url.startswith("https://") %}
{% set upstream = url.split("/")[2] %}
{% set https_upstream = https_url.split("/")[2] %}
{% set protocol = url.split("/")[0] %}
{% set https_protocol = https_url.split("/")[0] %}
{% set proxy_pass = '%s//%s' % (protocol, slave_parameter.get('slave_reference')) %}
{% set https_proxy_pass = '%s//https_%s' % (protocol, slave_parameter.get('slave_reference')) %}
# TODO-Caddy upstream {{ slave_parameter.get('slave_reference') }} {
# TODO-Caddy server {{ upstream }};
# TODO-Caddy
# TODO-Caddy pstream https_{{ slave_parameter.get('slave_reference') }} {
# TODO-Caddy server {{ https_upstream }};
# TODO-Caddy
# TODO-Caddy server {
# TODO-Caddy listen {{ slave_parameter['local_ipv4'] }}:{{ slave_parameter['nginx_http_port'] }};
# TODO-Caddy
# TODO-Caddy server_name {{ slave_parameter.get('custom_domain') }};
# TODO-Caddy
# TODO-Caddy error_log {{ slave_parameter.get('error_log') }} error;
# TODO-Caddy access_log {{ slave_parameter.get('access_log') }} custom;
# TODO-Caddy
# TODO-Caddy location /pub {
# TODO-Caddy push_stream_publisher;
# TODO-Caddy push_stream_channels_path $arg_id;
# TODO-Caddy # store messages in memory
# TODO-Caddy push_stream_store_messages off;
# TODO-Caddy
# TODO-Caddy # Message size limit
# TODO-Caddy # client_max_body_size MUST be equal to client_body_buffer_size or
# TODO-Caddy # you will be sorry.
# TODO-Caddy client_max_body_size 16k;
# TODO-Caddy client_body_buffer_size 16k;
# TODO-Caddy
# TODO-Caddy }
# TODO-Caddy
# TODO-Caddy location ~ /sub/(.*) {
# TODO-Caddy # activate subscriber mode for this location
# TODO-Caddy add_header "Access-Control-Allow-Origin" "*";
# TODO-Caddy add_header 'Access-Control-Allow-Credentials' 'false';
# TODO-Caddy add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS';
# TODO-Caddy add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since';
# TODO-Caddy
# TODO-Caddy push_stream_subscriber eventsource;
# TODO-Caddy # positional channel path
# TODO-Caddy push_stream_channels_path $1;
# TODO-Caddy
# TODO-Caddy # content-type
# TODO-Caddy default_type "text/event-stream; charset=utf-8";
# TODO-Caddy }
# TODO-Caddy
# TODO-Caddy
# TODO-Caddy server {
# TODO-Caddy listen {{ slave_parameter['local_ipv4'] }}:{{ slave_parameter['nginx_https_port'] }} ssl;
# TODO-Caddy
# TODO-Caddy server_name {{ slave_parameter.get('custom_domain') }};
# TODO-Caddy
# TODO-Caddy error_log {{ slave_parameter.get('error_log') }} error;
# TODO-Caddy access_log {{ slave_parameter.get('access_log') }} custom;
# TODO-Caddy
# TODO-Caddy ssl on;
# TODO-Caddy
# TODO-Caddy ssl_session_timeout 5m;
# TODO-Caddy ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
# TODO-Caddy ssl_ciphers 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5';
# TODO-Caddy ssl_prefer_server_ciphers on;
# TODO-Caddy ssl_session_cache shared:SSL:10m;
# TODO-Caddy location /pub {
# TODO-Caddy push_stream_publisher;
# TODO-Caddy push_stream_channels_path $arg_id;
# TODO-Caddy # store messages in memory
# TODO-Caddy push_stream_store_messages off;
# TODO-Caddy
# TODO-Caddy # Message size limit
# TODO-Caddy # client_max_body_size MUST be equal to client_body_buffer_size or
# TODO-Caddy # you will be sorry.
# TODO-Caddy client_max_body_size 16k;
# TODO-Caddy client_body_buffer_size 16k;
# TODO-Caddy
# TODO-Caddy }
# TODO-Caddy
# TODO-Caddy location ~ /sub/(.*) {
# TODO-Caddy # activate subscriber mode for this location
# TODO-Caddy add_header "Access-Control-Allow-Origin" "*";
# TODO-Caddy add_header 'Access-Control-Allow-Credentials' 'false';
# TODO-Caddy add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS';
# TODO-Caddy add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since';
# TODO-Caddy
# TODO-Caddy push_stream_subscriber eventsource;
# TODO-Caddy # positional channel path
# TODO-Caddy push_stream_channels_path $1;
# TODO-Caddy
# TODO-Caddy # content-type
# TODO-Caddy default_type "text/event-stream; charset=utf-8";
# TODO-Caddy }
# TODO-Caddy}
{% endif %}
......@@ -1479,10 +1479,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'websocket-path-list': '////ws//// /with%20space/',
'websocket-transparent': 'false',
},
# 'type-eventsource': {
# 'url': cls.backend_url,
# 'type': 'eventsource',
# },
'type-redirect': {
'url': cls.backend_url,
'type': 'redirect',
......@@ -1817,7 +1813,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
def test_server_polluted_keys_removed(self):
buildout_file = os.path.join(
self.getMasterPartitionPath(), 'buildout-switch-softwaretype.cfg')
self.getMasterPartitionPath(), 'instance-caddy-replicate.cfg')
for line in [
q for q in open(buildout_file).readlines()
if q.startswith('config-slave-list') or q.startswith(
......@@ -3261,54 +3257,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertFalse('x-real-ip' in j['Incoming Headers'])
@skip('Feature postponed')
def test_type_eventsource(self):
# Caddy: For event source, if I understand
# https://github.com/mholt/caddy/issues/1355 correctly, we could use
# Caddy as a proxy in front of nginx-push-stream . If we have a
# "central shared" caddy instance, can it handle keeping connections
# opens for many clients ?
parameter_dict = self.parseSlaveParameterDict('type-eventsource')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'typeeventsource.nginx.example.com',
'replication_number': '1',
'url': 'http://typeeventsource.nginx.example.com',
'site_url': 'http://typeeventsource.nginx.example.com',
'secure_access': 'https://typeeventsource.nginx.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
result = fakeHTTPSResult(
parameter_dict['domain'], 'pub',
# NGINX_HTTPS_PORT
)
self.assertEqual(
self.certificate_pem,
der2pem(result.peercert))
self.assertEqual(
'',
result.content
)
headers = result.headers.copy()
self.assertKeyWithPop('Expires', headers)
self.assertKeyWithPop('Date', headers)
self.assertEqual(
{
'X-Nginx-PushStream-Explain': 'No channel id provided.',
'Content-Length': '0',
'Cache-Control': 'no-cache, no-store, must-revalidate',
'Connection': 'keep-alive',
'Server': 'nginx'
},
headers
)
def test_type_redirect(self):
parameter_dict = self.assertSlaveBase('type-redirect')
......@@ -4642,7 +4590,7 @@ class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase):
self.slap.waitForInstance(self.instance_max_retry)
buildout_file = os.path.join(
self.getMasterPartitionPath(), 'buildout-switch-softwaretype.cfg')
self.getMasterPartitionPath(), 'instance-caddy-replicate.cfg')
with open(buildout_file) as fh:
buildout_file_content = fh.read()
node_1_present = re.search(
......
......@@ -25,7 +25,7 @@ parts =
[eggs]
recipe = zc.recipe.egg
eggs =
erp5.util[testnode]
erp5.util[testnode,benchmark,scalability_tester]
${lxml-python:egg}
......@@ -57,3 +57,4 @@ beautifulsoup4 = 4.7.1
zope.cachedescriptors = 4.3.1
zope.event = 4.4
zope.schema = 4.9.3
slapos.tool.nosqltester = 0.0.4.dev-r45972
......@@ -23,4 +23,4 @@ context =
[fluentd]
gems +=
fluent-plugin-wendelin==0.4
fluent-plugin-bin
fluent-plugin-bin==0.2
......@@ -14,7 +14,7 @@
# not need these here).
[instance.cfg]
filename = instance.cfg.in
md5sum = dc3f318e8a3aa7a59f9394118543e9e3
md5sum = 47e8092404feaf3f28ee6469523689ac
[watcher]
_update_hash_filename_ = watcher.in
......@@ -54,7 +54,7 @@ md5sum = 0f1ec4077dab586cc003ae13f689eda2
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = 64ec65b2daa0648453022f3afcbc4da3
md5sum = 6b34d4b96ae0067977fa509046d71231
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
......
......@@ -5,7 +5,6 @@
# throughput compared to tcp over loopback).
[buildout]
extends =
{{ gitlab_parameters_cfg }}
{{ monitor_template }}
parts =
directory
......@@ -50,26 +49,15 @@ offline = true
##################################
[instance-parameter]
# std stuff to fetch slapos instance parameters
recipe = slapos.cookbook:slapconfiguration
computer= ${slap-connection:computer-id}
partition=${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
{#- There are dangerous keys like recipe, etc #}
{#- XXX: Some other approach would be useful #}
{%- set DROP_KEY_LIST = ['recipe', '__buildout_signature__', 'computer', 'partition', 'url', 'key', 'cert'] %}
{%- for key, value in instance_parameter_dict.iteritems() -%}
{%- if key not in DROP_KEY_LIST %}
{{ key }} = {{ value }}
{%- endif -%}
{%- endfor %}
# autogenerated gitlab instance parameters
<= gitlab-parameters
# adjust/override some default settings:
# automatically load all available CPUs
configuration.unicorn_worker_processes = {{ multiprocessing.cpu_count() + 1 }}
configuration.nginx_worker_processes = {{ multiprocessing.cpu_count() }}
# gitlab non-native parameters
configuration.icp_license =
# for convenience
......
# GitLab "switch-softwaretype" instance
[buildout]
parts = switch-softwaretype
extends =
${gitlab-parameters.cfg:target}
parts =
switch-softwaretype
# std stuff for slapos instance
eggs-directory = ${buildout:eggs-directory}
......@@ -9,14 +12,46 @@ offline = true
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
gitlab = $${instance-gitlab.cfg:rendered}
gitlab-export = $${instance-gitlab-export.cfg:rendered}
gitlab-test = $${instance-gitlab-test.cfg:rendered}
default = $${:gitlab}
recipe = slapos.cookbook:switch-softwaretype
gitlab = instance-gitlab.cfg:rendered
gitlab-export = instance-gitlab-export.cfg:rendered
gitlab-test = instance-gitlab-test.cfg:rendered
RootSoftwareInstance = $${:gitlab}
# TODO -import, -pull-backup
[worker-processes]
recipe = slapos.recipe.build
init =
import multiprocessing
cpu_count = multiprocessing.cpu_count()
# automatically load all available CPUs
options['unicorn-worker-processes'] = cpu_count + 1
options['nginx-worker-processes'] = cpu_count
[slap-configuration]
# std stuff to fetch slapos instance parameters
recipe = slapos.cookbook:slapconfiguration
computer= $${slap-connection:computer-id}
partition=$${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
# autogenerated gitlab instance parameters
<= gitlab-parameters
# adjust/override some default settings:
configuration.unicorn_worker_processes = $${worker-processes:unicorn-worker-processes}
configuration.nginx_worker_processes = $${worker-processes:nginx-worker-processes}
# gitlab non-native parameters
configuration.icp_license =
# macro: render instance-*.cfg from instance-*.cfg.in
[instance-cfg]
recipe = slapos.recipe.template:jinja2
......@@ -25,13 +60,13 @@ rendered= $${buildout:directory}/$${:_buildout_section_name_}
context =
import os os
import pwd pwd
import multiprocessing multiprocessing
key bin_directory buildout:bin-directory
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
raw gitlab_repository_location ${gitlab-repository:location}
raw gitlab_shell_repository_location ${gitlab-shell-repository:location}
section instance_parameter_dict slap-configuration
# program binaries
raw bash_bin ${bash:location}/bin/bash
......@@ -67,7 +102,6 @@ context =
# config files
raw database_yml_in ${database.yml.in:target}
raw gitconfig_in ${gitconfig.in:target}
raw gitlab_parameters_cfg ${gitlab-parameters.cfg:target}
raw monitor_template ${monitor2-template:rendered}
raw gitlab_shell_config_yml_in ${gitlab-shell-config.yml.in:target}
raw gitlab_unicorn_startup_in ${gitlab-unicorn-startup.in:target}
......
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in the following categories.
#   - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
#     Copied verbatim.
#   - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
#     by the re-generation script.
#     Re-generated.
#   - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[template-cfg]
filename = instance.cfg.in
md5sum = 9653104b2217dc26b23f9c1b997124ca
[template_nginx_conf]
_update_hash_filename_ = templates/nginx_conf.in
md5sum = 7ea6d7cc3e05edafe7fcf707f547cb04
[template_mime_types]
_update_hash_filename_ = templates/mime_types.in
md5sum = 4ef94a7b458d885cd79ba0b930a5727e
[template_index_html]
_update_hash_filename_ = templates/index.html.in
md5sum = d588af7ad4d5adefa2e245d78c401ce1
[buildout]
parts =
default-index-html
default-config-toml
hugo-server
hugo-server-service
hugo-frontend
hugo-frontend-port
hugo-frontend-promise
hugo-port-listening-promise
logrotate-entry-nginx
mime-types
nginx-conf
nginx-launcher
nginx-graceful
nginx-certificate
nginx-frontend
nginx-frontend-port
nginx-frontend-promise
nginx-port-listening-promise
publish-connection-information
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
extends = {{ parameter_list['template_monitor'] }}
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
configuration.hugo-monitor-httpd-port = 8196
configuration.nginx-monitor-httpd-port = 8197
configuration.site = default
# Create all needed directories, depending on your needs
[directory]
recipe = slapos.cookbook:mkdirectory
home = ${buildout:directory}
etc = ${:home}/etc
var = ${:home}/var
srv = ${:home}/srv
bin = ${:home}/bin
[basedirectory]
recipe = slapos.cookbook:mkdirectory
script = ${directory:etc}/run
service = ${directory:etc}/service
log = ${directory:var}/log
run = ${directory:var}/run
backup = ${directory:srv}/backup
data = ${directory:srv}/hugo
ssl = ${directory:etc}/ssl
default-site = ${:data}/default
default-public = ${:default-site}/public
[tempdirectory]
recipe = slapos.cookbook:mkdirectory
tmp = ${directory:home}/tmp
client-body-temp-path = ${:tmp}/client_body_temp_path
proxy-temp-path = ${:tmp}/proxy_temp_path
fastcgi-temp-path = ${:tmp}/fastcgi_temp_path
uwsgi-temp-path = ${:tmp}/uwsgi_temp_path
scgi-temp-path = ${:tmp}/scgi_temp_path
[hugo-frontend-port]
recipe = slapos.cookbook:free_port
minimum = 1313
maximum = 1323
ip = ${slap-configuration:ipv6-random}
[nginx-frontend-port]
recipe = slapos.cookbook:free_port
minimum = 1324
maximum = 1334
ip = ${slap-configuration:ipv6-random}
[hugo]
nb-workers = 2
go-environment = {{ parameter_list['go_environment'] }}
ip = ${slap-configuration:ipv6-random}
hugo-port = ${hugo-frontend-port:port}
nginx-port = ${nginx-frontend-port:port}
hugo-access-url = http://[${:ip}]:${:hugo-port}
nginx-access-url = https://[${:ip}]:${:nginx-port}
path-pid = ${basedirectory:run}/nginx.pid
path-log = ${basedirectory:log}/nginx.log
path-access-log = ${basedirectory:log}/nginx.access.log
path-error-log = ${basedirectory:log}/nginx.error.log
path-tmp = ${tempdirectory:tmp}
path-nginx-conf = ${directory:etc}/nginx.conf
path-mime-types = ${directory:etc}/mime_types
path-nginx = {{ parameter_list['nginx_location'] }}/sbin/nginx
# Docroot
docroot = ${basedirectory:data}/${slap-configuration:configuration.site}/public
[hugo-server]
recipe = slapos.recipe.template:jinja2
rendered = ${directory:bin}/hugo-server
mode = 0700
template =
inline:#!/bin/sh
. ${hugo:go-environment}
cd ${basedirectory:data}/${slap-configuration:configuration.site}
if [ -d "public" ]; then rm -Rf public; fi
hugo && hugo server --bind=${hugo:ip} --port=${hugo:hugo-port} --baseURL=${hugo-frontend:connection-secure_access} --appendPort=false
[hugo-server-service]
recipe = slapos.cookbook:wrapper
wrapper-path = ${basedirectory:service}/hugo-server
command-line = ${hugo-server:rendered}
hash-files =
${hugo-server:rendered}
[hugo-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
name = hugo-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:hugo-port}
[monitor-instance-parameter]
monitor-httpd-port = ${slap-configuration:configuration.hugo-monitor-httpd-port}
[hugo-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Hugo frontend
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = ${hugo:hugo-access-url}
config-https-only = true
return = domain secure_access
[hugo-frontend-promise]
<= monitor-promise-base
module = check_url_available
name = hugo-http-frontend.py
url = ${hugo-frontend:connection-secure_access}
config-url = ${:url}
[nginx-conf]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_list['template_nginx_conf'] }}
rendered = ${hugo:path-nginx-conf}
context =
section param_hugo hugo
section param_tempdir tempdirectory
section param_nginxcert nginx-certificate
[mime-types]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_list['template_mime_types'] }}
rendered = ${hugo:path-mime-types}
[nginx-launcher]
recipe = slapos.cookbook:wrapper
command-line = {{ parameter_list['nginx_location'] }}/sbin/nginx -c ${hugo:path-nginx-conf}
wrapper-path = ${basedirectory:service}/nginx
# Generate a self-signed TLS certificate.
[nginx-certificate]
recipe = plone.recipe.command
command =
if [ ! -e ${:key-file} ]
then
{{ parameter_list['openssl_location'] }}/bin/openssl req -x509 -nodes -days 3650 \
-subj "/C=AA/ST=X/L=X/O=Dis/CN=${hugo:ip}" \
-newkey rsa:1024 -keyout ${:key-file} \
-out ${:cert-file}
fi
update-command = ${:command}
key-file = ${basedirectory:ssl}/nginx-certificate.key
cert-file = ${basedirectory:ssl}/nginx-certificate.cert
common-name = ${hugo:ip}
stop-on-error = true
[default-index-html]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_list['template_index_html'] }}
rendered = ${basedirectory:default-public}/index.html
context =
key go_environment hugo:go-environment
key data basedirectory:data
[default-config-toml]
recipe = slapos.recipe.template:jinja2
rendered = ${basedirectory:default-site}/config.toml
template =
inline:
baseURL = "${hugo-frontend:connection-secure_access}"
languageCode = "en-us"
title = "My New Hugo Site"
[nginx-graceful]
recipe = slapos.recipe.template:jinja2
rendered = ${basedirectory:script}/nginx-graceful
mode = 0700
template =
inline:#!/bin/sh
exec kill -s SIGHUP $(cat ${hugo:path-pid})
[nginx-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
name = nginx-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:nginx-port}
[monitor-instance-parameter]
monitor-httpd-port = ${slap-configuration:configuration.nginx-monitor-httpd-port}
[logrotate-entry-nginx]
<= logrotate-entry-base
name = nginx
log = ${hugo:path-access-log} ${hugo:path-error-log}
post = kill -USR1 $(cat ${hugo:path-pid})
[nginx-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Hugo Nginx frontend
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = ${hugo:nginx-access-url}
config-https-only = true
return = domain secure_access
[nginx-frontend-promise]
<= monitor-promise-base
module = check_url_available
name = nginx-http-frontend.py
url = ${nginx-frontend:connection-secure_access}
config-url = ${:url}
[publish-connection-information]
recipe = slapos.cookbook:publish
<= monitor-publish
go-environment = ${hugo:go-environment}
data = ${basedirectory:data}
hugo-server-url = ${hugo:hugo-access-url}
nginx-server-url = ${hugo:nginx-access-url}
hugo-server-cdn-url = ${hugo-frontend-promise:url}
nginx-server-cdn-url = ${nginx-frontend-promise:url}
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/hugo/buildout.cfg
../../component/nginx/buildout.cfg
../../component/openssl/buildout.cfg
parts =
slapos-cookbook
hugo
template-cfg
[python]
part = python3
[profile-common]
nginx_location = ${nginx:location}
dash_location = ${dash:location}
go_environment = ${gowork:env.sh}
openssl_location = ${openssl:location}
template_nginx_conf = ${template_nginx_conf:target}
template_mime_types = ${template_mime_types:target}
template_index_html = ${template_index_html:target}
template_monitor = ${monitor-template:rendered}
[template-cfg]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/template.cfg
template = ${:_profile_base_location_}/${:filename}
mode = 0644
context =
section buildout buildout
section parameter_list profile-common
[download-base]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
mode = 0644
[template_nginx_conf]
<= download-base
[template_mime_types]
<= download-base
[template_index_html]
<= download-base
<h1 style="text-align: center;" dir="ltr"><span style="font-family: 'comic sans ms', sans-serif;">Welcome to <a href="https://gohugo.io/">Hugo</a>'s workplace</span></h1>
<table border="1" style="border-collapse: collapse; width: 66%; border-color: #ffffff; margin-left: auto; margin-right: auto;">
<tbody>
<tr>
<td style="width: 99.934%;">
<p><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">Here are some instructions you might need:</span></p>
<ol>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">To get started, please change the directory to a Hugo workplace by</span><br />
<pre><code>$ cd {{ data }}</code></pre>
<span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">It's suggested that you store your splendid Hugo site in this directory that you can also find from the published information of the request script (<code>data</code>). There is always a Hugo server running for you so that the website can dynamically update according to your changes.<br /><br /></span></li>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">Then let's <code>source</code> the <code>go.work</code>'s environment path by typing the following command in your terminal:</span><br />
<pre><code>$ source {{ go_environment }}</code></pre>
<span style="font-family: 'comic sans ms', sans-serif; font-size: 14pt;">&nbsp;You can also find the path (<code>go_environment</code>) from the published information of the request script.<br /><br /></span></li>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">Once you see <code>(go.work) bash-4.3$</code> displayed on your terminal, you can follow Hugo's official documentation to have a <a href="https://gohugo.io/getting-started/quick-start/">quick start</a><br /><br /></span></li>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">Pass the <code>site</code> parameter with <code>--parameters site='your_site'</code> in the slapos request command (you can do it in the request script) to make your website online, e.g.</span><br />
<pre><code>$ slapos request $software_name'_1' $software_release_uri --parameters site='quickstart'</code></pre>
</li>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;"><a href="https://handbook.rapid.space/user/rapidspace-HowTo.Supply.A.Software.And.Request.An.Instance.On.Theia.Runner">re-instantiate</a> (<code>slapos node instance</code>)<br /><br /></span></li>
<li><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;">Retrieve the CDN URLs by re-launching the slapos request command. </span><br /><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;"><code>hugo-server-cdn-url</code> is the website hosted by the Hugo server while <code>nginx-server-cdn-url</code> is the website hosted by the Nginx server.</span></li>
</ol>
</td>
</tr>
</tbody>
</table>
<p style="text-align: center;"><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;"><strong></strong></span></p>
<p style="text-align: center;"><span style="font-size: 14pt; font-family: 'comic sans ms', sans-serif;"><strong>Now you can access your published website powered by Hugo</strong></span></p>
types {
text/html html htm shtml;
text/css css;
text/xml xml rss;
image/gif gif;
image/jpeg jpeg jpg;
application/x-javascript js;
application/atom+xml atom;
text/mathml mml;
text/plain txt;
text/vnd.sun.j2me.app-descriptor jad;
text/vnd.wap.wml wml;
text/x-component htc;
image/png png;
image/tiff tif tiff;
image/vnd.wap.wbmp wbmp;
image/x-icon ico;
image/x-jng jng;
image/x-ms-bmp bmp;
image/svg+xml svg svgz;
application/java-archive jar war ear;
application/mac-binhex40 hqx;
application/msword doc;
application/pdf pdf;
application/postscript ps eps ai;
application/rtf rtf;
application/vnd.ms-excel xls;
application/vnd.ms-powerpoint ppt;
application/vnd.wap.wmlc wmlc;
application/vnd.google-earth.kml+xml kml;
application/vnd.google-earth.kmz kmz;
application/x-7z-compressed 7z;
application/x-cocoa cco;
application/x-java-archive-diff jardiff;
application/x-java-jnlp-file jnlp;
application/x-makeself run;
application/x-perl pl pm;
application/x-pilot prc pdb;
application/x-rar-compressed rar;
application/x-redhat-package-manager rpm;
application/x-sea sea;
application/x-shockwave-flash swf;
application/x-stuffit sit;
application/x-tcl tcl tk;
application/x-x509-ca-cert der pem crt;
application/x-xpinstall xpi;
application/xhtml+xml xhtml;
application/zip zip;
application/octet-stream bin exe dll;
application/octet-stream deb;
application/octet-stream dmg;
application/octet-stream eot;
application/octet-stream iso img;
application/octet-stream msi msp msm;
application/ogg ogx;
audio/midi mid midi kar;
audio/mpeg mpga mpega mp2 mp3 m4a;
audio/ogg oga ogg spx;
audio/x-realaudio ra;
audio/webm weba;
video/3gpp 3gpp 3gp;
video/mp4 mp4;
video/mpeg mpeg mpg mpe;
video/ogg ogv;
video/quicktime mov;
video/webm webm;
video/x-flv flv;
video/x-mng mng;
video/x-ms-asf asx asf;
video/x-ms-wmv wmv;
video/x-msvideo avi;
}
worker_processes {{ param_hugo['nb-workers'] }};
pid {{ param_hugo['path-pid'] }};
error_log {{ param_hugo['path-error-log'] }};
daemon off;
events {
worker_connections 1024;
accept_mutex off;
}
http {
include {{ param_hugo['path-mime-types'] }};
default_type application/octet-stream;
types_hash_bucket_size 64;
access_log {{ param_hugo['path-access-log'] }} combined;
index index.html;
server {
listen [{{ param_hugo['ip'] }}]:{{ param_hugo['nginx-port'] }} ssl http2;
server_name _;
ssl_certificate {{ param_nginxcert['cert-file'] }};
ssl_certificate_key {{ param_nginxcert['key-file'] }};
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_prefer_server_ciphers off;
keepalive_timeout 5;
client_body_temp_path {{ param_tempdir['client-body-temp-path'] }};
proxy_temp_path {{ param_tempdir['proxy-temp-path'] }};
fastcgi_temp_path {{ param_tempdir['fastcgi-temp-path'] }};
uwsgi_temp_path {{ param_tempdir['uwsgi-temp-path'] }};
scgi_temp_path {{ param_tempdir['scgi-temp-path'] }};
# path for static files
root {{ param_hugo['docroot'] }};
}
}
Tests for hugo software release
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.hugo'
with open("README.md") as f:
long_description = f.read()
setup(
name=name,
version=version,
description="Test for SlapOS' HUGO",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
'requests',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
#
# Copyright (c) 2021 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class HugoTestCase(SlapOSInstanceTestCase):
def setUp(self):
self.connection_parameters = self.computer_partition.getConnectionParameterDict()
# Test Published Parameters
def test_nginx_url_get(self):
resp = requests.get(self.connection_parameters['nginx-server-cdn-url'], verify=False)
self.assertEqual(requests.codes.ok, resp.status_code)
def test_hugo_url_get(self):
resp = requests.get(self.connection_parameters['hugo-server-cdn-url'], verify=False)
self.assertEqual(requests.codes.ok, resp.status_code)
def test_dir_data(self):
data_path = self.connection_parameters['data']
self.assertTrue(os.path.exists(data_path))
def test_dir_go_env(self):
go_env = self.connection_parameters['go-environment']
self.assertTrue(os.path.exists(go_env))
\ No newline at end of file
# Nginx Push Stream
This software uses the [HTTP Push Stream](https://www.nginx.com/resources/wiki/modules/push_stream/)
module of nginx to make a [Server Sent Event](https://html.spec.whatwg.org/multipage/server-sent-events.html)
server.
Two endpoints are available, published as the following connection parameters:
- `publisher-url`, which uses the `/pub{?id}` format, with `id` being the ID of the channel.
  `POST` requests to this URL send new messages to the channel.
- `subscriber-url`, which uses the `/sub{/id}` format, with `id` being the ID of the channel.
  Clients can subscribe to this URL and be notified of new messages.
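
A minimal usage sketch in Python (assumptions: the instance address, the channel id `demo`, and the use of `verify=False` for a self-signed certificate are illustrative only; the real URLs come from the published `publisher-url` / `subscriber-url` connection parameters with their URI-template parts expanded):

```python
# Sketch only: the URLs below stand in for the published publisher-url /
# subscriber-url connection parameters, expanded for a channel id "demo".
import requests

base = "https://[2001:db8::1]:9443"      # illustrative instance address
publisher = base + "/pub?id=demo"        # POST here to publish a message
subscriber = base + "/sub/demo"          # GET here to receive messages (SSE)

# Publish one message to the channel.
requests.post(publisher, data="hello", verify=False)

# Subscribe and print incoming Server Sent Events as they arrive.
with requests.get(subscriber, stream=True, verify=False) as response:
    for line in response.iter_lines():
        if line:
            print(line.decode())
```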
[template]
filename = instance.cfg.in
md5sum = f9b6d01e29f2edddd9d6f99591976c33
[template-nginx-configuration]
filename = template-nginx.cfg.in
md5sum = 022e4b53e1b2db16c4e518fe76f638fa
[buildout]
parts =
nginx-service
htpasswd
htpasswd-runner
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
bin = $${buildout:directory}/bin
srv = $${buildout:directory}/srv
var = $${buildout:directory}/var
run = $${:var}/run
log = $${:var}/log
varnginx = $${:var}/nginx
services = $${:etc}/service
cron-entries = $${:etc}/cron.d
www = $${:srv}/www
ssl = $${:etc}/ssl
#################################
# Nginx service
#################################
[nginx-service]
recipe = slapos.recipe.template
url = ${template-nginx-service:output}
output = $${directory:services}/nginx
mode = 0700
virtual-depends =
$${nginx-configuration:ip}
[nginx-configuration]
recipe = slapos.recipe.template
url = ${template-nginx-configuration:output}
output = $${directory:etc}/nginx.cfg
mode = 0600
access_log = $${directory:log}/nginx-access.log
error_log = $${directory:log}/nginx-error.log
ip = $${slap-network-information:global-ipv6}
local_ip = $${slap-network-information:local-ipv4}
port = 9443
publisher_location_prefix = /pub
publisher_push_stream_store_messages = off
publisher_client_max_body_size = 16k
publisher_client_body_buffer_size = 16k
subscriber_allow_origin = '*'
subscriber_location_prefix = /sub
# Do not allow credentials when the allowed origin is '*'
subscriber_allow_credential = 'false'
subscriber_allow_methods = 'GET, HEAD, OPTIONS'
subscriber_allow_headers = 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since'
[htpasswd]
recipe = slapos.cookbook:generate.password
storage-path = $${directory:etc}/.pwd
bytes = 8
[htpasswd-runner]
recipe = plone.recipe.command
stop-on-error = true
htpasswd-path = $${directory:etc}/.htpasswd
command = if [ ! -f "$${:htpasswd-path}" ]; then ${buildout:bin-directory}/htpasswd -cb $${:htpasswd-path} $${:user} $${:password}; fi
update-command = $${:command}
user = admin
password = $${htpasswd:passwd}
[publish-connection-information]
recipe = slapos.cookbook:publish
init-password = $${htpasswd:passwd}
init-user = $${htpasswd-runner:user}
publisher-url = http://$${htpasswd-runner:user}:$${htpasswd:passwd}@[$${nginx-configuration:ip}]:$${nginx-configuration:port}$${nginx-configuration:publisher_location_prefix}
subscriber-url = http://$${htpasswd-runner:user}:$${htpasswd:passwd}@[$${nginx-configuration:ip}]:$${nginx-configuration:port}$${nginx-configuration:subscriber_location_prefix}
[buildout]
parts =
switch-softwaretype
nginx-service
promises
publish-connection-information
extends = ${monitor-template:rendered}
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${:nginx}
nginx = $${dynamic-template-nginx:rendered}
[dynamic-template-nginx]
recipe = slapos.recipe.template:jinja2
template = ${template-nginx:output}
rendered = $${buildout:parts-directory}/$${:_buildout_section_name_}/$${:filename}
filename = instance-nginx.cfg
[slap-connection]
computer-id = $${slap_connection:computer_id}
partition-id = $${slap_connection:partition_id}
server-url = $${slap_connection:server_url}
software-release-url = $${slap_connection:software_release_url}
key-file = $${slap_connection:key_file}
cert-file = $${slap_connection:cert_file}
[instance-parameter]
# Fetches parameters defined in SlapOS Master for this instance.
# Always the same.
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
bin = $${buildout:directory}/bin
srv = $${buildout:directory}/srv
var = $${buildout:directory}/var
run = $${:var}/run
log = $${:var}/log
varnginx = $${:var}/nginx
services = $${:etc}/service
cron-entries = $${:etc}/cron.d
www = $${:srv}/www
ssl = $${:etc}/ssl
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
#################################
# Nginx service
#################################
[nginx-service]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:services}/nginx
command-line =
${nginx-push-stream-output:nginx} -c $${nginx-configuration:output}
[nginx-configuration]
recipe = slapos.recipe.template
url = ${template-nginx-configuration:output}
output = $${directory:etc}/nginx.cfg
mode = 0600
access-log = $${directory:log}/nginx-access.log
error-log = $${directory:log}/nginx-error.log
ip = $${slap-configuration:ipv6-random}
local-ip = $${slap-configuration:ipv4-random}
port = 9443
base-url = https://[$${nginx-configuration:ip}]:$${nginx-configuration:port}
# Generate a self-signed TLS certificate.
[nginx-certificate]
recipe = plone.recipe.command
command =
if [ ! -e $${:key-file} ]
then
${openssl:location}/bin/openssl req -x509 -nodes -days 3650 \
-subj "/C=AA/ST=X/L=X/O=Dis/CN=$${nginx-configuration:ip}" \
-newkey rsa:1024 -keyout $${:key-file} \
-out $${:cert-file}
fi
update-command = $${:command}
key-file = $${directory:ssl}/${:_buildout_section_name_}.key
cert-file = $${directory:ssl}/${:_buildout_section_name_}.cert
common-name = $${nginx-configuration:ip}
stop-on-error = true
[promises]
recipe =
promises =
$${nginx-available-promise:recipe}
[nginx-available-promise]
<= monitor-promise-base
module = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${nginx-configuration:base-url}/status
[publish-connection-information]
recipe = slapos.cookbook:publish
# publisher-url and subscriber-url are URITemplates, with an id
# parameter which is the ID of the channel.
publisher-url = $${nginx-configuration:base-url}/pub{?id}
subscriber-url = $${nginx-configuration:base-url}/sub{/id}
[buildout]
extends =
../../stack/slapos.cfg
../../component/dash/buildout.cfg
../../component/nginx/buildout.cfg
../../stack/monitor/buildout.cfg
./buildout.hash.cfg
parts =
slapos-cookbook
......@@ -10,36 +11,18 @@ parts =
nginx-push-stream-module
nginx-push-stream
template
template-nginx-service
template-nginx
[python]
part = python3
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = eb4c69df9a8dbb94fb76d0a6c11e360f
output = ${buildout:directory}/template.cfg
mode = 0644
[template-nginx-service]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-nginx-service.sh.in
md5sum = 90ba5a247c489261d3304528cba56e06
output = ${buildout:directory}/template-nginx-service.sh.in
mode = 0644
[template-nginx-configuration]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-nginx.cfg.in
md5sum = f5658154b82282bc1871f18ddf4529d8
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template-nginx.cfg.in
mode = 0644
[template-nginx]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-nginx.cfg.in
md5sum = 936fea88f5548c4f14e287f1b27dc127
output = ${buildout:directory}/instance-nginx.cfg.in
mode = 0644
[versions]
inotifyx = 0.2.2
#!${dash-output:dash}
# BEWARE: This file is operated by slapos node
# BEWARE: It will be overwritten automatically
exec ${nginx-push-stream-output:nginx} \
-c $${nginx-configuration:output}
......@@ -8,7 +8,7 @@ events {
# multi_accept on;
}
error_log $${nginx-configuration:error_log};
error_log $${nginx-configuration:error-log};
http {
......@@ -33,8 +33,8 @@ http {
# Logging Settings
##
access_log $${nginx-configuration:access_log};
error_log $${nginx-configuration:error_log};
access_log $${nginx-configuration:access-log};
error_log $${nginx-configuration:error-log};
##
# Gzip Settings
......@@ -54,9 +54,21 @@ http {
##
push_stream_shared_memory_size 32m;
server {
listen [$${nginx-configuration:ip}]:$${nginx-configuration:port};
listen $${nginx-configuration:local_ip}:$${nginx-configuration:port};
listen [$${nginx-configuration:ip}]:$${nginx-configuration:port} ssl http2;
listen $${nginx-configuration:local-ip}:$${nginx-configuration:port} ssl http2;
# generated 2021-08-02, Mozilla Guideline v5.6, nginx 1.19.2, OpenSSL 1.1.1k, modern configuration, no HSTS, no OCSP
# https://ssl-config.mozilla.org/#server=nginx&version=1.19.2&config=modern&openssl=1.1.1k&hsts=false&ocsp=false&guideline=5.6
ssl_certificate $${nginx-certificate:cert-file};
ssl_certificate_key $${nginx-certificate:key-file};
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_prefer_server_ciphers off;
fastcgi_temp_path $${directory:varnginx} 1 2;
uwsgi_temp_path $${directory:varnginx} 1 2;
......@@ -65,42 +77,42 @@ server {
client_body_temp_path $${directory:varnginx} 1 2;
proxy_temp_path $${directory:varnginx} 1 2;
auth_basic "Nginx Access";
auth_basic_user_file $${htpasswd-runner:htpasswd-path};
## Serve an error 204 (No Content) for favicon.ico
location = /favicon.ico {
return 204;
}
location $${nginx-configuration:publisher_location_prefix} {
push_stream_publisher;
location = /status {
default_type "text/plain";
return 200 'OK';
}
push_stream_channels_path $arg_id;
# store messages in memory
push_stream_store_messages $${nginx-configuration:publisher_push_stream_store_messages};
location /pub {
push_stream_publisher;
# Message size limit
# client_max_body_size MUST be equal to client_body_buffer_size or
# you will be sorry.
client_max_body_size $${nginx-configuration:publisher_client_max_body_size};
client_body_buffer_size $${nginx-configuration:publisher_client_body_buffer_size};
push_stream_channels_path $arg_id;
# store messages in memory
push_stream_store_messages off;
}
# Message size limit
client_max_body_size 16k;
client_body_buffer_size 16k;
}
location ~ $${nginx-configuration:subscriber_location_prefix}/(.*) {
# activate subscriber mode for this location
add_header "Access-Control-Allow-Origin" $${nginx-configuration:subscriber_allow_origin};
add_header 'Access-Control-Allow-Credentials' $${nginx-configuration:subscriber_allow_credential};
add_header 'Access-Control-Allow-Methods' $${nginx-configuration:subscriber_allow_methods};
add_header 'Access-Control-Allow-Headers' $${nginx-configuration:subscriber_allow_headers};
location ~ /sub/(.*) {
# activate subscriber mode for this location
add_header "Access-Control-Allow-Origin" '*';
add_header 'Access-Control-Allow-Credentials' 'false';
add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since';
push_stream_subscriber eventsource;
# positional channel path
push_stream_channels_path $1;
push_stream_subscriber eventsource;
# positional channel path
push_stream_channels_path $1;
# content-type
default_type "text/event-stream; charset=utf-8";
# content-type
default_type "text/event-stream; charset=utf-8";
}
location / {
......
Tests for Nginx Push Stream software release
##############################################################################
#
# Copyright (c) 2021 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.nginx_push_stream'
long_description = open("README.md").read()
setup(
name=name,
version=version,
description="Test for SlapOS' Nginx Push Stream",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'requests',
'uritemplate',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
# coding: utf-8
# Copyright (c) 2021 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import multiprocessing
import uritemplate
import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestNginxPushStream(SlapOSInstanceTestCase):
def setUp(self):
self.connection_parameters = \
self.computer_partition.getConnectionParameterDict()
def test_push_stream_scenario(self):
def process_messages(q):
# type:(multiprocessing.Queue[bytes]) -> None
req = requests.get(
uritemplate.URITemplate(
self.connection_parameters['subscriber-url']).expand(
id='channel_id'),
verify=False,
stream=True,
)
if not req.ok:
q.put(('error: wrong status code %s' % req.status_code).encode())
q.put(b'ready')
for _, line in zip(range(2), req.iter_lines()):
q.put(line)
q = multiprocessing.Queue() # type: multiprocessing.Queue[bytes]
subscriber = multiprocessing.Process(target=process_messages, args=(q, ))
subscriber.start()
self.assertEqual(q.get(timeout=30), b'ready')
resp = requests.post(
uritemplate.URITemplate(
self.connection_parameters['publisher-url']).expand(
id='channel_id'),
verify=False,
data='Hello',
timeout=2,
)
resp.raise_for_status()
try:
subscriber.join(timeout=30)
except multiprocessing.TimeoutError:
subscriber.terminate()
subscriber.join(timeout=30)
self.fail('Process did not terminate')
self.assertEqual(q.get_nowait(), b': ')
self.assertEqual(q.get_nowait(), b'data: Hello')
......@@ -23,3 +23,4 @@ extra =
repman ${slapos.test.repman-setup:setup}
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
......@@ -139,6 +139,11 @@ setup = ${slapos-repository:location}/software/html5as/test/
egg = slapos.test.html5asbase
setup = ${slapos-repository:location}/software/html5as-base/test/
[slapos.test.hugo-setup]
<= setup-develop-egg
egg = slapos.test.hugo
setup = ${slapos-repository:location}/software/hugo/test/
[slapos.test.jupyter-setup]
<= setup-develop-egg
egg = slapos.test.jupyter
......@@ -149,6 +154,11 @@ setup = ${slapos-repository:location}/software/jupyter/test/
egg = slapos.test.nextcloud
setup = ${slapos-repository:location}/software/nextcloud/test/
[slapos.test.nginx-push-stream-setup]
<= setup-develop-egg
egg = slapos.test.nginx_push_stream
setup = ${slapos-repository:location}/software/nginx-push-stream/test/
[slapos.test.turnserver-setup]
<= setup-develop-egg
egg = slapos.test.turnserver
......@@ -252,6 +262,7 @@ extra-eggs =
${slapos.test.slaprunner-setup:egg}
${slapos.test.jupyter-setup:egg}
${slapos.test.nextcloud-setup:egg}
${slapos.test.nginx-push-stream-setup:egg}
${slapos.test.turnserver-setup:egg}
${slapos.test.theia-setup:egg}
${slapos.test.cloudooo-setup:egg}
......@@ -266,6 +277,7 @@ extra-eggs =
${slapos.test.fluentd-setup:egg}
${slapos.test.headless-chromium-setup:egg}
${slapos.test.erp5testnode-setup:egg}
${slapos.test.hugo-setup:egg}
# We don't name this interpreter `python`, so that when we run slapos node
# software, installation scripts running `python` use a python without any
......@@ -324,6 +336,7 @@ tests =
slaprunner ${slapos.test.slaprunner-setup:setup}
theia ${slapos.test.theia-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
###
${:extra}
......
......@@ -15,15 +15,39 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = 11d347dd2bf762902341746a388673a0
md5sum = 562acf69f344fa9f6d03992d696462d0
[instance]
_update_hash_filename_ = instance.cfg.in
md5sum = 063d3e19da9d3d4bfb77e8e638aa3a77
md5sum = a7d78b4002266c69ece05a476df82791
[instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in
md5sum = 861ef130f27175c2978a9b946b138dd5
[instance-export]
_update_hash_filename_ = instance-export.cfg.jinja.in
md5sum = b3cedaa1603ca8ed83fdd94ef4b35cc8
[instance-resilient]
_update_hash_filename_ = instance-resilient.cfg.jinja
md5sum = d78a9f885bdebf6720197209e0c21aa0
[theia-common]
_update_hash_filename_ = theia_common.py
md5sum = e57396473b4b6a17d26a747f0030293c
[theia-export]
_update_hash_filename_ = theia_export.py
md5sum = b5f5ac1924b27d3f2be2e5ea291c119e
[theia-import]
_update_hash_filename_ = theia_import.py
md5sum = 9e8c17a4b2d802695caf0c2c052f0d11
[yarn.lock]
_update_hash_filename_ = yarn.lock
md5sum = 80e7ad91deea54cebcccef5a83fdb380
md5sum = 18e8302b2acff3721cad23d829e3df55
[python-language-server-requirements.txt]
_update_hash_filename_ = python-language-server-requirements.txt
......
......@@ -13,6 +13,7 @@ urls = vscode-bat https://open-vsx.org/api/vscode/bat/1.54.3/file/vscode.bat-1.5
vscode-docker https://open-vsx.org/api/vscode/docker/1.54.3/file/vscode.docker-1.54.3.vsix a4e3cdd03833fe6bf3f5045fc5be4433
vscode-emmet https://open-vsx.org/api/vscode/emmet/1.54.3/file/vscode.emmet-1.54.3.vsix 5786c95921794ed3188083a7f804fa29
vscode-fsharp https://open-vsx.org/api/vscode/fsharp/1.54.3/file/vscode.fsharp-1.54.3.vsix 028f4ec9593533ce38603032b3f2821e
vscode-git https://open-vsx.org/api/vscode/git/1.54.3/file/vscode.git-1.54.3.vsix da2f2c0626b4a254a660ae20b0c97611
vscode-go https://open-vsx.org/api/vscode/go/1.54.3/file/vscode.go-1.54.3.vsix 9f095a75e4137079351150722d091256
vscode-groovy https://open-vsx.org/api/vscode/groovy/1.54.3/file/vscode.groovy-1.54.3.vsix 7ae14c40786311fda29059178f30c310
vscode-grunt https://open-vsx.org/api/vscode/grunt/1.54.3/file/vscode.grunt-1.54.3.vsix f7894259f1fa60e939ff2d9b49138a02
......
......@@ -17,6 +17,7 @@ for plugin_and_version in '''\
vscode/docker/latest
vscode/emmet/latest
vscode/fsharp/latest
vscode/git/latest
vscode/go/latest
vscode/groovy/latest
vscode/grunt/latest
......
{%- set parameter_dict = dict(default_parameter_dict, **parameter_dict) -%}
[buildout]
extends = {{ theia_instance_cfg }}
{{ pbsready_export_cfg }}
parts +=
monitor-base
$${:theia-parts}
$${:theia-environment-parts}
resilient-publish-connection-parameter
# The resilient stack makes the 'resilient' instance
# request the 'export' instance with a 'namebase' parameter.
# The export template then expects to receive it in
# slap-parameter:namebase
[slap-parameter]
namebase = {{ parameter_dict['namebase'] }}
# The resilient export stack periodically calls exporter:wrapper
# and then notifies the pull-backup instance that data is ready
# to be pulled from the export instance.
# All it expects is that a script be available in exporter:wrapper.
[exporter]
wrapper = $${theia-export-script:rendered}
[theia-export-script]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/theia-export-script
mode = 0700
exitcode-file = $${directory:srv}/export-exitcode-file
error-file = $${directory:srv}/export-errormessage-file
context =
raw python ${software-info:python-with-eggs}
raw theia_export ${software-info:theia-export}
raw bash ${software-info:bash}
raw rsync ${software-info:rsync}
raw sqlite3 ${software-info:sqlite3}
raw root_path $${buildout:directory}
raw backup_path $${directory:backup}
raw slapos_cfg $${directory:runner}/etc/slapos.cfg
raw project_path $${directory:project}
raw public_path $${directory:frontend-static-public}
key exitfile :exitcode-file
key errorfile :error-file
{%- raw %}
template =
inline:#!{{ bash }}
{{ python }} {{ theia_export }} \
--rsync {{ rsync }} \
--sqlite3 {{ sqlite3 }} \
--root {{ root_path }} \
--backup {{ backup_path }} \
--cfg {{ slapos_cfg }} \
--dirs {{ project_path }} \
--dirs {{ public_path }} \
--exitfile {{ exitfile }} \
--errorfile {{ errorfile }}
{%- endraw %}
# Add a promise to check that the export script has run
# successfully and recently (at most 2 days ago).
[promises]
export-promises =
$${export-promise:name}
[export-promise]
<= monitor-promise-base
module = check_command_execute
name = resiliency-export-promise.py
config-command = $${export-promise-script:rendered}
[initial-export-exitcode-file]
recipe = slapos.recipe.template:jinja2
rendered = $${theia-export-script:exitcode-file}
template = inline:0
once = $${:rendered}
[export-promise-script]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/export-promise-script
exitcode-file = $${initial-export-exitcode-file:rendered}
context =
key exitcodefile :exitcode-file
key errorfile theia-export-script:error-file
{%- raw %}
template =
inline:#!/bin/sh
if [ -z $(find {{ repr(exitcodefile) }} -mtime -2) ]
then
echo "ERROR export script last ran on " $(date -r {{ repr(exitcodefile) }})
exit 1
elif [ $( cat {{ repr(exitcodefile) }}) -ne 0 ]
then
echo "ERROR export script failed on " $(date -r {{ repr(exitcodefile) }})
cat {{ repr(errorfile) }}
exit 1
else
echo "OK export script last ran on " $(date -r {{ repr(exitcodefile) }})
exit 0
fi
{%- endraw %}
# Extend resilient parameters with normal theia connection parameters
[resilient-publish-connection-parameter]
<= publish-connection-parameter
{%- set parameter_dict = dict(default_parameter_dict, **parameter_dict) -%}
{%- set additional_frontend = parameter_dict['additional-frontend-guid'] -%}
[buildout]
extends = {{ theia_instance_cfg }}
{{ pbsready_import_cfg }}
parts +=
monitor-base
$${:theia-parts}
$${:theia-environment-parts}
# The resilient stack makes the 'resilient' instance
# request the 'import' instance with a 'namebase' parameter.
# The import template then expects to receive it in
# slap-parameter:namebase
[slap-parameter]
namebase = {{ parameter_dict['namebase'] }}
# Change frontend name to avoid conflicts
[remote-frontend]
name = Import {{ parameter_dict['frontend-name'] }}
{% if additional_frontend -%}
[remote-additional-frontend]
name = Import {{ parameter_dict['additional-frontend-name'] }}
{%- endif %}
# Change port ranges to avoid race conditions on port allocation
[frontend-instance-port]
minimum = 3200
maximum = 3300
[theia-service-port]
minimum = 3700
maximum = 3800
[slapos-standalone-port]
minimum = 4200
maximum = 4300
# Always disable autoprocessing in the import instance
[slapos-autorun]
autorun = stopped
# Change the gravatar favicon seed
[favicon.ico]
seed = Import {{ root_title }}
# The resilient stack calls post-notification-run:output followed by
# importer:wrapper when the instance is notified that the backup files
# have just been pushed to it. All it expects is the path of a script
# in post-notification-run:output and in importer:wrapper.
[post-notification-run]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/post-notification-run-script
output = $${:rendered}
mode = 0700
template =
inline:#!${software-info:bash}
# Do nothing because the backup signature will
# be verified by the import script itself
exit 0
[importer]
wrapper = $${theia-import-script:rendered}
[theia-import-script]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/theia-import-script
mode = 0700
exitcode-file = $${directory:srv}/import-exitcode-file
error-file = $${directory:srv}/import-errormessage-file
context =
raw python ${software-info:python-with-eggs}
raw theia_import ${software-info:theia-import}
raw bash ${software-info:bash}
raw rsync ${software-info:rsync}
raw sqlite3 ${software-info:sqlite3}
raw slapos ${software-info:slapos}
raw slapos_node_software_log $${directory:runner}/var/log/slapos-node-software.log
raw slapos_node_instance_log $${directory:runner}/var/log/slapos-node-instance.log
raw supervisorctl ${software-info:supervisorctl}
raw supervisord_conf $${directory:runner}/etc/supervisord.conf
raw root_path $${buildout:directory}
raw backup_path $${directory:backup}
raw slapos_cfg $${directory:runner}/etc/slapos.cfg
raw project_path $${directory:project}
raw public_path $${directory:frontend-static-public}
key exitfile :exitcode-file
key errorfile :error-file
{%- raw %}
template =
inline:#!{{ bash }}
. $${common-environment:rendered}
. $${slapos-standalone-activate:rendered}
{{ python }} {{ theia_import }} \
--rsync {{ rsync }} \
--sqlite3 {{ sqlite3 }} \
--slapos {{ slapos }} \
--srlog {{ slapos_node_software_log }} \
--cplog {{ slapos_node_instance_log }} \
--supervisorctl {{ supervisorctl }} \
--supervisordconf {{ supervisord_conf }} \
--root {{ root_path }} \
--backup {{ backup_path }} \
--cfg {{ slapos_cfg }} \
--dirs {{ project_path }} \
--dirs {{ public_path }} \
--exitfile {{ exitfile }} \
--errorfile {{ errorfile }}
{%- endraw %}
# Add a promise to check that the import script has run
# successfully and recently (at most 2 days ago).
[promises]
import-promises =
$${import-promise:name}
[import-promise]
<= monitor-promise-base
module = check_command_execute
name = resiliency-import-promise.py
config-command = $${import-promise-script:rendered}
[initial-import-exitcode-file]
recipe = slapos.recipe.template:jinja2
rendered = $${theia-import-script:exitcode-file}
template = inline:0
once = $${:rendered}
[import-promise-script]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/import-promise-script
exitcode-file = $${initial-import-exitcode-file:rendered}
context =
key exitcodefile :exitcode-file
key errorfile theia-import-script:error-file
{%- raw %}
template =
inline:#!/bin/sh
if [ -z $(find {{ repr(exitcodefile) }} -mtime -2) ]
then
echo "ERROR import script last ran on " $(date -r {{ repr(exitcodefile) }})
exit 1
elif [ $( cat {{ repr(exitcodefile) }}) -ne 0 ]
then
echo "ERROR import script failed on " $(date -r {{ repr(exitcodefile) }})
cat {{ repr(errorfile) }}
exit 1
else
echo "OK import script last ran on " $(date -r {{ repr(exitcodefile) }})
exit 0
fi
{%- endraw %}
# Resilient connection parameters of import instance are published
# through the resilient stack.
# Extend resilient parameters with normal theia connection parameters
[resilient-publish-connection-parameter]
<= publish-connection-parameter
{
"$schema": "http://json-schema.org/draft-04/schema",
"type": "object",
"description": "Parameters to instantiate resilient Theia",
"allOf": [
{
"$ref": "instance-input-schema.json#/"
},
{
"properties": {
"resilient-clone-number": {
"title": "Amount of backup(s) to create",
"description": "Amount of backup(s) to create. Each backup consists of a Pull Backup Server and a clone.",
"type": "integer",
"default": 1,
"minimum": 0,
"maximum": 2,
"optional": true
},
"-sla-theia0-computer_guid": {
"title": "Target computer for main instance",
"description": "Target computer GUID for main instance.",
"type": "string",
"optional": true
},
"-sla-theia1-computer_guid": {
"title": "Target computer for first clone",
"description": "Target computer for first clone instance.",
"type": "string",
"optional": true
},
"-sla-pbs1-computer_guid": {
"title": "Target computer for first PBS",
"description": "Target computer for first PBS instance.",
"type": "string",
"optional": true
},
"-sla-theia2-computer_guid": {
"title": "Target computer for second clone",
"description": "Target computer for second clone instance.",
"type": "string",
"optional": true
},
"-sla-pbs2-computer_guid": {
"title": "Target computer for second PBS",
"description": "Target computer for second PBS instance.",
"type": "string",
"optional": true
},
"resiliency-backup-periodicity": {
"title": "Periodicity of backup",
"description": "Periodicity of backup, in cron format.",
"type": "string",
"optional": true
},
"remove-backup-older-than": {
"title": "Remove backups older than...",
"description": "Remove all the backups in PBS that are older than specified value. It should be rdiff-backup-compatible.",
"type": "string",
"default": "2W",
"optional": true
},
"ignore-known-hosts-file": {
"title": "Ignore known_hosts file",
"description": "Set either to fill known_hosts file for ssh or not. Useful if main instance and PBS are using the same IP (slapos proxy, theia).",
"type": "boolean",
"default": false,
"optional": true
}
}
}
]
}
{% import 'parts' as parts %}
{% import 'replicated' as replicated with context %}
{% set number_of_instances = slapparameter_dict.get('resilient-clone-number', 1)|int %}
[buildout]
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
extends =
{{ monitor_template }}
parts +=
# Generate the parts to request theia-export, pull-backup and theia-import
# See stack/resilient/template-parts.cfg.in and stack/resilient/template-replicated.cfg.in
# See below for the generation of the sections corresponding to the parts generated here
{{ parts.replicate("theia", number_of_instances + 1) }}
# Also publish some connection parameters
publish-connection-parameter
[ArgLeader]
[ArgBackup]
# Generate sections to request theia-export, pull-backup and theia-import
# See stack/resilient/template-replicated.cfg.in
# In particular:
#
# [request-theia]
# <= ArgLeader
# software-type = export
# ...
#
# [request-theia-pseudo-replicating-1]
# <= ArgBackup
# software-type = import
# ...
#
# [request-pbs-theia-1]
# software-type = pull-backup
# ...
#
{{ replicated.replicate("theia", number_of_instances + 1,
"export", "import",
"ArgLeader", "ArgBackup",
slapparameter_dict=slapparameter_dict) }}
# Extend the list of return parameters for the export request
# The monitor parameters are only there to assert they are
# actually published by the export instance
[request-theia]
return += url username password backend-url monitor-base-url monitor-setup-url
# Extend the list of return parameters for the import request
# with the monitor parameters to assert they are actually published
[request-theia-pseudo-replicating-1]
return += monitor-base-url monitor-setup-url
# Publish some parameters from the export instance
[publish-connection-parameter]
recipe = slapos.cookbook:publish
url = ${request-theia:connection-url}
username = ${request-theia:connection-username}
password = ${request-theia:connection-password}
backend-url = ${request-theia:connection-backend-url}
# Publish resiliency parameters fetched by the resilient stack
[publish-connection-parameter]
<= publish-connection-information
......@@ -41,23 +41,26 @@ backend-url = $${frontend-instance:url}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
dot-theia = $${buildout:directory}/.theia/
home = $${buildout:directory}
etc = $${:home}/etc
var = $${:home}/var
srv = $${:home}/srv
bin = $${:home}/bin
tmp = $${:home}/tmp
dot-theia = $${:home}/.theia/
pidfiles = $${:var}/run
services = $${:etc}/service
runner = $${:srv}/runner
backup = $${:srv}/backup/theia
project = $${:srv}/project
frontend-static = $${:srv}/frontend-static
frontend-static-public = $${:frontend-static}/public
frontend-static-css = $${:frontend-static}/css
bash-completions = $${buildout:directory}/.local/share/bash-completion/completions/
fish-completions = $${buildout:directory}/.config/fish/completions/
bash-completions = $${:home}/.local/share/bash-completion/completions/
fish-completions = $${:home}/.config/fish/completions/
# Promises
......@@ -68,11 +71,13 @@ recipe =
instance-promises =
$${theia-listen-promise:name}
$${frontend-listen-promise:name}
$${frontend-authentification-promise:name}
$${remote-frontend-url-available-promise:name}
{% if additional_frontend %}
$${remote-additional-frontend-url-available-promise:name}
{% endif %}
$${slapos-standalone-listen-promise:name}
$${slapos-standalone-ready-promise:name}
$${slapos-autorun-promise:name}
[theia-listen-promise]
......@@ -89,6 +94,16 @@ name = $${:_buildout_section_name_}.py
config-host = $${frontend-instance:ip}
config-port = $${frontend-instance:port}
[frontend-authentification-promise]
<= monitor-promise-base
module = check_url_available
name = $${:_buildout_section_name_}.py
username = $${frontend-instance-password:username}
password = $${frontend-instance-password:passwd}
ip = $${frontend-instance:ip}
port = $${frontend-instance:port}
config-url = https://$${:username}:$${:password}@[$${:ip}]:$${:port}
[remote-frontend-url-available-promise]
<= monitor-promise-base
module = check_url_available
......@@ -113,13 +128,19 @@ name = standalone-listen-promise.py
config-host = $${slapos-standalone-instance:hostname}
config-port = $${slapos-standalone-instance:port}
[slapos-standalone-ready-promise]
<= monitor-promise-base
module = check_socket_listening
name = standalone-ready-promise.py
config-abstract = $${directory:runner}/standalone_ready
[slapos-autorun-promise]
<= monitor-promise-base
module = check_service_state
# XXX promise plugins can not contain "slapos" in their names
name = autorun-state-promise.py
config-service = $${slapos-autorun:service-name}
config-expect = $${slapos-autorun:autorun}
config-run-directory = $${directory:runner}/var/run
# Remote Caddy Frontend
......@@ -279,13 +300,14 @@ wait-for-files = $${frontend-instance:pidfile}
[favicon.ico]
# generate a pseudo random favicon, different for each instance name.
recipe = slapos.recipe.build
seed = {{ root_title }}
install =
import hashlib, shutil
buildout_offline = self.buildout['buildout']['offline']
self.buildout['buildout']['offline'] = 'false'
try:
gravatar_url = "https://www.gravatar.com/avatar/" + hashlib.md5(
b'''{{ root_title }}'''
b'''$${:seed}'''
).hexdigest() + "?s=256&d=retro"
shutil.copy(self.download(gravatar_url), '''$${:location}''')
except Exception:
......@@ -310,7 +332,7 @@ mode = 0700
template =
inline:
#!/bin/sh
export HOME=$${buildout:directory}
export HOME=$${directory:home}
export PATH=${python-language-server:location}/bin:${java-jdk:location}/bin:${cli-utilities:PATH}:$HOME/.cargo/bin:$PATH
......@@ -375,9 +397,6 @@ template =
. {{ activate }}
unset GIT_EXEC_PATH
set -- --rcfile {{ bashrc }}
# otherwise, assume this shell is running task and add an artificial delay to workaround https://github.com/eclipse-theia/theia/issues/2961
else
sleep 1
fi
exec "$SHELL" "$@"
{% endraw %}
......@@ -419,6 +438,7 @@ ip = {{ ipv4_random }}
ipv4 = {{ ipv4_random }}
ipv6 = {{ ipv6_random }}
port = $${slapos-standalone-port:port}
local-software-release-root = $${directory:home}
slapos-configuration = $${directory:runner}/etc/slapos.cfg
computer-id = slaprunner
......@@ -447,6 +467,7 @@ template =
$${slapos-standalone-config:ipv4} \
$${slapos-standalone-config:ipv6} \
$${slapos-standalone-config:port} \
$${slapos-standalone-config:local-software-release-root} \
$${slapos-standalone-config:computer-id} \
{%- if parameter_dict.get('embedded-sr') %}
--sr='{{ parameter_dict['embedded-sr'] }}' \
......
......@@ -2,6 +2,9 @@
parts =
switch-softwaretype
extends =
${template-resilient-templates:output}
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
......@@ -18,6 +21,11 @@ recipe = slapos.cookbook:switch-softwaretype
RootSoftwareInstance = $${:default}
default = $${:theia}
theia = theia:rendered
export = export:rendered
import = import:rendered
resilient = resilient:rendered
frozen = instance-frozen:rendered
pull-backup = template-pull-backup:rendered
[theia]
recipe = slapos.recipe.template:jinja2
......@@ -46,3 +54,43 @@ default-parameters =
"additional-frontend-guid": null
}
frontend-sr = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
[import]
recipe = slapos.recipe.template:jinja2
template = ${instance-import:output}
rendered = $${buildout:directory}/instance-import.cfg
mode = 0644
context =
jsonkey default_parameter_dict theia:default-parameters
key parameter_dict slap-configuration:configuration
key theia_instance_cfg theia:rendered
key pbsready_import_cfg template-pbsready-import:rendered
key root_title slap-configuration:root-instance-title
[export]
recipe = slapos.recipe.template:jinja2
template = ${instance-export:output}
rendered = $${buildout:directory}/instance-export.cfg
mode = 0644
context =
jsonkey default_parameter_dict theia:default-parameters
key parameter_dict slap-configuration:configuration
key theia_instance_cfg theia:rendered
key pbsready_export_cfg template-pbsready-export:rendered
[resilient]
recipe = slapos.recipe.template:jinja2
template = ${instance-resilient:output}
rendered = $${buildout:directory}/instance-resilient.cfg
mode = 0644
extensions = jinja2.ext.do
context =
key buildout buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
raw monitor_template ${monitor-template:rendered}
template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
......@@ -12,9 +12,12 @@ extends =
../../component/coreutils/buildout.cfg
../../component/java-jdk/buildout.cfg
../../component/fonts/buildout.cfg
../../component/libsecret/buildout.cfg
../../component/pkgconfig/buildout.cfg
../../stack/nodejs.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../stack/resilient/buildout.cfg
../../component/defaults.cfg
./download-plugins.cfg
./buildout.hash.cfg
......@@ -22,8 +25,14 @@ extends =
parts =
theia-wrapper
slapos-cookbook
python-with-eggs
instance-theia
instance
instance-import
instance-export
instance-resilient
theia-common
theia-export
# default for slapos-standalone
shared-part-list =
......@@ -54,9 +63,10 @@ initialization =
import argparse
import glob
import json
import os.path
import sys
import os
import signal
import socket
import sys
import time
import slapos.slap.standalone
......@@ -66,6 +76,7 @@ initialization =
parser.add_argument('ipv4')
parser.add_argument('ipv6')
parser.add_argument('server_port', type=int)
parser.add_argument('local_software_release_root')
parser.add_argument('computer_id')
parser.add_argument('--sr')
parser.add_argument('--srtype')
......@@ -101,58 +112,64 @@ initialization =
instance_root="%s/instance" % args.base_directory,
partition_forward_configuration=partition_forward_configuration,
slapos_bin="${buildout:bin-directory}/slapos",
local_software_release_root=args.local_software_release_root,
)
standalone.start()
partition_count = 20
if len(glob.glob(os.path.join(standalone.instance_directory, '*'))) < partition_count:
print("Standalone SlapOS: Formatting {partition_count} partitions".format(
partition_count=partition_count))
standalone.format(
partition_count,
args.ipv4,
args.ipv6
)
print("Standalone SlapOS for computer `{}` started".format(args.computer_id))
# Run instance at least once, to start the supervisor managing instances.
try:
standalone.waitForInstance(max_retry=0)
except slapos.slap.standalone.SlapOSNodeCommandError as e:
print("Error instanciating: {}".format(e))
if args.sr:
try:
with open(args.srparams) as f:
params = json.load(f)
except Exception:
params = None
if not isinstance(params, dict):
params = None
print("Supplying and Requesting Embedded Software {sr} with type {srtype}".format(
sr=args.sr, srtype=args.srtype))
print("With parameters {param_dict} parsed from '{srparams}'".format(
param_dict=params, srparams=args.srparams))
standalone.supply(args.sr)
standalone.request(
args.sr,
"Embedded Instance",
args.srtype,
partition_parameter_kw=params,
)
quit_requested = []
def signal_handler(signum, frame):
print("Signal {signum} received".format(signum=signum))
quit_requested.append(True)
sys.exit()
signal.signal(signal.SIGTERM, signal_handler)
print("Standalone SlapOS ready")
while not quit_requested:
time.sleep(.1)
standalone.start()
try:
partition_count = 20
if len(glob.glob(os.path.join(standalone.instance_directory, '*'))) < partition_count:
print("Standalone SlapOS: Formatting {partition_count} partitions".format(
partition_count=partition_count))
standalone.format(
partition_count,
args.ipv4,
args.ipv6,
)
print("Standalone SlapOS for computer `{}` started".format(args.computer_id))
# Run instance at least once, to start the supervisor managing instances.
try:
standalone.waitForInstance(max_retry=0)
except slapos.slap.standalone.SlapOSNodeCommandError as e:
print("Error instanciating: {}".format(e))
if args.sr:
try:
with open(args.srparams) as f:
params = json.load(f)
except Exception:
params = None
if not isinstance(params, dict):
params = None
print("Supplying and Requesting Embedded Software {sr} with type {srtype}".format(
sr=args.sr, srtype=args.srtype))
print("With parameters {param_dict} parsed from '{srparams}'".format(
param_dict=params, srparams=args.srparams))
standalone.supply(args.sr)
standalone.request(
args.sr,
"Embedded Instance",
args.srtype,
partition_parameter_kw=params,
)
s = socket.socket(socket.AF_UNIX)
s.bind('\0' + os.path.join(args.base_directory, 'standalone_ready'))
s.listen(5)
print("Standalone SlapOS ready")
while True:
s.accept()[0].close()
finally:
print("Stopping standalone subsystem")
standalone.stop()
print("Exiting")
print("Stopping standalone subsystem")
standalone.stop()
print("Exiting")
sys.exit(0)
needs-these-eggs-scripts-in-path =
${supervisor:recipe}
......@@ -191,7 +208,11 @@ stop-on-error = true
[theia]
recipe = plone.recipe.command
command = ${bash:location}/bin/bash -c "
export TMPDIR=${:location}/tmp PATH=${nodejs:location}/bin:$PATH &&
export \
TMPDIR=${:location}/tmp \
PATH=${nodejs:location}/bin:${pkgconfig:location}/bin:$PATH \
PKG_CONFIG_PATH=${libsecret:pkg-config-path} \
LDFLAGS='-Wl,-rpath=${libsecret:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${glib:location}/lib' && \
mkdir -p ${:location} && \
mkdir -p \$TMPDIR && \
cd ${:location} && \
......@@ -233,6 +254,11 @@ template =
inline:{
"private": true,
"theia": {
"backend": {
"config": {
"warnOnPotentiallyInsecureHostPattern": false
}
},
"frontend": {
"config": {
"applicationName": "Theia SlapOS",
......@@ -262,7 +288,8 @@ template =
"plantuml.render": "PlantUMLServer",
"gitlens.remotes": [{ "domain": "lab.nexedi.com", "type": "GitLab" }],
"java.home": "${java-jdk:location}"
}
},
"warnOnPotentiallyInsecureHostPattern": false
}
},
"generator": {
......@@ -272,17 +299,16 @@ template =
}
},
"dependencies": {
"@theia/bulk-edit": "latest",
"@theia/callhierarchy": "latest",
"@theia/console": "latest",
"@theia/core": "latest",
"@theia/cpp-debug": "latest",
"@theia/debug": "latest",
"@theia/editor": "latest",
"@theia/editor-preview": "latest",
"@theia/file-search": "latest",
"@theia/filesystem": "latest",
"@theia/getting-started": "latest",
"@theia/git": "latest",
"@theia/keymaps": "latest",
"@theia/markers": "latest",
"@theia/messages": "latest",
......@@ -292,16 +318,19 @@ template =
"@theia/navigator": "latest",
"@theia/outline-view": "latest",
"@theia/output": "latest",
"@theia/plugin": "latest",
"@theia/plugin-dev": "latest",
"@theia/plugin-ext": "latest",
"@theia/plugin-ext-vscode": "latest",
"@theia/preferences": "latest",
"@theia/preview": "latest",
"@theia/process": "latest",
"@theia/property-view": "latest",
"@theia/scm": "latest",
"@theia/scm-extra": "latest",
"@theia/search-in-workspace": "latest",
"@theia/task": "latest",
"@theia/terminal": "latest",
"@theia/timeline": "latest",
"@theia/typehierarchy": "latest",
"@theia/userstorage": "latest",
"@theia/variable-resolver": "latest",
......@@ -343,6 +372,18 @@ template =
#!/bin/sh
exec ${nodejs:location}/bin/node ${theia:location}/node_modules/.bin/theia-open "$@"
[python-with-eggs]
recipe = zc.recipe.egg
interpreter = ${:_buildout_section_name_}
eggs =
${slapos-toolbox:eggs}
six
zc.buildout
# Only generate the interpreter script to avoid conflicts with scripts
# for eggs that are also generated by another section, like slapos.toolbox
scripts = ${:interpreter}
[instance-theia]
<= template-base
output = ${buildout:directory}/instance-theia.cfg.jinja
......@@ -350,3 +391,37 @@ output = ${buildout:directory}/instance-theia.cfg.jinja
[instance]
<= template-base
output = ${buildout:directory}/instance.cfg
[instance-import]
<= template-base
output = ${buildout:directory}/instance-import.cfg.jinja
[instance-export]
<= template-base
output = ${buildout:directory}/instance-export.cfg.jinja
[instance-resilient]
<= download-base
[theia-common]
<= download-base
destination = ${buildout:directory}/theia_common.py
[theia-export]
<= download-base
destination = ${buildout:directory}/theia_export.py
[theia-import]
<= download-base
destination = ${buildout:directory}/theia_import.py
[software-info]
slapos = ${buildout:bin-directory}/slapos
python-with-eggs = ${buildout:bin-directory}/${python-with-eggs:interpreter}
python = ${python:location}/bin/python
rsync = ${rsync:location}/bin/rsync
sqlite3 = ${sqlite3:location}/bin/sqlite3
bash = ${bash:location}/bin/bash
supervisorctl = ${buildout:bin-directory}/supervisorctl
theia-export = ${theia-export:output}
theia-import = ${theia-import:output}
......@@ -9,6 +9,13 @@
"description": "Default",
"request": "instance-input-schema.json",
"response": "instance-output-schema.json",
"index": 0
},
"resilient": {
"title": "Resilient",
"description": "Resilient Theia",
"request": "instance-resilient-input-schema.json",
"response": "instance-output-schema.json",
"index": 1
}
}
......
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import unicode_literals
import gzip
import json
import os
import re
import subprocess
import time
import unittest
import requests
from datetime import datetime, timedelta
from six.moves.urllib.parse import urljoin
from slapos.testing.testcase import installSoftwareUrlList
import test_resiliency
from test import SlapOSInstanceTestCase, theia_software_release_url
erp5_software_release_url = os.path.abspath(
os.path.join(
os.path.dirname(__file__), '..', '..', 'erp5', 'software.cfg'))
def setUpModule():
installSoftwareUrlList(
SlapOSInstanceTestCase,
[theia_software_release_url, erp5_software_release_url],
debug=bool(int(os.environ.get('SLAPOS_TEST_DEBUG', 0))),
)
class ERP5Mixin(object):
_test_software_url = erp5_software_release_url
_connexion_parameters_regex = re.compile(r"{\s*'_'\s*:\s*'(.*)'\s*}")
def _getERP5ConnexionParameters(self, software_type='export'):
slapos = self._getSlapos(software_type)
out = subprocess.check_output(
(slapos, 'request', 'test_instance', self._test_software_url),
stderr=subprocess.STDOUT,
)
print(out)
return json.loads(self._connexion_parameters_regex.search(out).group(1))
def _getERP5Url(self, connexion_parameters, path=''):
return urljoin(connexion_parameters['family-default-v6'], path)
def _getERP5User(self, connexion_parameters):
return connexion_parameters['inituser-login']
def _getERP5Password(self, connexion_parameters):
return connexion_parameters['inituser-password']
def _waitERP5connected(self, url, user, password):
for _ in range(5):
try:
resp = requests.get('%s/getId' % url, auth=(user, password), verify=False, allow_redirects=False)
except Exception:
time.sleep(20)
continue
if resp.status_code != 200:
time.sleep(20)
continue
break
else:
self.fail('Failed to connect to ERP5')
self.assertEqual(resp.text, 'erp5')
def _getERP5Partition(self, servicename):
p = subprocess.Popen(
(self._getSlapos(), 'node', 'status'),
stdout=subprocess.PIPE, universal_newlines=True)
out, _ = p.communicate()
found = set()
for line in out.splitlines():
if servicename in line:
found.add(line.split(':')[0])
if not found:
raise Exception("ERP5 %s partition not found" % servicename)
elif len(found) > 1:
raise Exception("Found several partitions for ERP5 %s" % servicename)
return found.pop()
def _getERP5PartitionPath(self, software_type, servicename, *paths):
partition = self._getERP5Partition(servicename)
return self._getPartitionPath(
software_type, 'srv', 'runner', 'instance', partition, *paths)
class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience):
test_instance_max_retries = 12
backup_max_tries = 480
backup_wait_interval = 60
def _prepareExport(self):
super(TestTheiaResilienceERP5, self)._prepareExport()
# Connect to erp5
info = self._getERP5ConnexionParameters()
user = self._getERP5User(info)
password = self._getERP5Password(info)
url = self._getERP5Url(info, 'erp5')
self._waitERP5connected(url, user, password)
# Change title
new_title = time.strftime("HelloTitle %a %d %b %Y %H:%M:%S", time.localtime(time.time()))
requests.get('%s/portal_types/setTitle?value=%s' % (url, new_title), auth=(user, password), verify=False)
resp = requests.get('%s/portal_types/getTitle' % url, auth=(user, password), verify=False, allow_redirects=False)
self.assertEqual(resp.text, new_title)
self._erp5_new_title = new_title
# Wait until changes have been catalogued
mariadb_partition = self._getERP5PartitionPath('export', 'mariadb')
mysql_bin = os.path.join(mariadb_partition, 'bin', 'mysql')
wait_activities_script = os.path.join(
mariadb_partition, 'software_release', 'parts', 'erp5',
'Products', 'CMFActivity', 'bin', 'wait_activities')
subprocess.check_call((wait_activities_script, 'erp5'), env={'MYSQL': mysql_bin})
# Check that changes have been catalogued
output = subprocess.check_output((mysql_bin, 'erp5', '-e', 'SELECT title FROM catalog WHERE id="portal_types"'))
self.assertIn(new_title, output)
# Compute backup date in the near future
soon = (datetime.now() + timedelta(minutes=4)).replace(second=0)
date = '*:%d:00' % soon.minute
params = '_={"zodb-zeo": {"backup-periodicity": "%s"}, "mariadb": {"backup-periodicity": "%s"} }' % (date, date)
# Update ERP5 parameters
print('Requesting ERP5 with parameters %s' % params)
slapos = self._getSlapos()
subprocess.check_call((slapos, 'request', 'test_instance', self._test_software_url, '--parameters', params))
# Process twice to propagate parameter changes
for _ in range(2):
subprocess.check_call((slapos, 'node', 'instance'))
# Restart cron (actually all) services to let them take the new date into account
# XXX this should not be required, updating ERP5 parameters should be enough
subprocess.call((slapos, 'node', 'restart', 'all'))
# Wait until after the programmed backup date, and a bit more
t = (soon - datetime.now()).total_seconds()
self.assertLess(0, t)
time.sleep(t + 120)
# Check that mariadb backup has started
mariadb_backup = os.path.join(mariadb_partition, 'srv', 'backup', 'mariadb-full')
mariadb_backup_dump, = os.listdir(mariadb_backup)
# Check that zodb backup has started
zodb_backup = self._getERP5PartitionPath('export', 'zeo', 'srv', 'backup', 'zodb', 'root')
self.assertEqual(len(os.listdir(zodb_backup)), 3)
# Check that mariadb catalog backup contains expected changes
with gzip.open(os.path.join(mariadb_backup, mariadb_backup_dump)) as f:
self.assertIn(new_title, f.read(), "Mariadb catalog backup %s is not up to date" % mariadb_backup_dump)
def _checkTakeover(self):
super(TestTheiaResilienceERP5, self)._checkTakeover()
# Connect to erp5
info = self._getERP5ConnexionParameters()
user = self._getERP5User(info)
password = self._getERP5Password(info)
url = self._getERP5Url(info, 'erp5')
self._waitERP5connected(url, user, password)
resp = requests.get('%s/portal_types/getTitle' % url, auth=(user, password), verify=False, allow_redirects=False)
self.assertEqual(resp.text, self._erp5_new_title)
# Check that the mariadb catalog is not yet restored
mariadb_partition = self._getERP5PartitionPath('export', 'mariadb')
mysql_bin = os.path.join(mariadb_partition, 'bin', 'mysql')
query = 'SELECT title FROM catalog WHERE id="portal_types"'
try:
out = subprocess.check_output((mysql_bin, 'erp5', '-e', query))
except subprocess.CalledProcessError:
out = ''
self.assertNotIn(self._erp5_new_title, out)
# Stop all services
slapos = self._getSlapos()
print("Stop all services")
subprocess.call((slapos, 'node', 'stop', 'all'))
# Manually restore mariadb from backup
mariadb_restore_script = os.path.join(mariadb_partition, 'bin', 'restore-from-backup')
print("Restore mariadb from backup")
subprocess.check_call(mariadb_restore_script)
# Check that the test instance is properly redeployed after restoring mariadb
# This restarts the services and checks the promises of the test instance
# Process twice to propagate state change
for _ in range(2):
self._processEmbeddedInstance(self.test_instance_max_retries)
# Check that the mariadb catalog was properly restored
out = subprocess.check_output((mysql_bin, 'erp5', '-e', query))
self.assertIn(self._erp5_new_title, out, 'Mariadb catalog is not properly restored')
# Resilience Dummy Software
A very simple SR to test resiliency:
- fast installation and deployment
- self-contained - no dependencies outside this folder
- has a simple `exporter.exclude` and `runner-import-restore`
[buildout]
parts =
log-writer
exporter.exclude
runner-import-restore
backup-identity-script
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = plone.recipe.command
home = $${buildout:directory}
srv = $${:home}/srv
etc = $${:home}/etc
run = $${:etc}/run
command = mkdir -p $${:run} $${:srv}
[log-writer]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
echo "Hello : $(date)" >> $${directory:home}/log.log
rendered = $${directory:run}/log-writer
[exporter.exclude]
recipe = slapos.recipe.template:jinja2
template = inline:$${directory:home}/exclude
rendered = $${directory:srv}/exporter.exclude
[runner-import-restore]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
echo "Hello : $(date)" >> $${directory:home}/runner-import-restore.log
exit $TEST_RESTORE_STATUS
rendered = $${directory:srv}/runner-import-restore
[backup-identity-script]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
echo "Custom script"
for i in "$@"
do
echo $(sha256sum $i)
done
exit $TEST_BACKUP_STATUS
rendered = $${directory:srv}/.backup_identity_script
[buildout]
find-links +=
http://www.nexedi.org/static/packages/source/
http://www.nexedi.org/static/packages/source/slapos.buildout/
parts =
instance-template
plone-recipe-command
versions = versions
[plone-recipe-command]
recipe = zc.recipe.egg
eggs = plone.recipe.command
[instance-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg
mode = 0644
[versions]
setuptools = 44.0.0
zc.buildout = 2.7.1+slapos010
zc.recipe.egg = 2.0.3+slapos003
Jinja2 = 2.11.2
MarkupSafe = 1.0
import contextlib
import errno
import glob
import hashlib
import os
import re
import subprocess as sp
import sqlite3
import six
import zc.buildout.configparser
from slapos.util import bytes2str, str2bytes
RSYNC_FLAGS = ('-rlptgo', '--safe-links', '--stats', '--ignore-missing-args', '--delete', '--delete-excluded')
RSYNC_REGEX = '^(file has vanished: |rsync warning: some files vanished before they could be transferred)'
EXCLUDE_PATTERNS = ('*.sock', '*.socket', '*.pid', '.installed*.cfg')
EXCLUDE_FLAGS = ['--exclude={}'.format(x) for x in sorted(EXCLUDE_PATTERNS)]
def makedirs(path):
try:
os.makedirs(path if os.path.isdir(path) else os.path.dirname(path))
except OSError as e:
if e.errno != errno.EEXIST:
raise
def copytree(rsyncbin, src, dst, exclude=[], extrargs=[], verbosity='-v'):
# Ensure there is a trailing slash in the source directory
# to avoid creating an additional directory level at the destination
src = os.path.join(src, '')
# Compute absolute path of destination
dst = os.path.abspath(dst)
# Create destination dir if it doesn't exist
makedirs(dst)
command = [rsyncbin]
command.extend(RSYNC_FLAGS)
# Exclude destination file from sources
command.append('--filter=-/ {}'.format(dst))
command.extend(EXCLUDE_FLAGS)
command.extend(('--filter=-/ {}'.format(x) for x in sorted(exclude)))
command.extend(extrargs)
command.append(verbosity)
command.append(src)
command.append(dst)
try:
return sp.check_output(command, universal_newlines=True)
except sp.CalledProcessError as e:
# Not all rsync errors are to be considered as errors
if e.returncode != 24 or re.search(RSYNC_REGEX, e.output, re.M) is None:
raise
return e.output
def copydb(sqlite3bin, src_db, dst_db):
makedirs(dst_db)
sp.check_output((sqlite3bin, src_db, '.backup ' + dst_db))
def remove(path):
try:
os.remove(path)
except OSError:
if os.path.exists(path):
raise
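# collect the paths recorded by buildout in a partition's .installed*.cfg files so
# they can be excluded from the backup; the custom signature script itself is kept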
def parse_installed(partition):
paths = []
custom_script = os.path.join(partition, 'srv', '.backup_identity_script')
for cfg in glob.glob(os.path.join(partition, '.installed*.cfg')):
try:
with open(cfg) as f:
installed_cfg = zc.buildout.configparser.parse(f, cfg)
except IOError as e:
if e.errno != errno.ENOENT:
raise
else:
for section in six.itervalues(installed_cfg):
for p in section.get('__buildout_installed__', '').splitlines():
p = p.strip()
if p and p != custom_script:
paths.append(p)
return paths
def sha256sum(file_path, chunk_size=1024 * 1024):
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
chunk = f.read(chunk_size)
while chunk:
sha256.update(chunk)
chunk = f.read(chunk_size)
return sha256.hexdigest()
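# walk the backup directory and yield one "<sha256> <relative path>" line per file;
# mirrored partitions providing srv/.backup_identity_script are signed by that script
# (see hashcustom) instead of being hashed file by file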
def hashwalk(backup_dir, mirror_partitions):
scripts = {}
for p in mirror_partitions:
script_path = os.path.join(p, 'srv', '.backup_identity_script')
if os.path.exists(script_path):
scripts[os.path.abspath(p)] = script_path
for dirpath, dirnames, filenames in os.walk(backup_dir):
filenames.sort()
for f in filenames:
filepath = os.path.join(dirpath, f)
if os.path.isfile(filepath):
displaypath = os.path.relpath(filepath, start=backup_dir)
yield '%s %s' % (sha256sum(filepath), displaypath)
remaining_dirnames = []
for subdir in dirnames:
subdirpath = os.path.abspath(os.path.join(dirpath, subdir))
custom_hashscript = scripts.get(subdirpath)
if custom_hashscript:
print('Using custom signature script %s' % custom_hashscript)
for s in hashcustom(subdirpath, backup_dir, custom_hashscript):
yield s
else:
remaining_dirnames.append(subdir)
remaining_dirnames.sort()
dirnames[:] = remaining_dirnames
@contextlib.contextmanager
def cwd(path):
old_path = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_path)
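# feed the partition's files to its custom signature script (NUL-separated relative
# paths on stdin) and yield the signature lines it prints on stdout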
def hashcustom(mirrordir, backup_dir, custom_hashscript):
workingdir = os.path.join(mirrordir, os.pardir, os.pardir, os.pardir)
with cwd(os.path.abspath(workingdir)):
for dirpath, _, filenames in os.walk(mirrordir):
filepaths = []
for f in filenames:
path = os.path.join(dirpath, f)
if os.path.isfile(path):
filepaths.append('./' + os.path.relpath(path, start=workingdir))
if not filepaths:
continue
hashprocess = sp.Popen(
custom_hashscript, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
out, err = hashprocess.communicate(str2bytes('\0'.join(filepaths)))
if hashprocess.returncode != 0:
template = "Custom signature script %s failed on inputs:\n%s"
msg = template % (custom_hashscript, '\n'.join(filepaths))
msg += "\nwith stdout:\n%s" % bytes2str(out)
msg += "\nand stderr:\n%s" % bytes2str(err)
raise Exception(msg)
signatures = bytes2str(out).strip('\n').split('\n')
signatures.sort()
displaypath = os.path.relpath(dirpath, start=backup_dir)
for s in signatures:
yield '%s %s/ (custom)' % (s, displaypath)
import argparse
import glob
import itertools
import os
import sys
import time
import traceback
import six
from six.moves import configparser
sys.path.append(os.path.dirname(__file__))
from theia_common import copytree, copydb, hashwalk, parse_installed, remove
os.environ['LC_ALL'] = 'C'
os.umask(0o77)
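# minutes to wait before reporting the export as failed when files were modified
# while the backup was running, so that a later run can try again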
BACKUP_WAIT = 10
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--rsync', required=True)
parser.add_argument('--sqlite3', required=True)
parser.add_argument('--root', required=True)
parser.add_argument('--backup', required=True)
parser.add_argument('--cfg', required=True)
parser.add_argument('--dirs', action='append')
parser.add_argument('--exitfile', required=True)
parser.add_argument('--errorfile', required=True)
args = parser.parse_args()
TheiaExport(args)()
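# Back up a theia instance: mirror etc/, the extra directories and the software
# partitions into the backup directory with rsync, copy the slapproxy database, then
# write a sha256 signature of the backup and check that nothing changed meanwhile.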
class TheiaExport(object):
def __init__(self, args):
self.rsync_bin = args.rsync
self.sqlite3_bin = args.sqlite3
self.root_dir = args.root
self.backup_dir = args.backup
self.slapos_cfg = cfg = args.cfg
self.dirs = args.dirs
self.exit_file = args.exitfile
self.error_file = args.errorfile
configp = configparser.SafeConfigParser()
configp.read(cfg)
self.proxy_db = configp.get('slapproxy', 'database_uri')
self.instance_dir = configp.get('slapos', 'instance_root')
partitions = glob.glob(os.path.join(self.instance_dir, 'slappart*'))
self.partition_dirs = [p for p in partitions if os.path.isdir(p)]
self.copytree_partitions_args = {}
self.logs = []
def mirrorpath(self, src):
return os.path.abspath(os.path.join(
self.backup_dir, os.path.relpath(src, start=self.root_dir)))
def backuptree(self, src, exclude=[], extrargs=[], verbosity='-v'):
dst = self.mirrorpath(src)
return copytree(self.rsync_bin, src, dst, exclude, extrargs, verbosity)
def backupdb(self):
copydb(self.sqlite3_bin, self.proxy_db, self.mirrorpath(self.proxy_db))
def backuppartition(self, partition):
installed = parse_installed(partition)
rules = os.path.join(partition, 'srv', 'exporter.exclude')
extrargs = ('--filter=.-/ ' + rules,) if os.path.exists(rules) else ()
self.backuptree(partition, exclude=installed, extrargs=extrargs)
self.copytree_partitions_args[partition] = (installed, extrargs)
def sign(self, signaturefile):
remove(signaturefile)
pardir = os.path.abspath(os.path.join(self.backup_dir, os.pardir))
tmpfile = os.path.join(pardir, 'backup.signature.tmp')
mirror_partitions = [self.mirrorpath(p) for p in self.partition_dirs]
with open(tmpfile, 'w') as f:
for s in hashwalk(self.backup_dir, mirror_partitions):
f.write(s + '\n')
os.rename(tmpfile, signaturefile)
def checkpartition(self, partition, pattern='/srv/backup/'):
installed, extrargs = self.copytree_partitions_args[partition]
output = self.backuptree(
partition,
exclude=installed,
extrargs=extrargs + ('--dry-run', '--update'),
verbosity='--out-format=%n',
)
return [path for path in output.splitlines() if pattern in path]
def loginfo(self, msg):
print(msg)
self.logs.append(msg)
def __call__(self):
remove(self.error_file)
exitcode = 0
try:
self.export()
except Exception:
exitcode = 1
exc = traceback.format_exc()
with open(self.error_file, 'w') as f:
f.write('\n ... OK\n\n'.join(self.logs))
f.write('\n ... ERROR !\n\n')
f.write(exc)
print('\n\nERROR\n\n' + exc)
finally:
with open(self.exit_file, 'w') as f:
f.write(str(exitcode))
sys.exit(exitcode)
def export(self):
export_start_date = int(time.time())
etc_dir = os.path.join(self.root_dir, 'etc')
with open(os.path.join(etc_dir, '.resilient_timestamp'), 'w') as f:
f.write(str(export_start_date))
self.loginfo('Backup directory ' + etc_dir)
self.backuptree(etc_dir, extrargs=('--filter=- */', '--filter=-! .*'))
for d in self.dirs:
self.loginfo('Backup directory ' + d)
self.backuptree(d)
self.loginfo('Backup slapproxy database')
self.backupdb()
self.loginfo('Backup partitions')
for p in self.partition_dirs:
self.backuppartition(p)
self.loginfo('Compute backup signature')
self.sign(os.path.join(self.backup_dir, 'backup.signature'))
time.sleep(10)
self.loginfo('Check partitions')
modified = list(itertools.chain.from_iterable(
self.checkpartition(p) for p in self.partition_dirs))
if modified:
msg = 'Some files have been modified since the backup started'
self.loginfo(msg + ':')
self.loginfo('\n'.join(modified))
self.loginfo("Let's wait %d minutes and try again" % BACKUP_WAIT)
time.sleep(BACKUP_WAIT * 60)
raise Exception(msg)
self.loginfo('Done')
if __name__ == '__main__':
main()
import argparse
import glob
import itertools
import os
import sys
import subprocess as sp
import traceback
import six
from six.moves import configparser
sys.path.append(os.path.dirname(__file__))
from theia_common import copytree, copydb, hashwalk, parse_installed, remove
os.environ['LC_ALL'] = 'C'
os.umask(0o77)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--rsync', required=True)
parser.add_argument('--sqlite3', required=True)
parser.add_argument('--slapos', required=True)
parser.add_argument('--srlog', required=True)
parser.add_argument('--cplog', required=True)
parser.add_argument('--supervisorctl', required=True)
parser.add_argument('--supervisordconf', required=True)
parser.add_argument('--root', required=True)
parser.add_argument('--backup', required=True)
parser.add_argument('--cfg', required=True)
parser.add_argument('--dirs', action='append')
parser.add_argument('--exitfile', required=True)
parser.add_argument('--errorfile', required=True)
args = parser.parse_args()
TheiaImport(args)()
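# Restore a theia instance from its backup: verify the backup signature, stop
# slapproxy, rsync the mirrored directories and partitions back, restore the slapproxy
# database, then rebuild the software releases and instances with slapos node.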
class TheiaImport(object):
def __init__(self, args):
self.rsync_bin = args.rsync
self.sqlite3_bin = args.sqlite3
self.slapos_bin = args.slapos
self.sr_log = args.srlog
self.cp_log = args.cplog
self.supervisorctl_bin = args.supervisorctl
self.supervisord_conf = args.supervisordconf
self.root_dir = args.root
self.backup_dir = args.backup
self.slapos_cfg = cfg = args.cfg
self.dirs = args.dirs
self.exit_file = args.exitfile
self.error_file = args.errorfile
configp = configparser.SafeConfigParser()
configp.read(cfg)
self.proxy_db = configp.get('slapproxy', 'database_uri')
self.instance_dir = configp.get('slapos', 'instance_root')
mirror_dir = self.mirrorpath(self.instance_dir)
partitions = glob.glob(os.path.join(mirror_dir, 'slappart*'))
self.mirror_partition_dirs = [p for p in partitions if os.path.isdir(p)]
self.logs = []
def mirrorpath(self, dst):
return os.path.abspath(os.path.join(
self.backup_dir, os.path.relpath(dst, start=self.root_dir)))
def dstpath(self, src):
return os.path.abspath(os.path.join(
self.root_dir, os.path.relpath(src, start=self.backup_dir)))
def restoretree(self, dst, exclude=[], extrargs=[], verbosity='-v'):
src = self.mirrorpath(dst)
return copytree(self.rsync_bin, src, dst, exclude, extrargs, verbosity)
def restoredb(self):
copydb(self.sqlite3_bin, self.mirrorpath(self.proxy_db), self.proxy_db)
def restorepartition(self, mirror_partition):
p = self.dstpath(mirror_partition)
installed = parse_installed(p) if os.path.exists(p) else []
copytree(self.rsync_bin, mirror_partition, p, exclude=installed)
def supervisorctl(self, *args):
supervisor_command = (self.supervisorctl_bin, '-c', self.supervisord_conf)
command = supervisor_command + args
print(' '.join(command))
sp.check_call(command)
def slapos(self, *args):
command = (self.slapos_bin,) + args + ('--cfg', self.slapos_cfg)
print(' '.join(command))
sp.check_call(command)
def verify(self, signaturefile):
pardir = os.path.abspath(os.path.join(self.backup_dir, os.pardir))
moved = os.path.join(pardir, 'backup.signature.moved')
proof = os.path.join(pardir, 'backup.signature.proof')
if os.path.exists(signaturefile):
os.rename(signaturefile, moved)
if not os.path.exists(moved):
msg = 'ERROR the backup signature file is missing'
print(msg)
raise Exception(msg)
with open(proof, 'w') as f:
for s in hashwalk(self.backup_dir, self.mirror_partition_dirs):
f.write(s + '\n')
diffcommand = ('diff', moved, proof)
print(' '.join(diffcommand))
try:
sp.check_output(
diffcommand, stderr=sp.STDOUT, universal_newlines=True)
except sp.CalledProcessError as e:
template = 'ERROR the backup signatures do not match\n\n%s'
msg = template % e.output
print(msg)
raise Exception(msg)
def loginfo(self, msg):
print(msg)
self.logs.append(msg)
def __call__(self):
remove(self.error_file)
exitcode = 0
try:
self.restore()
except Exception:
exitcode = 1
exc = traceback.format_exc()
with open(self.error_file, 'w') as f:
f.write('\n ... OK\n\n'.join(self.logs))
f.write('\n ... ERROR !\n\n')
f.write(exc)
print('\n\nERROR\n\n' + exc)
finally:
with open(self.exit_file, 'w') as f:
f.write(str(exitcode))
sys.exit(exitcode)
def restore(self):
self.loginfo('Verify backup signature')
self.verify(os.path.join(self.backup_dir, 'backup.signature'))
self.loginfo('Stop slapproxy')
self.supervisorctl('stop', 'slapos-proxy')
self.loginfo('Restore partitions')
for m in self.mirror_partition_dirs:
self.restorepartition(m)
for d in self.dirs:
self.loginfo('Restore directory ' + d)
self.restoretree(d)
self.loginfo('Restore slapproxy database')
self.restoredb()
etc_dir = os.path.join(self.root_dir, 'etc')
self.loginfo('Restore directory ' + etc_dir)
self.restoretree(etc_dir, extrargs=('--filter=- */', '--filter=-! .*'))
custom_script = os.path.join(self.root_dir, 'srv', 'runner-import-restore')
if os.path.exists(custom_script):
self.loginfo('Run custom restore script %s' % custom_script)
sp.check_call(custom_script)
self.loginfo('Start slapproxy again')
self.supervisorctl('start', 'slapos-proxy')
self.loginfo('Reformat partitions')
self.slapos('node', 'format', '--now')
self.loginfo('Remove old supervisord configuration files')
conf_dir = os.path.join(self.instance_dir, 'etc', 'supervisor.conf.d')
for f in glob.glob(os.path.join(conf_dir, '*')):
os.remove(f)
self.loginfo('Build Software Releases')
for i in range(3):
try:
self.slapos('node', 'software', '--all', '--logfile', self.sr_log)
except sp.CalledProcessError:
if i == 2:
raise
else:
break
self.loginfo('Remove old custom instance scripts')
partitions_glob = os.path.join(self.instance_dir, 'slappart*')
scripts = os.path.join(partitions_glob, 'srv', 'runner-import-restore')
for f in glob.glob(scripts):
remove(f)
self.loginfo('Remove partition timestamps')
timestamps = os.path.join(partitions_glob, '.timestamp')
for f in glob.glob(timestamps):
remove(f)
self.loginfo('Build Instances')
cp_log = self.cp_log
for i in range(3):
try:
self.slapos('node', 'instance', '--force-stop', '--logfile', cp_log)
except sp.CalledProcessError:
if i == 2:
raise
else:
break
for custom_script in glob.glob(scripts):
self.loginfo('Running custom instance script %s' % custom_script)
sp.check_call(custom_script)
self.loginfo('Done')
if __name__ == '__main__':
main()
tsn-demo
========
Software release providing the scripts required to run the `Time-Sensitive Networking`_ motor demo.
This is an automation of `this demo`_.
setup
=====
To allow SlapOS to set up GPIO pins, you need to add the following configuration
in ``/etc/udev/rules.d/99-gpio.rules``:
.. code-block:: shell
SUBSYSTEM=="gpio", KERNEL=="gpiochip*", ACTION=="add", PROGRAM="/bin/sh -c 'chown root:slapsoft /sys/class/gpio/export /sys/class/gpio/unexport ; chmod 220 /sys/class/gpio/export /sys/class/gpio/unexport'"
SUBSYSTEM=="gpio", KERNEL=="gpio*", ACTION=="add", PROGRAM="/bin/sh -c 'chown root:slapsoft /sys%p/direction /sys%p/value ; chmod 660 /sys%p/direction /sys%p/value'"
tutorial
========
`A tutorial`_ describing how to use this SR is available.
.. _Time-Sensitive Networking: https://1.ieee802.org/tsn/
.. _this demo: https://www.osie-project.eu/P-OSIE.Blog.TSN.Motor.Control.Demo
.. _A tutorial: https://www.osie-project.eu/P-OSIE.Blog.TSN.Motor.Control.Demo.Software.Release
\ No newline at end of file
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign, which must fit in one of the following categories:
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[template]
filename = instance.cfg
md5sum = cad8d9c7c17808f23eee936f149183a9
[template-tsn-client]
filename = instance-tsn-client.cfg
md5sum = 829e50492ef9adb0037b82c814f76afd
[template-tsn-server]
filename = instance-tsn-server.cfg
md5sum = 5ce17649f54faf2a9e2a05774d643041
{% set hardware_timestamps = slapparameter_dict.get('hardware-timestamps', 'n') -%}
{% set interface_ip_string_list = [] -%}
{% set interface_list = slapparameter_dict.get('interface-list', '').split() -%}
{% set ip_list = slapparameter_dict.get('ip-list', '').split() -%}
{% set part_list = [] -%}
{% set server_guid_list = slapparameter_dict.get('server-guid-list', '').split() -%}
{% set server_interface_list = slapparameter_dict.get('server-interface-list', '').split() -%}
{% set server_list = [] -%}
{% set script_list = [] -%}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
scripts = $${:etc}/tsn
run = $${:etc}/run
[tsn-script-template]
recipe = slapos.recipe.template:jinja2
rendered= $${directory:scripts}/$${:_buildout_section_name_}
mode = 755
{% for server_nb, server_guid in enumerate(server_guid_list) -%}
{% set request_tsn_server_section_title = 'request-tsn-server' ~ server_nb -%}
{% do interface_ip_string_list.extend([interface_list[server_nb], '$${' ~ request_tsn_server_section_title ~ ':connection-ip}']) -%}
{% do part_list.append(request_tsn_server_section_title) -%}
[{{ request_tsn_server_section_title }}]
<= slap-connection
recipe = slapos.cookbook:request
name = TSNMeasurementsServer{{ server_nb }}
software-url = $${:software-release-url}
software-type = tsn-server
return = ip
sla-computer_guid = {{ server_guid }}
config-ip = {{ ip_list[server_nb] }}
{% if 'gpio-port' in slapparameter_dict -%}
config-gpio-port = {{ slapparameter_dict.get('gpio-port') }}
{% endif -%}
{% if server_nb < len(server_interface_list) -%}
config-interface = {{ server_interface_list[server_nb] }}
{% endif %}
{% if hardware_timestamps == 'y' -%}
{% set phc2sys_launcher_section_title = 'phc2sys-launcher-' ~ interface_list[server_nb] -%}
{% do part_list.append(phc2sys_launcher_section_title) -%}
{% do script_list.append(phc2sys_launcher_section_title) -%}
[{{ phc2sys_launcher_section_title }}]
<= tsn-script-template
template= inline:
#!/bin/sh
${linuxptp:location}/sbin/pmc -u -b 0 -i {{ interface_list[server_nb] }} "SET GRANDMASTER_SETTINGS_NP clockClass 248 clockAccuracy 0xfe offsetScaledLogVariance 0xffff currentUtcOffset 37 leap61 0 leap59 0 currentUtcOffsetValid 1 ptpTimescale 1 timeTraceable 1 frequencyTraceable 0 timeSource 0xa0"
exec {{ chrt }} -f 95 ${linuxptp:location}/sbin/phc2sys -s CLOCK_REALTIME -c {{ interface_list[server_nb] }} --step_threshold=1 -m -w
{% endif %}
{% set ptp4l_launcher_section_title = 'ptp4l-launcher-' ~ interface_list[server_nb] -%}
{% do part_list.append(ptp4l_launcher_section_title) -%}
{% do script_list.append(ptp4l_launcher_section_title) -%}
[{{ ptp4l_launcher_section_title }}]
<= tsn-script-template
template= inline:
#!/bin/sh
exec {{ chrt }} -f 97 ${linuxptp:location}/sbin/ptp4l {{ '-H' if hardware_timestamps=='y' else '-S' }} -i {{ interface_list[server_nb] }} --step_threshold=1 -m
{% endfor -%}
[tsn-script]
<= tsn-script-template
template= inline:
#!/bin/sh
exec ${tsn-rt-measures:location}/bin/master -a1 -p97 -t4000 {{ interface_ip_string_list | join(' ') }}
[buildout]
parts =
{% for part in part_list -%}
{{ ' ' ~ part }}
{% endfor %}
tsn-script
publish-connection-information
[publish-connection-information]
recipe = slapos.cookbook:publish
{% for script in script_list -%}
{{ script }} = {{ '$${' ~ script ~ ':rendered}' }}
{% endfor -%}
tsn-script = $${tsn-script:rendered}
{% set interface = slapparameter_dict.get('interface', 'eth0') -%}
{% set ip = slapparameter_dict.get('ip', '') -%}
{% set gpio_direction_pin = slapparameter_dict.get('gpio-direction-pin', '192') -%}
{% set gpio_pulse_pin = slapparameter_dict.get('gpio-pulse-pin', '193') -%}
[buildout]
parts =
gpio_setup
ptp4l-launcher-{{ interface }}
tsn-script
publish-connection-information
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
scripts = $${:etc}/tsn
[gpio_setup]
recipe = slapos.recipe.build
gpio-direction-pin = {{ gpio_direction_pin }}
gpio-pulse-pin = {{ gpio_pulse_pin }}
init =
import os
def setup_gpio(pin_number):
gpio_dir = "/sys/class/gpio/gpio" + pin_number
if not os.path.exists(gpio_dir):
with open("/sys/class/gpio/export", "w") as fd:
fd.write(pin_number)
with open(os.path.join(gpio_dir, "direction"), "w") as fd:
fd.write("out")
with open(os.path.join(gpio_dir, "value"), "w") as fd:
fd.write("0")
setup_gpio(options["gpio-direction-pin"])
setup_gpio(options["gpio-pulse-pin"])
[tsn-script-template]
recipe = slapos.recipe.template:jinja2
rendered= $${directory:scripts}/$${:_buildout_section_name_}
mode = 755
[ptp4l-launcher-{{ interface }}]
<= tsn-script-template
template= inline:
#!/bin/sh
exec {{ chrt }} -f 97 ${linuxptp:location}/sbin/ptp4l -s -S -i {{ interface }} --step_threshold=1 -m
[tsn-script]
<= tsn-script-template
template= inline:
#!/bin/sh
exec ${tsn-rt-measures:location}/bin/slave -f {{ interface }} -a1 -p98 -d {{ gpio_direction_pin }} -u {{ gpio_pulse_pin }}
[publish-connection-information]
recipe = slapos.cookbook:publish
ip = {{ ip }}
ptp4l-launcher-{{ interface }} = $${ptp4l-launcher-{{ interface }}:rendered}
tsn-script = $${tsn-script:rendered}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype
default = tsn-client-dynamic-template:rendered
tsn-server = tsn-server-dynamic-template:rendered
RootSoftwareInstance = $${:default}
[dynamic-template-base]
recipe = slapos.recipe.template:jinja2
rendered = $${buildout:directory}/$${:filename}
context =
raw chrt ${util-linux:location}/bin/chrt
key slapparameter_dict slap-configuration:configuration
key ipv4 slap-configuration:ipv4-random
[tsn-client-dynamic-template]
<= dynamic-template-base
extensions = jinja2.ext.do
template = ${template-tsn-client:output}
filename = instance-tsn-client.cfg
[tsn-server-dynamic-template]
<= dynamic-template-base
template = ${template-tsn-server:output}
filename = instance-tsn-server.cfg
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/linuxptp/buildout.cfg
../../component/tsn-rt-measures/buildout.cfg
../../component/util-linux/buildout.cfg
parts =
slapos-cookbook
eggs
util-linux
template
# eggs for instance-tsn-client.cfg
[eggs]
recipe = zc.recipe.egg
eggs =
plone.recipe.command
[template-base]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
mode = 0644
[template]
< = template-base
output = ${buildout:directory}/template.cfg
[template-tsn-client]
< = template-base
output = ${buildout:directory}/template-tsn-client.cfg
[template-tsn-server]
< = template-base
output = ${buildout:directory}/template-tsn-server.cfg
[util-linux]
configure-options -=
--disable-schedutils
......@@ -15,4 +15,4 @@
[caucase-jinja2-library]
filename = caucase.jinja2.library
md5sum = 27aba89543032f3c7026ab566f3aa41b
md5sum = b3ee1414719f8a24790ae324ca023066
......@@ -133,7 +133,7 @@ recipe = plone.recipe.command
checksum-file = '{{ template }}.md5'
command =
set -e
md5_current=$(${buildout:executable} -c "import hashlib ; hashlib.md5(open('{{ template }}', 'rb').read()).hexdigest()")
md5_current=$(${buildout:executable} -c "import hashlib ; print hashlib.md5(open('{{ template }}', 'rb').read()).hexdigest()")
md5_old=$([ -f ${:checksum-file} ] && cat ${:checksum-file} || echo none)
if [ "$md5_current" != "$md5_old" ] || [ ! -f '{{ csr }}' ] || [ ! -f '{{ key }}' ] ; then
'{{ buildout_bin_directory }}/caucase-rerequest' --template '{{ template }}' --csr '{{ csr }}' --key '{{ key }}'
......
......@@ -720,7 +720,7 @@ cloudpickle = 0.5.3
dask = 0.18.1
toolz = 0.9.0
zope.globalrequest = 1.5
waitress = 1.3.0
waitress = 1.4.4
xlrd = 1.1.0
# Re-add, as it is required to be there for uninstallation
......
......@@ -14,7 +14,7 @@
# not need these here).
[pbsready]
filename = pbsready.cfg.in
md5sum = 9ceceeee21fa90837c887d2d6866859e
md5sum = 005125621d157b3ae04c428ea6060e37
[pbsready-import]
filename = pbsready-import.cfg.in
......@@ -26,7 +26,7 @@ md5sum = 2b0c71b085cfe8017f28098c160b1f49
[template-pull-backup]
filename = instance-pull-backup.cfg.in
md5sum = e64e13854332bcc2595df187fcae1203
md5sum = b240dc76a663190304d8bcb9cabcda8f
[template-replicated]
filename = template-replicated.cfg.in
......
......@@ -137,12 +137,17 @@ command-line = ${buildout:bin-directory}/generatefeed --output $${:feed-path} --
feed-path = $${directory:monitor-resilient}/pbs-status-rss
wrapper-path = $${rootdirectory:bin}/resilient-genstatusrss.py
[pbs-status-feed-first-run]
recipe = plone.recipe.command
command = $${pbs-resilient-status-feed:wrapper-path}
stop-on-error = true
[cron-pbs-status-feed]
<= cron
recipe = slapos.cookbook:cron.d
name = resilient-pbs-status-feed
frequency = */5 * * * *
command = $${pbs-resilient-status-feed:wrapper-path}
command = $${pbs-status-feed-first-run:command}
[logrotate-entry-notifier]
<= logrotate-entry-base
......
......@@ -145,12 +145,17 @@ command-line = ${buildout:bin-directory}/generatefeed --output $${:feed-path} --
feed-path = $${directory:monitor-resilient}/notifier-status-rss
wrapper-path = $${rootdirectory:bin}/resilient-genstatusrss.py
[notifier-status-feed-first-run]
recipe = plone.recipe.command
command = $${notifier-resilient-status-feed:wrapper-path}
stop-on-error = true
[cron-entry-notifier-status-feed]
<= cron
recipe = slapos.cookbook:cron.d
name = resilient-notifier-status-feed
frequency = */5 * * * *
command = $${notifier-resilient-status-feed:wrapper-path}
command = $${notifier-status-feed-first-run:command}
[notifier-stalled-promise-bin]
recipe = slapos.cookbook:wrapper
......
......@@ -188,10 +188,10 @@ requests = 2.24.0
scandir = 1.10.0
setproctitle = 1.1.10
setuptools-dso = 1.7
rubygemsrecipe = 0.3.0
rubygemsrecipe = 0.4.2
six = 1.12.0
slapos.cookbook = 1.0.197
slapos.core = 1.6.17
slapos.core = 1.6.18
slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.20
......