Commit 84e7c611 authored by Thomas Gambier

Update Release Candidate

parents ab87e8e5 832ee7ae
@@ -62,10 +62,12 @@ url = http://www.tortall.net/projects/yasm/releases/yasm-1.1.0.tar.gz
 recipe = slapos.recipe.cmmi
 url = https://storage.googleapis.com/downloads.webmproject.org/releases/webm/libvpx-1.5.0.tar.bz2
 md5sum = 49e59dd184caa255886683facea56fca
+location = @@LOCATION@@
 configure-options =
   --enable-shared
 environment =
   PATH=${yasm:location}/bin:%(PATH)s
+  LDFLAGS=-Wl,-rpath=${:location}/lib

 [libx264]
 recipe = slapos.recipe.cmmi
@@ -96,6 +98,7 @@ configure-options =
 recipe = slapos.recipe.cmmi
 url = https://ffmpeg.org/releases/ffmpeg-4.1.4.tar.bz2
 md5sum = 611d171e4aee749b85e04d17e2aee71d
+location = @@LOCATION@@
 pkg_config_depends = ${libxcb:location}/lib/pkgconfig:${libxcb:pkg_config_depends}:${libtheora:location}/lib/pkgconfig:${libtheora:pkg_config_depends}:${libvpx:location}/lib/pkgconfig:${libx264:location}/lib/pkgconfig:${opencore-amr:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
 configure-options =
   --enable-gpl
@@ -119,5 +122,5 @@ configure-options =
 environment =
   PKG_CONFIG_PATH=${:pkg_config_depends}
   CPPFLAGS=-I${bzip2:location}/include -I${libogg:location}/include -I${libtheora:location}/include -I${opencore-amr:location}/include -I${lame:location}/include
-  LDFLAGS=-Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${libxcb:location}/lib -L${libogg:location}/lib -L${libvorbis:location}/lib -Wl,-rpath=${libvorbis:location}/lib -L${libtheora:location}/lib -Wl,-rpath=${libtheora:location}/lib -L${libvpx:location}/lib -Wl,-rpath=${libvpx:location}/lib -L${libx264:location}/lib -Wl,-rpath=${libx264:location}/lib -L${lame:location}/lib -Wl,-rpath=${lame:location}/lib -L${opencore-amr:location}/lib -Wl,-rpath=${opencore-amr:location}/lib -Wl,-rpath=${zlib:location}/lib
+  LDFLAGS=-Wl,-rpath=${:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${libxcb:location}/lib -L${libogg:location}/lib -L${libvorbis:location}/lib -Wl,-rpath=${libvorbis:location}/lib -L${libtheora:location}/lib -Wl,-rpath=${libtheora:location}/lib -L${libvpx:location}/lib -Wl,-rpath=${libvpx:location}/lib -L${libx264:location}/lib -Wl,-rpath=${libx264:location}/lib -L${lame:location}/lib -Wl,-rpath=${lame:location}/lib -L${opencore-amr:location}/lib -Wl,-rpath=${opencore-amr:location}/lib -Wl,-rpath=${zlib:location}/lib
   PATH=${pkgconfig:location}/bin:${yasm:location}/bin:%(PATH)s
[buildout]
extends =
../cmake/buildout.cfg
[mosquitto]
recipe = slapos.recipe.cmmi
url = https://mosquitto.org/files/source/mosquitto-2.0.15.tar.gz
md5sum = 22b7a8b05caa692cb22496b791529193
configure-command =
${cmake:location}/bin/cmake
configure-options =
-DWITH_CJSON=no
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
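For reference, the cmmi part above boils down to a plain CMake build; a rough manual equivalent under illustrative paths (the install prefix only stands in for @@LOCATION@@):

  # fetch and verify the same tarball as the [mosquitto] section
  wget https://mosquitto.org/files/source/mosquitto-2.0.15.tar.gz
  echo "22b7a8b05caa692cb22496b791529193  mosquitto-2.0.15.tar.gz" | md5sum -c
  tar xzf mosquitto-2.0.15.tar.gz && cd mosquitto-2.0.15
  # same options as configure-command / configure-options above
  cmake . -DWITH_CJSON=no -DCMAKE_INSTALL_PREFIX="$HOME/opt/mosquitto"
  make install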
@@ -29,7 +29,8 @@ configure-options =
   --without-libxml
   --without-libxslt
 # build core PostgreSQL + pg_trgm contrib extension for GitLab
-make-targets = install && make -C contrib/pg_trgm/ install
+# unaccent contrib extension is for peertube
+make-targets = install && make -C contrib/pg_trgm/ install && make -C contrib/unaccent/ install
 environment =
   CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/lib
   LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${perl:location}/libs-c -Wl,-rpath=${perl:location}/libs-c
...
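The contrib modules above are only compiled and installed by this part; a sketch of how they are enabled per database (connection details and database name are illustrative, borrowed from the peertube instance profile further down):

  # pg_trgm is used by GitLab, unaccent by PeerTube
  psql -h "$PGHOST" -U peertube -d peertube_prod \
    -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;' \
    -c 'CREATE EXTENSION IF NOT EXISTS unaccent;'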
@@ -51,23 +51,29 @@ environment =
 [python3.7]
 <= python3-common
 version = 3.7
-package_version = 3.7.13
-md5sum = 10822726f75fd7efe05a94fbd6ac2258
+package_version = 3.7.15
+md5sum = d2ff16776b5d822efc2a7cbf42fc2915

 [python3.8]
 <= python3-common
 version = 3.8
-package_version = 3.8.13
-md5sum = c4b7100dcaace9d33ab1fda9a3a038d6
+package_version = 3.8.15
+md5sum = cca78a827d2327f5c3ff2dee9f526e7e

 [python3.9]
 <= python3-common
 version = 3.9
-package_version = 3.9.13
-md5sum = 5e2411217b0060828d5f923eb422a3b8
+package_version = 3.9.15
+md5sum = 8adc5662c9fd10a23ae8ae9f28b65b49

 [python3.10]
 <= python3-common
 version = 3.10
-package_version = 3.10.6
-md5sum = afc7e14f7118d10d1ba95ae8e2134bf0
+package_version = 3.10.8
+md5sum = e92356b012ed4d0e09675131d39b1bde
+
+[python3.11]
+<= python3-common
+version = 3.11
+package_version = 3.11.0
+md5sum = fe92acfa0db9b9f5044958edb451d463
@@ -121,7 +121,6 @@ setup(name=name,
       'notifier = slapos.recipe.notifier:Recipe',
       'notifier.callback = slapos.recipe.notifier:Callback',
       'notifier.notify = slapos.recipe.notifier:Notify',
-      'novnc = slapos.recipe.novnc:Recipe',
       'onetimeupload = slapos.recipe.onetimeupload:Recipe',
       'pbs = slapos.recipe.pbs:Recipe',
       'postgres = slapos.recipe.postgres:Recipe',
...
@@ -15,11 +15,11 @@
 [instance]
 filename = instance.cfg.in
-md5sum = db7fe97f4c6b84cfadb7b5c68eab517a
+md5sum = e8aae0fe3a8bc3f006b8638ed326bbcb

 [template-instance-beremiz]
 filename = instance-beremiz.cfg.jinja2.in
-md5sum = 9d59c9392636ab71f37b43fd8008fd10
+md5sum = 2b990148e527117bcfb366f8b700c807

 [template-instance-beremiz-test]
 filename = instance-beremiz-test.cfg.jinja2.in
@@ -28,3 +28,7 @@ md5sum = a2fa2b9d3a225a1dd71db67bd4fea769
 [template-fluxbox-menu.in]
 filename = fluxbox-menu.in
 md5sum = 09560314eae0225b6085f8626f1a603a
+
+[template-nginx_conf.in]
+filename = nginx_conf.in
+md5sum = 9ca886120a99befe25ca761ddc54753c
@@ -27,35 +27,55 @@ stop-on-error = true
 cert-file = ${directory:ssl}/beremiz.crt
 key-file = ${directory:ssl}/beremiz.key

-[novnc-instance]
-recipe = slapos.cookbook:novnc
-path = ${directory:bin}/novnc
+[nginx-tempdir]
+recipe = slapos.cookbook:mkdirectory
+tmp = ${buildout:directory}/tmp
+client-body-temp-path = ${:tmp}/client_body_temp_path
+proxy-temp-path = ${:tmp}/proxy_temp_path
+fastcgi-temp-path = ${:tmp}/fastcgi_temp_path
+uwsgi-temp-path = ${:tmp}/uwsgi_temp_path
+scgi-temp-path = ${:tmp}/scgi_temp_path
+
+[nginx-launcher]
+recipe = slapos.cookbook:wrapper
+command-line = {{ nginx_executable }} -c ${nginx-config:output}
+wrapper-path = ${directory:services}/nginx
+
+[nginx-config]
+recipe = slapos.recipe.template:jinja2
+url = {{ template_nginx }}
+output = ${directory:etc}/nginx.conf
+context =
+  section params nginx-params
+  section ca gen-certificate
+  section tempdir nginx-tempdir
+  raw docroot {{ novnc_location }}
+  raw mime {{ nginx_mime }}
+
+[nginx-params]
+path-pid = ${directory:run}/nginx.pid
+path-error-log = ${directory:log}/nginx-error.log
+path-access-log = ${directory:log}/nginx-access.log
 ip = {{ ipv6 }}
 port = 6080
-vnc-ip = {{ ipv4 }}
-vnc-port = ${x11vnc:port}
-novnc-location = {{ novnc_location }}
-websockify-path = {{ websockify_bin }}
-ssl-key-path = ${gen-certificate:key-file}
-ssl-cert-path = ${gen-certificate:cert-file}
-
-[websockify-sighandler]
-recipe = slapos.cookbook:signalwrapper
-wrapper-path = ${directory:bin}/websockify-sighandler
-wrapped-path = ${novnc-instance:path}
-
-[websockify-sighandler-service]
-recipe = slapos.cookbook:wrapper
-command-line = ${websockify-sighandler:wrapper-path}
-wrapper-path = ${directory:services}/websockify
-hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
+websocket-ip = {{ ipv4 }}
+websocket-port = ${x11vnc:port}
+websocket-path = websockify
+nb-workers = 2
+
+[nginx-graceful]
+recipe = slapos.recipe.template
+output = ${directory:scripts}/nginx-graceful
+inline =
+  #!/bin/sh
+  exec kill -s SIGHUP $(cat ${nginx-params:path-pid})

 [novnc-promise]
 <= monitor-promise-base
 promise = check_socket_listening
 name = novnc_promise.py
-config-host = ${novnc-instance:ip}
-config-port = ${novnc-instance:port}
+config-host = ${nginx-params:ip}
+config-port = ${nginx-params:port}

 [x11vnc]
 recipe = slapos.cookbook:wrapper
@@ -216,7 +236,8 @@ recipe = slapos.cookbook:requestoptional
 shared = true
 config-https-only = True
 config-type = websocket
-config-url = https://[${novnc-instance:ip}]:${novnc-instance:port}
+config-url = https://[${nginx-params:ip}]:${nginx-params:port}
+config-websocket-path-list = ${nginx-params:websocket-path}
 return = secure_access domain
 software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
 software-type = RootSoftwareInstance
@@ -226,9 +247,8 @@ name = Beremiz VNC
 [publish-connection-information]
 <= monitor-publish
 recipe = slapos.cookbook:publish
-backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1
-url = ${request-vnc-frontend:connection-secure_access}/vnc.html?host=${request-vnc-frontend:connection-domain}&port=443&encrypt=1
-vnc-password = ${random-password:passwd}
+backend-url = https://[${nginx-params:ip}]:${nginx-params:port}/vnc.html?encrypt=1&password=${random-password:passwd}
+url = ${request-vnc-frontend:connection-secure_access}/vnc.html?encrypt=1&password=${random-password:passwd}

 [buildout]
 extends =
@@ -242,7 +262,8 @@ parts =
   xserver-promise
   x11vnc-listen-promise
   beremiz-x11
-  websockify-sighandler-service
+  nginx-launcher
+  nginx-graceful
   request-vnc-frontend
   generate-vnc-password
   publish-connection-information
...
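The nginx-graceful script above only sends SIGHUP to the running master; a minimal sketch of checking and reloading the rendered configuration by hand ($NGINX_BIN and $PID_FILE are placeholders for the software's nginx binary and the path-pid value of this instance):

  # syntax-check the generated configuration first
  "$NGINX_BIN" -t -c "$PARTITION/etc/nginx.conf"
  # then reload gracefully, which is what nginx-graceful does
  kill -s SIGHUP "$(cat "$PID_FILE")"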
@@ -34,6 +34,7 @@ context =
   key computer_id slap-configuration:computer
   raw bin_directory {{ bin_directory }}
   raw template_monitor {{ template_monitor_cfg }}
+  raw template_nginx {{ template_nginx }}
   raw template_logrotate {{ logrotate_cfg }}
   raw logrotate_cfg {{ logrotate_cfg }}
   raw python_bin {{ python_bin }}
@@ -62,9 +63,10 @@ extra-context =
   raw gtk3_location {{ gtk3_location }}
   raw matiec_location {{ matiec_location }}
   raw mesa_location {{ mesa_location }}
+  raw nginx_executable {{ nginx_executable }}
+  raw nginx_mime {{ nginx_mime }}
   raw novnc_location {{ novnc_location }}
   raw openssl_bin {{ openssl_location }}/bin/openssl
-  raw websockify_bin {{ bin_directory }}/websockify
   raw x11vnc_bin {{ x11vnc_location }}/bin/x11vnc
   raw xvfb_bin {{ xserver_location }}/bin/Xvfb
   raw xterm_bin {{ xterm_location }}/bin/xterm
...
worker_processes {{ params['nb-workers'] }};
pid {{ params['path-pid'] }};
error_log {{ params['path-error-log'] }};
daemon off;
events {
worker_connections 1024;
accept_mutex off;
}
http {
include {{ mime }};
default_type application/octet-stream;
types_hash_bucket_size 64;
access_log {{ params['path-access-log'] }} combined;
index novnc.html;
upstream vnc_proxy {
server {{ params['websocket-ip'] }}:{{ params['websocket-port'] }};
}
server {
listen [{{ params['ip'] }}]:{{ params['port'] }} ssl http2;
server_name _;
ssl_certificate {{ ca['cert-file'] }};
ssl_certificate_key {{ ca['key-file'] }};
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_prefer_server_ciphers off;
keepalive_timeout 5;
client_body_temp_path {{ tempdir['client-body-temp-path'] }};
proxy_temp_path {{ tempdir['proxy-temp-path'] }};
fastcgi_temp_path {{ tempdir['fastcgi-temp-path'] }};
uwsgi_temp_path {{ tempdir['uwsgi-temp-path'] }};
scgi_temp_path {{ tempdir['scgi-temp-path'] }};
# path for static files
root {{ docroot }};
location /{{ params['websocket-path'] }} {
proxy_http_version 1.1;
proxy_pass http://vnc_proxy/;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
# VNC connection timeout
proxy_read_timeout 61s;
# Disable cache
proxy_buffering off;
}
}
}
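A quick way to exercise this configuration once an instance is up; the address below is only a placeholder for the published backend-url host and port:

  # expect HTTP 200 and the noVNC client page; -k because the
  # certificate is the instance's self-signed one
  curl -k -o /dev/null -w '%{http_code}\n' "https://[2001:db8::1]:6080/vnc.html"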
@@ -9,7 +9,6 @@ extends =
   ../../component/noVNC/buildout.cfg
   ../../component/nginx/buildout.cfg
   ../../component/lxml-python/buildout.cfg
-  ../../component/numpy/buildout.cfg
   ../../component/numpy/openblas.cfg
   ../../component/matplotlib/buildout.cfg
   ../../component/wxpython/buildout.cfg
@@ -74,10 +73,8 @@ recipe = zc.recipe.egg
 eggs =
   ${wxPython:egg}
   ${python-cryptography:egg}
-  ${lxml-python:egg}
   ${matplotlib:egg}
   future
-  websockify
   zeroconf2
   enum34
   pyro
@@ -131,6 +128,7 @@ context =
   key template_instance_beremiz template-instance-beremiz:target
   key template_instance_beremiz_test template-instance-beremiz-test:target
   key template_logrotate template-logrotate-base:output
+  key template_nginx template-nginx_conf.in:target
   key fontconfig_location fontconfig:location
   key font_dejavu dejavu-fonts:location
   key font_liberation liberation-fonts:location
@@ -144,6 +142,8 @@ context =
   key instance_template_type :type
   key matiec_location matiec:location
   key mesa_location mesa:location
+  key nginx_executable nginx-output:nginx
+  key nginx_mime nginx-output:mime
   key novnc_location noVNC:location
   key nxdtest_template nxdtest-instance.cfg:output
   key python_bin :python-bin
@@ -168,12 +168,15 @@ output = ${buildout:directory}/instance-beremiz-test.cfg.jinja2
 <= download-template
 output = ${buildout:directory}/fluxbox-menu.in
+
+[template-nginx_conf.in]
+<= download-template
+output = ${buildout:directory}/nginx_conf.in

 [versions]
 Pillow = 6.2.2
 matplotlib = 2.2.5
 kiwisolver = 1.1.0
 cycler = 0.10.0
-websockify = 0.9.0
 Pyro = 3.16
 zeroconf2 = 0.19.2
 cython = 0.29.24
...
@@ -10,7 +10,6 @@ extends =
   ../../component/6tunnel/buildout.cfg
   ../../component/xz-utils/buildout.cfg
   ../../component/rsyslogd/buildout.cfg
-  ../../component/numpy/buildout.cfg
   ../../component/haproxy/buildout.cfg
   ../../component/nginx/buildout.cfg
   ../../component/findutils/buildout.cfg
@@ -57,7 +56,6 @@ depends = ${software-develop:recipe}
 recipe = zc.recipe.egg
 eggs =
   software
-  websockify

 [profile-common]
 recipe = slapos.recipe.template:jinja2
@@ -226,6 +224,5 @@ plone.recipe.command = 1.1
 pycrypto = 2.6.1
 smmap = 0.9.0
-websockify = 0.8.0
 furl = 2.1.0
 orderedmultidict = 1.0.1
@@ -15,11 +15,11 @@
 [template]
 filename = instance.cfg.in
-md5sum = a7978940fb9cdcc4e1ec33015ba640ba
+md5sum = b6204319cca4264b3c351d4dd1f2b5d0

 [template-kvm]
 filename = instance-kvm.cfg.jinja2
-md5sum = 69749ef4be49b970af9548d68e6d8785
+md5sum = 14a8433ca9f0038bb6cc4b68ef7ea8e5

 [template-kvm-cluster]
 filename = instance-kvm-cluster.cfg.jinja2.in
@@ -49,13 +49,17 @@ md5sum = 64aa1ce8785f6b94aabd787fa3443082
 filename = instance-nbd.cfg.jinja2
 md5sum = e041e8011ad2ec7f104be173ef76f5e9

+[template-nginx]
+filename = template/nginx_conf.in
+md5sum = 9ca886120a99befe25ca761ddc54753c
+
 [template-ansible-promise]
 filename = template/ansible-promise.in
 md5sum = 6328f99728284847b8dd1146aadeae1b

 [template-kvm-run]
 filename = template/template-kvm-run.in
-md5sum = fa048a28da7362d570f5b6bd1e05d232
+md5sum = 4ce3fc8072e1e010ee99651cb01d3b3d

 [template-kvm-controller]
 filename = template/kvm-controller-run.in
...
@@ -643,31 +643,48 @@ promise = check_command_execute
 name = qemu-virtual-machine-is-ready.py
 config-command = ${kvm-started-bin:output}

-[novnc-instance]
-recipe = slapos.cookbook:novnc
-path = ${ca-novnc:executable}
+[nginx-tempdir]
+recipe = slapos.cookbook:mkdirectory
+tmp = ${buildout:directory}/tmp
+client-body-temp-path = ${:tmp}/client_body_temp_path
+proxy-temp-path = ${:tmp}/proxy_temp_path
+fastcgi-temp-path = ${:tmp}/fastcgi_temp_path
+uwsgi-temp-path = ${:tmp}/uwsgi_temp_path
+scgi-temp-path = ${:tmp}/scgi_temp_path
+
+[nginx-launcher]
+recipe = slapos.cookbook:wrapper
+command-line = ${ca-novnc:executable} -c ${nginx-config:output}
+wrapper-path = ${directory:services}/nginx
+
+[nginx-config]
+recipe = slapos.recipe.template:jinja2
+url = {{ template_nginx }}
+output = ${directory:etc}/nginx.conf
+context =
+  section params nginx-params
+  section ca ca-novnc
+  section tempdir nginx-tempdir
+  raw docroot {{ novnc_location }}
+  raw mime {{ nginx_mime }}
+
+[nginx-params]
+path-pid = ${directory:run}/nginx.pid
+path-error-log = ${directory:log}/nginx-error.log
+path-access-log = ${directory:log}/nginx-access.log
 ip = ${slap-network-information:global-ipv6}
 port = 6080
-vnc-ip = ${kvm-parameter-dict:vnc-ip}
-vnc-port = ${kvm-parameter-dict:vnc-port}
-novnc-location = {{ novnc_location }}
-websockify-path = {{ websockify_executable_location }}
-ssl-key-path = ${ca-novnc:key-file}
-ssl-cert-path = ${ca-novnc:cert-file}
-
-[websockify-sighandler]
-recipe = slapos.cookbook:signalwrapper
-wrapper-path = ${directory:bin}/websockify-sighandler
-wrapped-path = ${novnc-instance:path}
-
-[websockify-sighandler-service]
-recipe = slapos.cookbook:wrapper
-command-line = ${websockify-sighandler:wrapper-path}
-wrapper-path = ${directory:services}/websockify
-hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
-wait-for-files =
-  ${ca-novnc:key-file}
-  ${ca-novnc:cert-file}
+websocket-ip = ${kvm-parameter-dict:vnc-ip}
+websocket-port = ${kvm-parameter-dict:vnc-port}
+websocket-path = websockify
+nb-workers = 2
+
+[nginx-graceful]
+recipe = slapos.recipe.template
+output = ${directory:scripts}/nginx-graceful
+inline =
+  #!/bin/sh
+  exec kill -s SIGHUP $(cat ${nginx-params:path-pid})

 [certificate-authority]
 recipe = slapos.cookbook:certificate_authority
@@ -699,15 +716,15 @@ crl = ${directory:ca-dir}/crl/
 recipe = slapos.cookbook:certificate_authority.request
 key-file = ${directory:novnc-conf}/novnc.key
 cert-file = ${directory:novnc-conf}/novnc.crt
-executable = ${directory:bin}/novnc
-wrapper = ${directory:bin}/websockify
+executable = {{ nginx_executable }}
+wrapper = ${directory:bin}/nginx-with-ca

 [novnc-promise]
 <= monitor-promise-base
 promise = check_socket_listening
 name = novnc_promise.py
-config-host = ${novnc-instance:ip}
-config-port = ${novnc-instance:port}
+config-host = ${nginx-params:ip}
+config-port = ${nginx-params:port}

 #----------------
@@ -748,7 +765,8 @@ partition-id = ${slap-connection:partition-id}
 shared = true
 config-https-only = True
 config-type = websocket
-config-url = https://[${novnc-instance:ip}]:${novnc-instance:port}
+config-websocket-path-list = ${nginx-params:websocket-path}
+config-url = https://[${nginx-params:ip}]:${nginx-params:port}
 return = secure_access domain

 [request-slave-frontend]
@@ -762,7 +780,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid}
 <= monitor-promise-base
 promise = check_url_available
 name = frontend_promise.py
-config-url = ${request-slave-frontend:connection-secure_access}
+config-url = ${request-slave-frontend:connection-secure_access}/vnc.html
 {% if additional_frontend %}

 [request-slave-frontend-additional]
@@ -823,10 +841,10 @@ blank-line =
 <= monitor-publish
 recipe = slapos.cookbook:publish.serialised
 ipv6 = ${slap-network-information:global-ipv6}
-backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc.html?auto=1&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
-url = ${request-slave-frontend:connection-secure_access}/vnc.html?auto=1&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
+backend-url = https://[${nginx-params:ip}]:${nginx-params:port}/vnc.html?encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
+url = ${request-slave-frontend:connection-secure_access}/vnc.html?encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
 {% if additional_frontend %}
-url-additional = ${request-slave-frontend-additional:connection-secure_access}/vnc.html?auto=1&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
+url-additional = ${request-slave-frontend-additional:connection-secure_access}/vnc.html?encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
 {% endif %}
 {% set disk_number = len(storage_dict) -%}
 maximum-extra-disk-amount = {{ disk_number }}
@@ -1242,8 +1260,8 @@ parts =
   kvm-controller-wrapper
   kvm-vnc-promise
   kvm-disk-image-corruption-promise
-  websockify-sighandler
-  websockify-sighandler-service
+  nginx-launcher
+  nginx-graceful
   novnc-promise
   kvm-started-promise
   cron
...
@@ -89,6 +89,8 @@ extra-context =
   raw logrotate_cfg ${template-logrotate-base:output}
   raw novnc_location ${noVNC:location}
   raw netcat_bin ${netcat:location}/bin/netcat
+  raw nginx_executable ${nginx-output:nginx}
+  raw nginx_mime ${nginx-output:mime}
   raw python_executable ${buildout:executable}
   raw python_eggs_executable ${buildout:bin-directory}/${python-with-eggs:interpreter}
   raw qemu_executable_location ${qemu:location}/bin/qemu-system-x86_64
@@ -100,6 +102,7 @@ extra-context =
   raw template_kvm_controller_run ${template-kvm-controller:target}
   raw template_kvm_run ${template-kvm-run:target}
   raw template_monitor ${monitor2-template:output}
+  raw template_nginx ${template-nginx:target}
   raw websockify_executable_location ${buildout:directory}/bin/websockify
   raw wipe_disk_wrapper ${buildout:directory}/bin/securedelete
 template-parts-destination = ${template-parts:target}
...
@@ -8,9 +8,8 @@ extends =
   ../../component/noVNC/buildout.cfg
   ../../component/openssl/buildout.cfg
   ../../component/netcat/buildout.cfg
-  ../../component/lxml-python/buildout.cfg
+  ../../component/nginx/buildout.cfg
   ../../component/pycurl/buildout.cfg
-  ../../component/numpy/buildout.cfg
   ../../component/gzip/buildout.cfg
   ../../stack/slapos.cfg
   ../../stack/resilient/buildout.cfg
@@ -33,28 +32,20 @@ parts = ${:common-parts}
 # In qemu builtin vnc server, and make it available only for localhost
 # so that only novnc can listen to it.
-#XXX-Cedric: Check status of https://github.com/kanaka/noVNC/issues/13 to see
-# When qemu has builtin support for websockets in vnc server to get rid of
-# Websockify (socket <-> websocket proxy server) when it is ready.
-# May solve previous XXX depending on the implementation.
-#XXX-Cedric : add list of keyboard layouts (azerty/us querty/...) parameter to qemu

 [python-with-eggs]
 recipe = zc.recipe.egg
 interpreter = ${:_buildout_section_name_}
 eggs =
   ${slapos-toolbox:eggs}
   ${python-cffi:egg}
-  ${lxml-python:egg}
-  websockify
   ${slapos-cookbook:eggs}
   erp5.util
 # BBB: eggs used as recipe should be kept otherwise sections depending
 # on it can't be uninstalled
   collective.recipe.shelloutput
-scripts =
-  websockify
+# Only generate the interpreter script to avoid conflicts with scripts
+# for eggs that are also generated by another section, like slapos.toolbox
+scripts = ${:interpreter}

 # Create all templates that will be used to deploy instances
 [download-base]
@@ -97,6 +88,9 @@ output = ${buildout:directory}/template.cfg
 [template-nbd]
 <= download-base

+[template-nginx]
+<= download-base
+
 [template-ansible-promise]
 <= download-base
@@ -135,9 +129,3 @@ context =
 [whitelist-domains-default]
 <= download-base
-
-[versions]
-websockify = 0.9.0
-gitdb = 0.6.4
-pycurl = 7.43.0
-smmap = 0.9.0
worker_processes {{ params['nb-workers'] }};
pid {{ params['path-pid'] }};
error_log {{ params['path-error-log'] }};
daemon off;
events {
worker_connections 1024;
accept_mutex off;
}
http {
include {{ mime }};
default_type application/octet-stream;
types_hash_bucket_size 64;
access_log {{ params['path-access-log'] }} combined;
index novnc.html;
upstream vnc_proxy {
server {{ params['websocket-ip'] }}:{{ params['websocket-port'] }};
}
server {
listen [{{ params['ip'] }}]:{{ params['port'] }} ssl http2;
server_name _;
ssl_certificate {{ ca['cert-file'] }};
ssl_certificate_key {{ ca['key-file'] }};
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_prefer_server_ciphers off;
keepalive_timeout 5;
client_body_temp_path {{ tempdir['client-body-temp-path'] }};
proxy_temp_path {{ tempdir['proxy-temp-path'] }};
fastcgi_temp_path {{ tempdir['fastcgi-temp-path'] }};
uwsgi_temp_path {{ tempdir['uwsgi-temp-path'] }};
scgi_temp_path {{ tempdir['scgi-temp-path'] }};
# path for static files
root {{ docroot }};
location /{{ params['websocket-path'] }} {
proxy_http_version 1.1;
proxy_pass http://vnc_proxy/;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
# VNC connection timeout
proxy_read_timeout 61s;
# Disable cache
proxy_buffering off;
}
}
}
@@ -62,7 +62,7 @@ cluster_doc_port = {{ parameter_dict.get("cluster-doc-port") }}
 auto_ballooning = '{{ parameter_dict.get("auto-ballooning") }}' in ('true', 'True', '1')
 vm_name = '{{ parameter_dict.get("name") }}'

 # If a device (ie.: /dev/sdb) is provided, use it instead
 # the disk_path with disk_format
 disk_info_list = []
 for disk_device_path in '{{ parameter_dict.get("disk-device-path", "") }}'.split():
@@ -145,7 +145,7 @@ def getMapStorageList(disk_storage_dict, external_disk_number):
   if id_list:
     if not map_f_exist:
       # shuffle the list to not write disk in data1, data2, ... everytime
       shuffle(id_list)
     if external_disk_number < last_amount:
       # Drop created disk is not allowed
@@ -277,7 +277,7 @@ ram = '%sM,slots=128,maxmem=%sM' % (init_ram_size, ram_max_size)
 kvm_argument_list = [qemu_path,
   '-enable-kvm', '-smp', smp, '-name', vm_name, '-m', ram, '-vga', 'std',
-  '-vnc', '%s:1,ipv4=on,password=on' % listen_ip,
+  '-vnc', '%s:1,password=on,websocket=on' % listen_ip,
   '-boot', 'order=cd,menu=on',
   '-qmp', 'unix:%s,server,nowait' % socket_path,
   '-pidfile', pid_file_path, '-msg', 'timestamp=on',
...
@@ -212,7 +212,8 @@ i0:kvm-{kvm-hash-value}-on-watch RUNNING
 i0:kvm_controller EXITED
 i0:monitor-httpd-{hash}-on-watch RUNNING
 i0:monitor-httpd-graceful EXITED
-i0:websockify-{hash}-on-watch RUNNING
+i0:nginx-graceful EXITED
+i0:nginx-on-watch RUNNING
 i0:whitelist-domains-download-{hash} RUNNING
 i0:whitelist-firewall-{hash} RUNNING""",
       self.getProcessInfo()
@@ -702,11 +703,12 @@ ir2:kvm-{kvm-hash-value}-on-watch RUNNING
 ir2:kvm_controller EXITED
 ir2:monitor-httpd-{hash}-on-watch RUNNING
 ir2:monitor-httpd-graceful EXITED
+ir2:nginx-graceful EXITED
+ir2:nginx-on-watch RUNNING
 ir2:notifier-on-watch RUNNING
 ir2:resilient_sshkeys_authority-on-watch RUNNING
 ir2:sshd-graceful EXITED
 ir2:sshd-on-watch RUNNING
-ir2:websockify-{hash}-on-watch RUNNING
 ir2:whitelist-domains-download-{hash} RUNNING
 ir2:whitelist-firewall-{hash} RUNNING
 ir3:bootstrap-monitor EXITED
@@ -2295,7 +2297,8 @@ ihs0:kvm-{kvm-hash-value}-on-watch RUNNING
 ihs0:kvm_controller EXITED
 ihs0:monitor-httpd-{hash}-on-watch RUNNING
 ihs0:monitor-httpd-graceful EXITED
-ihs0:websockify-{hash}-on-watch RUNNING
+ihs0:nginx-graceful EXITED
+ihs0:nginx-on-watch RUNNING
 ihs0:whitelist-domains-download-{hash} RUNNING
 ihs0:whitelist-firewall-{hash} RUNNING""",
       self.getProcessInfo()
...
@@ -70,7 +70,6 @@ config-url= $${metabase-instance:url}/api/session/properties
 [metabase-keystore-password]
 recipe = slapos.cookbook:generate.password
-bytes = 24

 [metabase-keystore]
 recipe = plone.recipe.command
...
[instance-profile]
filename = instance.cfg.in
md5sum = 4c7aa7b2132dc13ddee37fb416decf81
[buildout]
parts =
promises
mosquitto-service
publish-connection-parameter
eggs-directory = {{ buildout["eggs-directory"] }}
develop-eggs-directory = {{ buildout["develop-eggs-directory"] }}
offline = true
[instance-parameter]
recipe = slapos.cookbook:slapconfiguration
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/${:_buildout_section_name_}
[directory]
recipe = slapos.cookbook:mkdirectory
home = ${buildout:directory}
etc = ${:home}/etc
var = ${:home}/var
script = ${:etc}/run/
service = ${:etc}/service
promise = ${:etc}/promise/
log = ${:var}/log
bin = ${:home}/bin
[mosquitto-config-file]
recipe = slapos.recipe.build
location = ${directory:etc}/${:_buildout_section_name_}.cfg
ipv4 = ${instance-parameter:ipv4-random}
ipv6 = ${instance-parameter:ipv6-random}
port = 1883
password = ${mosquitto-password-file:location}
install =
  config = open(self.options["location"], "w")
  port = self.options["port"]
  ipv4 = self.options["ipv4"]
  ipv6 = self.options["ipv6"]
  password = self.options["password"]
  config.write(f"listener {port} {ipv4}\nprotocol mqtt\n\n")
  config.write(f"listener {port} {ipv6}\nprotocol mqtt\n\n")
  config.write(f"password_file {password}\n\n")
[mosquitto-password-file]
recipe = plone.recipe.command
location = ${directory:etc}/${:_buildout_section_name_}.txt
command =
touch ${:location}
{{ mosquitto_location }}/bin/mosquitto_passwd -b ${:location} ${mosquitto-password:username} ${mosquitto-password:passwd}
stop-on-error = true
[mosquitto-password]
recipe = slapos.cookbook:generate.password
username = mosquitto
[mosquitto-listen-promise-ipv4]
<= check-port-listening-promise
hostname = ${mosquitto-config-file:ipv4}
port = ${mosquitto-config-file:port}
[mosquitto-listen-promise-ipv6]
<= check-port-listening-promise
hostname = ${mosquitto-config-file:ipv6}
port = ${mosquitto-config-file:port}
[promises]
recipe =
instance-promises =
${mosquitto-listen-promise-ipv4:path}
${mosquitto-listen-promise-ipv6:path}
[mosquitto-service]
recipe = slapos.cookbook:wrapper
command-line = {{ mosquitto_location }}/sbin/mosquitto -c ${mosquitto-config-file:location}
wrapper-path = ${directory:service}/mosquitto-service
output = $${:wrapper-path}
[publish-connection-parameter]
recipe = slapos.cookbook:publish
ipv4 = mqtt://${mosquitto-config-file:ipv4}:${mosquitto-config-file:port}
ipv6 = mqtt://${mosquitto-config-file:ipv6}:${mosquitto-config-file:port}
username = ${mosquitto-password:username}
password = ${mosquitto-password:passwd}
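The published values above are enough for a quick smoke test with the client tools shipped by the mosquitto part; host, port and password below are placeholders standing in for the published URLs and credentials:

  HOST=10.0.0.2              # host part of the published ipv4/ipv6 value
  PORT=1883
  PASS='<published password>'
  mosquitto_sub -h "$HOST" -p "$PORT" -u mosquitto -P "$PASS" -t test/topic -v &
  mosquitto_pub -h "$HOST" -p "$PORT" -u mosquitto -P "$PASS" -t test/topic -m hello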
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/mosquitto/buildout.cfg
parts =
slapos-cookbook
instance-profile
plone.recipe.command
[plone.recipe.command]
recipe = zc.recipe.egg
[instance-profile]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg.in
rendered = ${buildout:directory}/instance.cfg
context =
section buildout buildout
key mosquitto_location mosquitto:location
# THIS IS NOT A BUILDOUT FILE, despite purposedly using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign which must fit in the following categorie.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[instance-profile]
filename = instance.cfg.in
md5sum = 35690065ba18dc49d0108fc1f0a07b9e
[instance-peertube]
_update_hash_filename_ = instance-peertube.cfg.in
md5sum = 52b9a872d69052f53dd29bfa4a067dbf
[template-nginx-service]
filename = template-nginx-service.sh.in
md5sum = 458870b70c33a1621b68961ae2372ad5
[template-peertube-service]
filename = template-peertube-service.sh.in
md5sum = fe42401ea8df7a245955683535f8a063
[template-peertube-yaml]
filename = template-peertube.yaml.in
md5sum = e761995c4c18efc4a199f11dc8fde039
[template-nginx-configuration]
filename = template-nginx.cfg.in
md5sum = a2c0d6e873370d64de707f668a3d40e3
[template-dcron-service]
filename = template-dcron-service.sh.in
md5sum = 851262d7174da868805cb7c8e1ced7c0
[template-crontab-line]
filename = template-crontab-line.in
md5sum = bab861f1d057e837697d78e74e47b0bb
[template-crontab]
filename = template-crontab.in
md5sum = f1f82101258de19068262b7213fc478b
[template-peertube-backup-script]
filename = template-peertube-backup.sh.in
md5sum = e50daa16a2c1866997933981bed45271
[template-peertube-restore-script]
filename = template-peertube-restore.sh.in
md5sum = bb67602f3d652d4e5a06711fae5996f9
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"description": "Parameters to instantiate Peertube",
"additionalProperties": false,
"properties": {
"frequency": {
"title": "Backup Frequency",
"description": "Backup Frequency",
"default": "",
"type": "string"
},
"fronted-url": {
"title": "Frontend URL",
"description": "Frontend URL",
"default": "",
"type": "string"
},
"name": {
"title": "Instance Title",
"description": "The name of your peertube instance.",
"default": "Peertube in Slapos",
"type": "string"
},
"short_description": {
"title": "PeerTube Short Description",
"description": "Short description of your peertube instance.",
"default": "PeerTube, an ActivityPub-federated video streaming platform using P2P directly in your web browser.",
"type": "string"
},
"description": {
"title": "Peertube Long Description",
"description": "The description of your instance",
"default": "Welcome to this PeerTube instance!",
"textarea": true,
"type": "string"
},
"terms": {
"title": "Terms",
"description": "",
"default": "No terms for now.",
"textarea": true,
"type": "string"
},
"code_of_conduct": {
"title": "Code of Conduct",
"description": "",
"default": "",
"textarea": true,
"type": "string"
},
"moderation_information": {
"title": "Moderation Information",
"description": "Who moderates the instance? What is the policy regarding NSFW videos? Political videos? etc",
"default": "",
"textarea": true,
"type": "string"
},
"creation_reason": {
"title": "Creation Reason",
"description": "Why did you create this instance?",
"default": "",
"textarea": true,
"type": "string"
},
"administrator": {
"title":"Administrator",
"description": "Who is behind the instance? A single person? A non profit?",
"default": "",
"type": "string"
},
"maintenance_lifetime": {
"title": "Maintenance Lifetime",
"description": "How long do you plan to maintain this instance?",
"default": "",
"type": "string"
},
"business_model": {
"title": "Business Model",
"description": "How will you pay the PeerTube instance server? With your own funds? With users donations? Advertising?",
"default": "",
"textarea": true,
"type": "string"
},
"hardware_information": {
"title": "Hardware Information",
"description": "",
"default": "",
"textarea": true,
"type": "string"
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by Peertube instanciation",
"properties": {
"frontend-url": {
"description": "Peertube Frontend URL",
"type": "string"
},
"backend-url": {
"description": "Peertube Backend URL",
"type": "string"
},
"username": {
"description": "Peertube username",
"type": "string"
},
"password": {
"description": "Peertube password",
"type": "string"
}
},
"type": "object"
}
[buildout]
extends =
{{ monitor_template }}
parts =
service-redis
promise-redis
postgresql
postgresql-binary-link
nginx-service
nginx-listen-promise
peertube-yaml
peertube-service
peertube-listen-promise
dcron-service
activate-crontab-file
peertube-backup-cron
peertube-database-resiliency-after-import-script
peertube-database-resiliency-exclude-file
publish-connection-parameter
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
srv = ${buildout:directory}/srv
var = ${buildout:directory}/var
log = ${:var}/log
run = ${:var}/run
www = ${:var}/www
crontabs = ${:etc}/crontabs
cron-entries = ${:etc}/cron.d
cronstamps = ${:etc}/cronstamps
cron-lines = ${:etc}/cron.lines
peertube_nginx_log = ${:log}/nginx
varnginx = ${:var}/nginx
services = ${:etc}/service
peertube_directory = ${:www}/peertube
config = ${:peertube_directory}/config
storage = ${:peertube_directory}/storage
versions = ${:peertube_directory}/versions
ssl = ${:etc}/ssl
##################
# Postgresql #
##################
[postgresql-password]
recipe = slapos.cookbook:generate.password
[postgresql-address]
recipe = slapos.cookbook:free_port
minimum = 5432
maximum = 5452
ip = {{ ipv4_random }}
[postgresql]
recipe = slapos.cookbook:postgres
bin = {{ postgresql10_location }}/bin/
services = ${directory:services}
dbname = peertube_prod
superuser = peertube
password = ${postgresql-password:passwd}
pgdata-directory = ${directory:srv}/postgresql
ipv4 = ${postgresql-address:ip}
# disable listening on ipv6
ipv6 =
port = ${postgresql-address:port}
[postgresql-binary-link]
recipe = slapos.cookbook:symbolic.link
target-directory = ${directory:bin}
link-binary = ${postgresql:bin}/postgres ${postgresql:bin}/psql
#############
# Nginx #
#############
[nginx-service]
recipe = slapos.recipe.template
url = {{ template_nginx_service }}
output = ${directory:services}/nginx
virtual-depends =
${nginx-configuration:ip}
[nginx-listen-promise]
<= monitor-promise-base
promise = check_url_available
name = nginx_listen.py
config-verify = 0
config-url = https://[${nginx-configuration:ip}]:${nginx-configuration:port}
[nginx-configuration]
recipe = slapos.recipe.template
url = {{ template_nginx_configration }}
output = ${directory:etc}/nginx.cfg
access_log = ${directory:log}/nginx-access.log
error_log = ${directory:log}/nginx-error.log
ip = {{ ipv6_random }}
port = 9443
ssl_key = ${directory:ssl}/nginx.key
ssl_csr = ${directory:ssl}/nginx.csr
ssl_crt = ${directory:ssl}/nginx.crt
#############
# Redis #
#############
[redis]
recipe = slapos.cookbook:mkdirectory
srv = ${directory:srv}/redis
log = ${directory:log}/redis
[service-redis]
recipe = slapos.cookbook:redis.server
wrapper = ${directory:services}/redis
promise-wrapper = ${directory:bin}/redis-promise
server-dir = ${redis:srv}
config-file = ${directory:etc}/redis.conf
log-file = ${redis:log}/redis.log
pid-file = ${directory:run}/redis.pid
use-passwd = false
unixsocket = ${:server-dir}/redis.socket
# port = 0 means "don't listen on TCP at all" - listen only on unix socket
ipv6 = ::1
port = 0
server-bin = {{ redis_binprefix }}/redis-server
cli-bin = {{ redis_binprefix }}/redis-cli
depend =
${logrotate-entry-redis:recipe}
[promise-redis]
<= monitor-promise-base
promise = check_command_execute
name = promise-redis.py
config-command = ${service-redis:promise-wrapper}
[logrotate-entry-redis]
<= logrotate-entry-base
log = ${redis:log}/*.log
name = redis
################
# Peertube #
################
[peertube-passwd]
recipe = slapos.cookbook:generate.password
username = root
[peertube-yaml]
recipe = slapos.recipe.template
url = {{ template_peertube_yaml }}
output = ${directory:config}/peertube.yaml
[peertube-listen-promise]
<= monitor-promise-base
promise = check_url_available
name = peertube_listen.py
config-verify = 0
config-url = ${peertube-parameters:frontend-url}
[peertube-service]
recipe = slapos.recipe.template
url = {{ template_peertube_service }}
output = ${directory:services}/peertube
[peertube-database-resiliency-exclude-file]
recipe = slapos.recipe.template:jinja2
inline = {{ "${postgresql:pgdata-directory}/" }}
output = ${directory:srv}/exporter.exclude
[peertube-database-resiliency-after-import-script]
recipe = slapos.recipe.template
url = {{ template_peertube_restore }}
output = ${directory:srv}/runner-import-restore
mode = 755
#################################
# Cron service #
#################################
[dcron-service]
recipe = slapos.recipe.template
url = {{ template_dcron_service }}
output = ${directory:services}/crond
logfile = ${directory:log}/crond.log
[peertube-backup-script]
recipe = slapos.recipe.template
url = {{ template_peertube_backup }}
backup-file = ${directory:srv}/backup/peertube_prod-dump.db
output = ${directory:bin}/${:_buildout_section_name_}
mode = 0744
[peertube-backup-cron]
recipe = slapos.recipe.template
url = {{ template_crontab_line }}
output = ${directory:bin}/${:_buildout_section_name_}
script = ${peertube-backup-script:output}
name = ${:_buildout_section_name_}
frequency = {{ slapparameter_dict.get('frequency', '@daily') }}
[activate-crontab-file]
# XXX File is never removed
recipe = plone.recipe.command
stop-on-error = true
command = {{ coreutils_cat }} {{ template_crontab }} ${peertube-backup-cron:output} | {{ dcron_output }} -c ${directory:crontabs} -
[frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = PeerTube Server Frontend
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
shared = true
{% if slapparameter_dict.get('frontend-url', 'false') != 'false' -%}
state = stopped
{% endif -%}
config-type = websocket
config-websocket-path-list = /socket.io /socket /tracker/socket
config-url = https://[${nginx-configuration:ip}]:${nginx-configuration:port}
return = domain secure_access
[peertube-parameters]
recipe = slapos.recipe.build
slapparameter-dict = {{ dumps(slapparameter_dict) }}
default-frontend-url = ${frontend:connection-secure_access}
working-dir = {{ peertube_location }}
ipv4-port=9000
node-config-dir=${directory:config}
node-env=production
npm-bin={{ nodejs_location }}/bin/npm
default-parameters =
  {
    "name" : "Peertube in Slapos",
    "frontend-url" : "",
    "short_description" : "PeerTube, an ActivityPub-federated video streaming platform using P2P directly in your web browser.",
    "description" : "Welcome to this PeerTube instance!",
    "terms" : "No terms for now.",
    "code_of_conduct" : "",
    "moderation_information" : "",
    "creation_reason" : "",
    "administrator" : "",
    "maintenance_lifetime" : "",
    "business_model" : "",
    "hardware_information" : ""
  }
init =
  from six.moves.urllib.parse import urlparse
  import json
  default_parameters = json.loads(options.get('default-parameters').strip())
  default_parameters['frontend-url'] = options['default-frontend-url']
  parameters = dict(default_parameters, **options['slapparameter-dict'])
  # options is not a dict...
  options['name'] = parameters['name']
  options['frontend-url'] = parameters['frontend-url']
  options['short_description'] = parameters['short_description']
  options['description'] = parameters['description']
  options['terms'] = parameters['terms']
  options['code_of_conduct'] = parameters['code_of_conduct']
  options['moderation_information'] = parameters['moderation_information']
  options['creation_reason'] = parameters['creation_reason']
  options['administrator'] = parameters['administrator']
  options['maintenance_lifetime'] = parameters['maintenance_lifetime']
  options['business_model'] = parameters['business_model']
  options['hardware_information'] = parameters['hardware_information']
  url = urlparse(options.get('frontend-url'))
  if url.port:
    options['host'] = '[' + url.hostname + ']'
    options['port'] = str(url.port)
  else:
    options['host'] = url.hostname
    options['port'] = str(443)
[publish-connection-parameter]
recipe = slapos.cookbook:publish
backend-url = ${frontend:config-url}
frontend-url = ${peertube-parameters:frontend-url}
password = ${peertube-passwd:passwd}
username = ${peertube-passwd:username}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[peertube]
recipe = slapos.recipe.template:jinja2
url = ${instance-peertube:target}
output = $${buildout:directory}/instance-peertube.cfg
context =
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
raw monitor_template ${monitor2-template:output}
key slapparameter_dict slap-configuration:configuration
raw ipv6_random $${slap-configuration:ipv6-random}
raw ipv4_random $${slap-configuration:ipv4-random}
raw template_peertube_yaml ${template-peertube-yaml:output}
raw template_nginx_configration ${template-nginx-configuration:output}
raw template_peertube_service ${template-peertube-service:output}
raw template_peertube_restore ${template-peertube-restore-script:output}
raw postgresql10_location ${postgresql10:location}
raw template_nginx_service ${template-nginx-service:output}
raw redis_binprefix ${redis28:location}/bin
raw template_dcron_service ${template-dcron-service:output}
raw template_peertube_backup ${template-peertube-backup-script:output}
raw template_crontab_line ${template-crontab-line:output}
raw coreutils_cat ${coreutils-output:cat}
raw template_crontab ${template-crontab:output}
raw dcron_output ${dcron-output:crontab}
raw peertube_location ${peertube:location}
raw nodejs_location ${nodejs:location}
[switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype
RootSoftwareInstance = $${:default}
default = peertube:output
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer= $${slap-connection:computer-id}
partition=$${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[buildout]
extends =
buildout.hash.cfg
# buildout.hash.cfg is used for automated hash calculation of managed
# instance files by calling update-hash
# buildout.hash.cfg
# "slapos" stack describes basic things needed for 99.9% of SlapOS Software
# Releases
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
# Extend here component profiles, like openssl, apache, mariadb, curl...
# Or/and extend a stack (lamp, tomcat) that does most of the work for you
# In this example we extend from helloweb component.
../../component/unzip/buildout.cfg
../../component/curl/buildout.cfg
../../component/dcron/buildout.cfg
../../component/vim/buildout.cfg
../../component/nodejs/buildout.cfg
../../component/yarn/buildout.cfg
../../component/python3/buildout.cfg
../../component/nginx/buildout.cfg
../../component/ffmpeg/buildout.cfg
../../component/postgresql/buildout.cfg
../../component/nspr/buildout.cfg
../../component/gcc/buildout.cfg
../../component/libxml2/buildout.cfg
../../component/libxslt/buildout.cfg
../../component/lxml-python/buildout.cfg
# redis-server
../../component/redis/buildout.cfg
../../component/git/buildout.cfg
../../component/wget/buildout.cfg
# backup
../../component/rdiff-backup/buildout.cfg
../../component/rsync/buildout.cfg
parts =
# Call installation of the slapos.cookbook egg defined in stack/slapos.cfg (needed
# in 99.9% of SlapOS Software Releases)
slapos-cookbook
dcron
gcc
unzip
curl
nodejs
yarn
openssl
python3
nginx
ffmpeg
postgresql
redis
wget
# peertube sections
peertube
peertube-build
instance-profile
[nodejs]
<= nodejs-16.13.2
[peertube]
recipe = slapos.recipe.build:download-unpacked
url = https://github.com/Chocobozzz/PeerTube/releases/download/v4.2.2/peertube-v4.2.2.zip
md5sum = 1c9639748d66e8c49fc27e4705f87622
[peertube-build]
recipe = slapos.recipe.cmmi
path = ${peertube:location}
environment =
PATH=${unzip:location}/bin:${vim:location}/bin:${nodejs:location}/bin:${yarn:location}/bin:${python3:location}/bin:${nginx:location}/sbin:${postgresql10:location}/bin:${gcc-10.2:location}/bin:${redis:location}/bin:${git:location}/bin:${wget:location}/bin:%(PATH)s
CPPFLAGS=-I${openssl:location}/include
LDFLAGS=-L${curl:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib
pre-configure =
${yarn:location}/bin/yarn install --production --pure-lockfile
configure-command = true
make-binary = cd ${peertube:location} && ${yarn:location}/bin/yarn
post-install =
rm -rf ${buildout:directory}/.cache/yarn/
rm -rf ${buildout:directory}/parts/peertube/client/node_modules/chromedriver/
[instance-profile]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/instance.cfg
[instance-peertube]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
destination = ${buildout:directory}/${:_buildout_section_name_}
[template-peertube-service]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-peertube-yaml]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-nginx-service]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-nginx-configuration]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-dcron-service]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-crontab-line]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-crontab]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-peertube-backup-script]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
[template-peertube-restore-script]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/${:filename}
{
"name": "Peertube",
"description": "Peertube Website Text",
"serialisation": "json-in-xml",
"software-type": {
"default": {
"title": "Default",
"software-type": "default",
"description": "Default",
"request": "instance-peertube-input-schema.json",
"response": "instance-peertube-output-schema.json",
"index": 1
}
}
}
# $${:_buildout_section_name_}
$${:frequency} ID=$${:name} $${:script}
# min(0-59) hours(0-23) day(1-31) month(1-12) dow(0-7) command
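# e.g. a rendered entry could look like (hypothetical values):
# 0 */4 * * * ID=peertube-backup /path/to/peertube-backup-script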
MAILTO=admins@erp5.org
#!${dash-output:dash}
# BEWARE: This file is operated by slapos node
# BEWARE: It will be overwritten automatically
exec ${dcron-output:crond} \
-s $${directory:cron-entries} \
-c $${directory:crontabs} \
-t $${directory:cronstamps} \
-f -l 5 \
-L $${dcron-service:logfile}
# -M cron_simplelogger
#!${dash-output:dash}
# BEWARE: This file is operated by slapos node
# BEWARE: It will be overwritten automatically
if [ ! -e $${nginx-configuration:ssl_crt} ]
then
${openssl-output:openssl} genrsa -out $${nginx-configuration:ssl_key} 2048
${openssl-output:openssl} req -new \
-subj "/C=AA/ST=Denial/L=Nowhere/O=Dis/CN=$${nginx-configuration:ip}" \
-key $${nginx-configuration:ssl_key} -out $${nginx-configuration:ssl_csr}
${openssl-output:openssl} x509 -req -days 365 \
-in $${nginx-configuration:ssl_csr} \
-signkey $${nginx-configuration:ssl_key} \
-out $${nginx-configuration:ssl_crt}
fi
exec ${nginx-output:nginx} \
-c $${nginx-configuration:output}
daemon off; # run in the foreground so supervisord can look after it
worker_processes 4;
pid $${directory:run}/nginx.pid;
events {
worker_connections 768;
# multi_accept on;
}
error_log $${nginx-configuration:error_log};
http {
##
# Basic Settings
##
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
# server_names_hash_bucket_size 64;
# server_name_in_redirect off;
default_type application/octet-stream;
include ${nginx-output:mime};
##
# Logging Settings
##
access_log $${nginx-configuration:access_log};
error_log $${nginx-configuration:error_log};
##
# Gzip Settings
##
gzip on;
gzip_disable "msie6";
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/javascript application/x-javascript text/xml application/xml application/xml+rss text/javascript;
upstream backend {
server $${slap-configuration:ipv4-random}:$${peertube-parameters:ipv4-port};
}
server {
listen [$${nginx-configuration:ip}]:$${nginx-configuration:port} ssl;
access_log $${directory:peertube_nginx_log}/peertube.access.log; # reduce I/O with buffer=10m flush=5m
error_log $${directory:peertube_nginx_log}/peertube.error.log;
##
# Security hardening (as of Nov 15, 2020)
# based on Mozilla Guideline v5.6
##
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers on;
ssl_ciphers ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256; # add ECDHE-RSA-AES256-SHA if you want compatibility with Android 4
ssl_session_timeout 1d; # defaults to 5m
ssl_session_cache shared:SSL:10m; # estimated to 40k sessions
ssl_session_tickets off;
ssl_stapling off;
ssl_stapling_verify on;
# HSTS (https://hstspreload.org); needs to be repeated in 'location' blocks that define their own add_header directives
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
##
# Application
##
location @api {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
client_max_body_size 100k; # default is 1M
proxy_connect_timeout 10m;
proxy_send_timeout 10m;
proxy_read_timeout 10m;
send_timeout 10m;
proxy_pass http://backend;
}
location / {
try_files /dev/null @api;
}
location = /api/v1/videos/upload-resumable {
client_max_body_size 0;
proxy_request_buffering off;
try_files /dev/null @api;
}
location ~ ^/api/v1/videos/(upload|([^/]+/studio/edit))$ {
limit_except POST HEAD { deny all; }
# This is the maximum upload size, which roughly matches the maximum size of a video file.
# Note that temporary space is needed equal to the total size of all concurrent uploads.
# This data gets stored in /var/lib/nginx by default, so you may want to put this directory
# on a dedicated filesystem.
client_max_body_size 12G; # default is 1M
add_header X-File-Maximum-Size 8G always; # inform backend of the set value in bytes before mime-encoding (x * 1.4 >= client_max_body_size)
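# sanity check: with the values above, 8G * 1.4 = 11.2G, which stays below the 12G client_max_body_size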
try_files /dev/null @api;
}
location ~ ^/api/v1/(videos|video-playlists|video-channels|users/me) {
client_max_body_size 6M; # default is 1M
add_header X-File-Maximum-Size 4M always; # inform backend of the set value in bytes before mime-encoding (x * 1.4 >= client_max_body_size)
try_files /dev/null @api;
}
##
# Websocket
##
location @api_websocket {
proxy_http_version 1.1;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://backend;
}
location /socket.io {
try_files /dev/null @api_websocket;
}
location /tracker/socket {
# Peers send a message to the tracker every 15 minutes
# Don't close the websocket before then
proxy_read_timeout 15m; # default is 60s
try_files /dev/null @api_websocket;
}
##
# Performance optimizations
# For extra performance please refer to https://github.com/denji/nginx-tuning
##
root $${directory:storage};
# Enable compression for JS/CSS/HTML, for improved client load times.
# It might be nice to compress JSON/XML as returned by the API, but
# leaving that out to protect against potential BREACH attack.
gzip on;
gzip_vary on;
gzip_types # text/html is always compressed by HttpGzipModule
text/css
application/javascript
font/truetype
font/opentype
application/vnd.ms-fontobject
image/svg+xml;
gzip_min_length 1000; # default is 20 bytes
gzip_buffers 16 8k;
gzip_comp_level 2; # default is 1
client_body_timeout 30s; # default is 60
client_header_timeout 10s; # default is 60
send_timeout 10s; # default is 60
keepalive_timeout 10s; # default is 75
resolver_timeout 10s; # default is 30
reset_timedout_connection on;
proxy_ignore_client_abort on;
tcp_nopush on; # send headers in one piece
tcp_nodelay on; # don't buffer data sent, good for small data bursts in real time
# If you have a small /var/lib partition, it could be interesting to store temp nginx uploads in a different place
# See https://nginx.org/en/docs/http/ngx_http_core_module.html#client_body_temp_path
#client_body_temp_path $${directory:storage}/nginx/;
# Bypass PeerTube for performance reasons. Optional.
# Should be consistent with client-overrides assets list in /server/controllers/client.ts
location ~ ^/client/(assets/images/(icons/icon-36x36\.png|icons/icon-48x48\.png|icons/icon-72x72\.png|icons/icon-96x96\.png|icons/icon-144x144\.png|icons/icon-192x192\.png|icons/icon-512x512\.png|logo\.svg|favicon\.png|default-playlist\.jpg|default-avatar-account\.png|default-avatar-account-48x48\.png|default-avatar-video-channel\.png|default-avatar-video-channel-48x48\.png))$ {
add_header Cache-Control "public, max-age=31536000, immutable"; # Cache 1 year
root $${directory:peertube_directory}/;
try_files $${directory:storage}/client-overrides/$1 $${directory:peertube_directory}/client/dist/$1 @api;
}
# Bypass PeerTube for performance reasons. Optional.
# location ~ ^/client/(.*\.(js|css|png|svg|woff2|otf|ttf|woff|eot))$ {
# add_header Cache-Control "public, max-age=31536000, immutable"; # Cache 1 year
# alias $${directory:var}/www/peertube/peertube-latest/client/dist/$1;
# }
# Bypass PeerTube for performance reasons. Optional.
location ~ ^/static/(thumbnails|avatars)/ {
if ($request_method = 'OPTIONS') {
add_header Access-Control-Allow-Origin '*';
add_header Access-Control-Allow-Methods 'GET, OPTIONS';
add_header Access-Control-Allow-Headers 'Range,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
add_header Access-Control-Max-Age 1728000; # Preflight request can be cached 20 days
add_header Content-Type 'text/plain charset=UTF-8';
add_header Content-Length 0;
return 204;
}
add_header Access-Control-Allow-Origin '*';
add_header Access-Control-Allow-Methods 'GET, OPTIONS';
add_header Access-Control-Allow-Headers 'Range,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
add_header Cache-Control "public, max-age=7200"; # Cache response 2 hours
rewrite ^/static/(.*)$ /$1 break;
try_files $uri @api;
}
# # Bypass PeerTube for performance reasons. Optional.
# location ~ ^/static/(webseed|redundancy|streaming-playlists)/ {
# limit_rate_after 5M;
# # Clients usually have 4 simultaneous webseed connections, so the real limit is 3MB/s per client
# set $peertube_limit_rate 800k;
# # Increase rate limit in HLS mode, because we don't have multiple simultaneous connections
# if ($request_uri ~ -fragmented.mp4$) {
# set $peertube_limit_rate 5M;
# }
# # Use this line with nginx >= 1.17.0
# #limit_rate $peertube_limit_rate;
# # Or this line if your nginx < 1.17.0
# set $limit_rate $peertube_limit_rate;
# if ($request_method = 'OPTIONS') {
# add_header Access-Control-Allow-Origin '*';
# add_header Access-Control-Allow-Methods 'GET, OPTIONS';
# add_header Access-Control-Allow-Headers 'Range,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
# add_header Access-Control-Max-Age 1728000; # Preflight request can be cached 20 days
# add_header Content-Type 'text/plain charset=UTF-8';
# add_header Content-Length 0;
# return 204;
# }
# if ($request_method = 'GET') {
# add_header Access-Control-Allow-Origin '*';
# add_header Access-Control-Allow-Methods 'GET, OPTIONS';
# add_header Access-Control-Allow-Headers 'Range,DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
# # Don't spam access log file with byte range requests
# access_log off;
# }
# # Enabling the sendfile directive eliminates the step of copying the data into the buffer
# # and enables direct copying data from one file descriptor to another.
# sendfile on;
# sendfile_max_chunk 1M; # prevent one fast connection from entirely occupying the worker process. should be > 800k.
# aio threads;
# rewrite ^/static/webseed/(.*)$ /videos/$1 break;
# rewrite ^/static/(.*)$ /$1 break;
# try_files $uri @api;
# }
ssl_certificate $${nginx-configuration:ssl_crt};
ssl_certificate_key $${nginx-configuration:ssl_key};
fastcgi_temp_path $${directory:varnginx} 1 2;
uwsgi_temp_path $${directory:varnginx} 1 2;
scgi_temp_path $${directory:varnginx} 1 2;
client_body_temp_path $${directory:varnginx} 1 2;
proxy_temp_path $${directory:varnginx} 1 2;
}
}
#!${dash-output:dash}
# Dump the database
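# -Fc writes a custom-format archive, which is what pg_restore (used by the restore script) expects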
$${postgresql:bin}/pg_dump -h $${postgresql:pgdata-directory} -U $${postgresql:superuser} -Fc peertube_prod > $${peertube-backup-script:backup-file}
#!${dash-output:dash}
# DO NOT RUN THIS SCRIPT ON A PRODUCTION INSTANCE
# OR THE POSTGRESQL DATA WILL BE ERASED.
# This script imports the dump of the postgresql database into the real
# database. It is launched by the clone (importer) instance of theia
# at the end of the import script.
# Depending on the outcome, it creates a file containing
# the status of the restoration (success or failure)
die() {
echo "$*" 1>&2
exit 1
}
echo "Postgresql data directory is ready"
# 2. Make sure the postgresql process is not running.
# Quote from the postgresql doc:
# > While the server is running, its PID is stored in the file postmaster.pid in the data directory.
# https://www.postgresql.org/docs/current/server-start.html
# which means that if postmaster.pid exists, postgresql is running.
pid_file=$${postgresql:pgdata-directory}/postmaster.pid
if [ -e "$pid_file" ]; then
echo "Postgresql is running, this should not happened, aborting."
exit 1
fi
echo "Starting postgresql..."
$${postgresql:bin}/postgres -D $${postgresql:pgdata-directory} &
postgresql_pid=$!
trap "kill $postgresql_pid" EXIT TERM INT
# If postgres has stopped, abort
if ! [ -d /proc/$postgresql_pid ]; then
echo "postgresql exited, aborting."
exit 1
fi
# run psql
psql() {
PGPASSWORD=$${postgresql:password} $${postgresql:bin}/psql \
-h $${postgresql:ipv4} \
-p $${postgresql:port} \
-U $${postgresql:superuser} \
-d $${postgresql:dbname} \
"$@"
}
echo "Ready to check postgresql is running..."
# initial db setup
# ( first querying PG several times, waiting a bit till postgresql is started and ready )
tpgwait=60
while true; do
pgtables="$(psql -c '\d' 2>&1)" && break
tpgwait=$(( $tpgwait - 1 ))
test $tpgwait = 0 && die "pg query problem"
echo "I: PostgreSQL is not ready (yet ?); will retry $tpgwait times..." 1>&2
sleep 1
done
echo "I: PostgreSQL ready." 1>&2
echo "Postgresql is running, ready to restore"
# Restore the database
# Use -d to connect to the default 'postgres' database to allow us to restore the $${postgresql:dbname}
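# pg_restore flags: -e stop on error, -c drop objects before recreating them, -C recreate the database itself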
PGPASSWORD=$${postgresql:password} $${postgresql:bin}/pg_restore -h $${postgresql:ipv4} -p $${postgresql:port} -U $${postgresql:superuser} -e -c -C -d postgres $${peertube-backup-script:backup-file} || {
RESTORE_EXIT_CODE=$?
echo 'Backup restoration failed.'
exit $RESTORE_EXIT_CODE
}
echo "Postgresql restore finished"
#!${dash-output:dash}
# setup db
die() {
echo "$*" 1>&2
exit 1
}
# run psql on the peertube db
psql() {
PGPASSWORD=$${postgresql:password} $${postgresql:bin}/psql \
-h $${postgresql:ipv4} \
-p $${postgresql:port} \
-U $${postgresql:superuser} \
-d $${postgresql:dbname} \
"$@"
}
# initial db setup
# ( first querying PG several times, waiting a bit till postgresql is started and ready )
tpgwait=5
while true; do
pgtables="$(psql -c '\d' 2>&1)" && break
tpgwait=$(( $tpgwait - 1 ))
test $tpgwait = 0 && die "pg query problem"
echo "I: PostgreSQL is not ready (yet ?); will retry $tpgwait times..." 1>&2
sleep 1
done
echo "I: PostgreSQL ready." 1>&2
cd ${peertube:location}
exec env PT_INITIAL_ROOT_PASSWORD=$${peertube-passwd:passwd} NODE_ENV=peertube \
NODE_CONFIG_DIR=$${directory:config} PATH=${ffmpeg:location}/bin:$PATH \
${nodejs:location}/bin/node ${peertube:location}/dist/server
listen:
hostname: '$${slap-configuration:ipv4-random}'
port: $${peertube-parameters:ipv4-port}
# Correspond to your reverse proxy server_name/listen configuration (i.e., your public PeerTube instance URL)
webserver:
https: true
hostname: '$${peertube-parameters:host}'
port: $${peertube-parameters:port}
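# For illustration only (hypothetical values): with frontend-url https://peertube.example.com
# this renders as hostname: 'peertube.example.com' and port: 443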
# Secrets you need to generate the first time you run PeerTube
secrets:
# Generate one using `openssl rand -hex 32`
peertube: '35b4c762c8eb9e3ccb5bd7ad482ba4ecba0f99571db51ece24976e0f666fea48'
rates_limit:
api:
# 50 attempts in 10 seconds
window: 10 seconds
max: 50
login:
# 15 attempts in 5 min
window: 5 minutes
max: 15
signup:
# 2 attempts in 5 min (only succeeded attempts are taken into account)
window: 5 minutes
max: 2
ask_send_email:
# 3 attempts in 5 min
window: 5 minutes
max: 3
receive_client_log:
# 10 attempts in 10 min
window: 10 minutes
max: 10
# Proxies to trust to get real client IP
# If you run PeerTube just behind a local proxy (nginx), keep 'loopback'
# If you run PeerTube behind a remote proxy, add the proxy IP address (or subnet)
trust_proxy:
- 'loopback'
# Your database name will be database.name OR 'peertube'+database.suffix
database:
hostname: '$${postgresql:ipv4}'
port: '$${postgresql:port}'
ssl: false
suffix: '_prod'
username: '$${postgresql:superuser}'
password: '$${postgresql:password}'
pool:
max: 5
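# With the values above (suffix '_prod', no explicit name) the effective database is
# 'peertube_prod', which is the name the backup and restore scripts operate on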
# Redis server for short time storage
# You can also specify a 'socket' path to a unix socket but first need to
# set 'hostname' and 'port' to null
redis:
socket: $${service-redis:unixsocket}
auth: null
db: 0
# SMTP server to send emails
smtp:
# smtp or sendmail
transport: smtp
# Path to sendmail command. Required if you use sendmail transport
sendmail: null
hostname: null
port: 465 # If you use StartTLS: 587
username: null
password: null
tls: true # If you use StartTLS: false
disable_starttls: false
ca_file: null # Used for self signed certificates
from_address: 'admin@example.com'
email:
body:
signature: 'PeerTube'
subject:
prefix: '[PeerTube]'
# Update default PeerTube values
# Set by API when the field is not provided and put as default value in client
defaults:
# Change default values when publishing a video (upload/import/go Live)
publish:
download_enabled: true
comments_enabled: true
# public = 1, unlisted = 2, private = 3, internal = 4
privacy: 1
# CC-BY = 1, CC-SA = 2, CC-ND = 3, CC-NC = 4, CC-NC-SA = 5, CC-NC-ND = 6, Public Domain = 7
# You can also choose a custom licence value added by a plugin
# No licence by default
licence: null
p2p:
# Enable P2P by default in PeerTube client
# Can be enabled/disabled by anonymous users and logged in users
webapp:
enabled: true
# Enable P2P by default in PeerTube embed
# Can be enabled/disabled by URL option
embed:
enabled: true
# From the project root directory
storage:
tmp: '$${directory:peertube_directory}/storage/tmp/' # Use to download data (imports etc), store uploaded files before and during processing...
bin: '$${directory:peertube_directory}/storage/bin/'
avatars: '$${directory:peertube_directory}/storage/avatars/'
videos: '$${directory:peertube_directory}/storage/videos/'
streaming_playlists: '$${directory:peertube_directory}/storage/streaming-playlists/'
redundancy: '$${directory:peertube_directory}/storage/redundancy/'
logs: '$${directory:peertube_directory}/storage/logs/'
previews: '$${directory:peertube_directory}/storage/previews/'
thumbnails: '$${directory:peertube_directory}/storage/thumbnails/'
torrents: '$${directory:peertube_directory}/storage/torrents/'
captions: '$${directory:peertube_directory}/storage/captions/'
cache: '$${directory:peertube_directory}/storage/cache/'
plugins: '$${directory:peertube_directory}/storage/plugins/'
well_known: '$${directory:peertube_directory}/storage/well-known/'
# Overridable client files in client/dist/assets/images:
# - logo.svg
# - favicon.png
# - default-playlist.jpg
# - default-avatar-account.png
# - default-avatar-video-channel.png
# - and icons/*.png (PWA)
# Could contain for example assets/images/favicon.png
# If the file exists, peertube will serve it
# If not, peertube will fallback to the default file
client_overrides: '$${directory:peertube_directory}/storage/client-overrides/'
object_storage:
enabled: false
# Without protocol, will default to HTTPS
endpoint: '' # 's3.amazonaws.com' or 's3.fr-par.scw.cloud' for example
region: 'us-east-1'
# Set this ACL on each uploaded object
upload_acl: 'public-read'
credentials:
# You can also use AWS_ACCESS_KEY_ID env variable
access_key_id: ''
# You can also use AWS_SECRET_ACCESS_KEY env variable
secret_access_key: ''
# Maximum amount to upload in one request to object storage
max_upload_part: 100MB
streaming_playlists:
bucket_name: 'streaming-playlists'
# Allows setting all buckets to the same value but with a different prefix
prefix: '' # Example: 'streaming-playlists:'
# Base url for object URL generation, scheme and host will be replaced by this URL
# Useful when you want to use a CDN/external proxy
base_url: '' # Example: 'https://mirror.example.com'
# Same settings but for webtorrent videos
videos:
bucket_name: 'videos'
prefix: ''
base_url: ''
log:
level: 'info' # 'debug' | 'info' | 'warn' | 'error'
rotation:
enabled : true # Enabled by default, if disabled make sure that 'storage.logs' is pointing to a folder handled by logrotate
max_file_size: 12MB
max_files: 20
anonymize_ip: false
log_ping_requests: true
log_tracker_unknown_infohash: true
prettify_sql: false
# Accept warn/error logs coming from the client
accept_client_log: true
trending:
videos:
interval_days: 7 # Compute trending videos for the last x days
algorithms:
enabled:
- 'hot' # adaptation of Reddit's 'Hot' algorithm
- 'most-viewed' # default, used initially by PeerTube as the trending page
- 'most-liked'
default: 'most-viewed'
# Cache remote videos on your server, to help other instances to broadcast the video
# You can define multiple caches using different sizes/strategies
# Once you have defined your strategies, choose which instances you want to cache in admin -> manage follows -> following
redundancy:
videos:
check_interval: '1 hour' # How often you want to check new videos to cache
strategies: # Just uncomment strategies you want
# -
# size: '10GB'
# # Minimum time the video must remain in the cache. Only accept values > 10 hours (to not overload remote instances)
# min_lifetime: '48 hours'
# strategy: 'most-views' # Cache videos that have the most views
# -
# size: '10GB'
# # Minimum time the video must remain in the cache. Only accept values > 10 hours (to not overload remote instances)
# min_lifetime: '48 hours'
# strategy: 'trending' # Cache trending videos
# -
# size: '10GB'
# # Minimum time the video must remain in the cache. Only accept values > 10 hours (to not overload remote instances)
# min_lifetime: '48 hours'
# strategy: 'recently-added' # Cache recently added videos
# min_views: 10 # Having at least x views
# Other instances that duplicate your content
remote_redundancy:
videos:
# 'nobody': Do not accept remote redundancies
# 'anybody': Accept remote redundancies from anybody
# 'followings': Accept redundancies from instance followings
accept_from: 'anybody'
csp:
enabled: false
report_only: true # CSP directives are still being tested, so disable the report only mode at your own risk!
report_uri:
security:
# Set the X-Frame-Options header to help to mitigate clickjacking attacks
frameguard:
enabled: true
tracker:
# If you disable the tracker, you disable the P2P on your PeerTube instance
enabled: true
# Only handle requests on your videos
# If you set this to false it means you have a public tracker
# Then, it is possible that clients overload your instance with external torrents
private: true
# Reject peers that do a lot of announces (could improve privacy of TCP/UDP peers)
reject_too_many_announces: false
history:
videos:
# If you want to limit users videos history
# -1 means there is no limitations
# Other values could be '6 months' or '30 days' etc (PeerTube will periodically delete old entries from database)
max_age: -1
views:
videos:
# PeerTube creates a database entry every hour for each video to track views over a period of time
# This is used in particular by the Trending page
# PeerTube could remove old remote video views if you want to reduce your database size (video view counter will not be altered)
# -1 means no cleanup
# Other values could be '6 months' or '30 days' etc (PeerTube will periodically delete old entries from database)
remote:
max_age: '30 days'
# PeerTube buffers local video views before updating and federating the video
local_buffer_update_interval: '30 minutes'
ip_view_expiration: '1 hour'
# Used to get country location of views of local videos
geo_ip:
enabled: true
country:
database_url: 'https://dbip.mirror.framasoft.org/files/dbip-country-lite-latest.mmdb'
plugins:
# The website PeerTube will ask for available PeerTube plugins and themes
# This is an unmoderated plugin index, so only install plugins/themes you trust
index:
enabled: true
check_latest_versions_interval: '12 hours' # How often you want to check new plugins/themes versions
url: 'https://packages.joinpeertube.org'
federation:
videos:
federate_unlisted: false
# Add a weekly job that cleans up remote AP interactions on local videos (shares, rates and comments)
# It removes objects that do not exist anymore, and potentially fix their URLs
cleanup_remote_interactions: true
peertube:
check_latest_version:
# Check and notify admins of new PeerTube versions
enabled: true
# You can use a custom URL if you want; it must respect the format behind https://joinpeertube.org/api/v1/versions.json
url: 'https://joinpeertube.org/api/v1/versions.json'
webadmin:
configuration:
edition:
# Set this to false if you don't want to allow config edition in the web interface by instance admins
allowed: true
# XML, Atom or JSON feeds
feeds:
videos:
# Default number of videos displayed in feeds
count: 20
comments:
# Default number of comments displayed in feeds
count: 20
###############################################################################
#
# From this point, almost all following keys can be overridden by the web interface
# (local-production.json file). If you need to change some values, prefer to
# use the web interface because the configuration will be automatically
# reloaded without any need to restart PeerTube
#
# /!\ If you already have a local-production.json file, modification of some of
# the following keys will have no effect /!\
#
###############################################################################
cache:
previews:
size: 500 # Max number of previews you want to cache
captions:
size: 500 # Max number of video captions/subtitles you want to cache
torrents:
size: 500 # Max number of video torrents you want to cache
admin:
# Used to generate the root user at first startup
# And to receive emails from the contact form
email: 'admin@example.com'
contact_form:
enabled: false
signup:
enabled: false
limit: 10 # When the limit is reached, registrations are disabled. -1 == unlimited
minimum_age: 16 # Used to configure the signup form
requires_email_verification: false
filters:
cidr: # You can specify CIDR ranges to whitelist (empty = no filtering) or blacklist
whitelist: []
blacklist: []
user:
# Default value of maximum video bytes the user can upload (does not take into account transcoded files)
# Byte format is supported ("1GB" etc)
# -1 == unlimited
video_quota: -1
video_quota_daily: -1
video_channels:
max_per_user: 20 # Allows each user to create up to 20 video channels.
# If enabled, the video will be transcoded to mp4 (x264) with `faststart` flag
# In addition, if some resolutions are enabled the mp4 video file will be transcoded to these new resolutions
# Please, do not disable transcoding since many uploaded videos will not work
transcoding:
enabled: true
# Allow your users to upload .mkv, .mov, .avi, .wmv, .flv, .f4v, .3g2, .3gp, .mts, .m2ts, .mxf, .nut videos
allow_additional_extensions: true
# If a user uploads an audio file, PeerTube will create a video by merging the preview file and the audio file
allow_audio_files: true
# Amount of threads used by ffmpeg for 1 transcoding job
threads: 1
# Amount of transcoding jobs to execute in parallel
concurrency: 1
# Choose the transcoding profile
# New profiles can be added by plugins
# Available in core PeerTube: 'default'
profile: 'default'
resolutions: # Only created if the original video has a higher resolution, uses more storage!
0p: false # audio-only (creates mp4 without video stream, always created when enabled)
144p: true
240p: true
360p: true
480p: true
720p: true
1080p: true
1440p: true
2160p: true
# Transcode and keep original resolution, even if it's above your maximum enabled resolution
always_transcode_original_resolution: true
# Generate videos in a WebTorrent format (what we do since the first PeerTube release)
# If you also enabled the hls format, it will multiply videos storage by 2
# If disabled, breaks federation with PeerTube instances < 2.1
webtorrent:
enabled: false
# /!\ Requires ffmpeg >= 4.1
# Generate HLS playlists and fragmented MP4 files. Better playback than with WebTorrent:
# * Resolution change is smoother
# * Faster playback in particular with long videos
# * More stable playback (less bugs/infinite loading)
# If you also enabled the webtorrent format, it will multiply videos storage by 2
hls:
enabled: true
live:
enabled: false
# Limit lives duration
# -1 == unlimited
max_duration: -1 # For example: '5 hours'
# Limit max number of live videos created on your instance
# -1 == unlimited
max_instance_lives: 20
# Limit max number of live videos created by a user on your instance
# -1 == unlimited
max_user_lives: 3
# Allow your users to save a replay of their live
# PeerTube will transcode segments in a video file
# If the user daily/total quota is reached, PeerTube will stop the live
# /!\ transcoding.enabled (and not live.transcoding.enabled) has to be true to create a replay
allow_replay: true
# Allow your users to change latency settings (small latency/default/high latency)
# Small latency live streams cannot use P2P
# High latency live streams can increase P2P ratio
latency_setting:
enabled: true
# Your firewall should accept traffic from this port in TCP if you enable live
rtmp:
enabled: true
# Listening hostname/port for RTMP server
# '::' to listen on IPv6 and IPv4, '0.0.0.0' to listen on IPv4
# Use null to automatically listen on '::' if IPv6 is available, or '0.0.0.0' otherwise
hostname: null
port: 1935
# Public hostname of your RTMP server
# Use null to use the same value as `webserver.hostname`
public_hostname: null
rtmps:
enabled: false
# Listening hostname/port for RTMPS server
# '::' to listen on IPv6 and IPv4, '0.0.0.0' to listen on IPv4
# Use null to automatically listen on '::' if IPv6 is available, or '0.0.0.0' otherwise
hostname: null
port: 1936
# Absolute paths
key_file: ''
cert_file: ''
# Public hostname of your RTMPS server
# Use null to use the same value as `webserver.hostname`
public_hostname: null
# Allow to transcode the live streaming in multiple live resolutions
transcoding:
enabled: true
threads: 2
# Choose the transcoding profile
# New profiles can be added by plugins
# Available in core PeerTube: 'default'
profile: 'default'
resolutions:
144p: false
240p: false
360p: false
480p: false
720p: false
1080p: false
1440p: false
2160p: false
# Also transcode original resolution, even if it's above your maximum enabled resolution
always_transcode_original_resolution: true
video_studio:
# Enable video edition by users (cut, add intro/outro, add watermark etc)
# If enabled, users can create transcoding tasks as they wish
enabled: false
import:
# Add ability for your users to import remote videos (from YouTube, torrent...)
videos:
# Amount of import jobs to execute in parallel
concurrency: 1
# Set a custom video import timeout to not block import queue
timeout: '2 hours'
# Classic HTTP or all sites supported by youtube-dl https://rg3.github.io/youtube-dl/supportedsites.html
http:
# We recommend using an HTTP proxy if you enable HTTP import, to prevent private URL access from this server
# See https://docs.joinpeertube.org/maintain-configuration?id=security for more information
enabled: false
youtube_dl_release:
# Direct download URL to youtube-dl binary
# Github releases API is also supported
# Examples:
# * https://api.github.com/repos/ytdl-org/youtube-dl/releases
# * https://api.github.com/repos/yt-dlp/yt-dlp/releases
# * https://yt-dl.org/downloads/latest/youtube-dl
url: 'https://api.github.com/repos/yt-dlp/yt-dlp/releases'
# Release binary name: 'yt-dlp' or 'youtube-dl'
name: 'yt-dlp'
# Path to the python binary to execute for youtube-dl or yt-dlp
python_path: '/usr/bin/python3'
# IPv6 is very strongly rate-limited on most sites supported by youtube-dl
force_ipv4: false
# Magnet URI or torrent file (use classic TCP/UDP/WebSeed to download the file)
torrent:
# We recommend enabling magnet URI/torrent import only if you trust your users
# See https://docs.joinpeertube.org/maintain-configuration?id=security for more information
enabled: false
# Add ability for your users to synchronize their channels with external channels, playlists, etc.
video_channel_synchronization:
enabled: false
max_per_user: 10
check_interval: 1 hour
# Number of latest published videos to check and to potentially import when syncing a channel
videos_limit_per_synchronization: 10
auto_blacklist:
# New videos automatically blacklisted so moderators can review before publishing
videos:
of_users:
enabled: false
# Instance settings
instance:
name: '$${peertube-parameters:name}' #'Peertube'
short_description: '$${peertube-parameters:short_description}'
description: '$${peertube-parameters:description}'
terms: '$${peertube-parameters:terms}'
code_of_conduct: '$${peertube-parameters:code_of_conduct}'
# Who moderates the instance? What is the policy regarding NSFW videos? Political videos? etc
moderation_information: '$${peertube-parameters:moderation_information}'
# Why did you create this instance?
creation_reason: '$${peertube-parameters:creation_reason}'
# Who is behind the instance? A single person? A non profit?
administrator: '$${peertube-parameters:administrator}'
# How long do you plan to maintain this instance?
maintenance_lifetime: '$${peertube-parameters:maintenance_lifetime}'
# How will you pay the PeerTube instance server? With your own funds? With users donations? Advertising?
business_model: '$${peertube-parameters:business_model}'
# If you want to explain on what type of hardware your PeerTube instance runs
# Example: '2 vCore, 2GB RAM...'
hardware_information: '$${peertube-parameters:hardware_information}'
# What are the main languages of your instance? To interact with your users for example
# Uncomment or add the languages you want
# List of supported languages: https://peertube.cpy.re/api/v1/videos/languages
languages:
# - en
# - es
# - fr
# You can specify the main categories of your instance (dedicated to music, gaming or politics etc)
# Uncomment or add the category ids you want
# List of supported categories: https://peertube.cpy.re/api/v1/videos/categories
categories:
# - 1 # Music
# - 2 # Films
# - 3 # Vehicles
# - 4 # Art
# - 5 # Sports
# - 6 # Travels
# - 7 # Gaming
# - 8 # People
# - 9 # Comedy
# - 10 # Entertainment
# - 11 # News & Politics
# - 12 # How To
# - 13 # Education
# - 14 # Activism
# - 15 # Science & Technology
# - 16 # Animals
# - 17 # Kids
# - 18 # Food
default_client_route: '/videos/trending'
# Whether or not the instance is dedicated to NSFW content
# Enabling it will allow other administrators to know that you are mainly federating sensitive content
# Moreover, the NSFW checkbox on video upload will be automatically checked by default
is_nsfw: false
# By default, `do_not_list` or `blur` or `display` NSFW videos
# Could be overridden per user with a setting
default_nsfw_policy: 'do_not_list'
customizations:
javascript: '' # Directly your JavaScript code (without <script> tags). Will be eval at runtime
css: '' # Directly your CSS code (without <style> tags). Will be injected at runtime
# Robot.txt rules. To disallow robots to crawl your instance and disallow indexation of your site, add `/` to `Disallow:`
robots: |
User-agent: *
Disallow:
# /.well-known/security.txt rules. This endpoint is cached, so you may have to wait a few hours before viewing your changes
# To discourage researchers from testing your instance and disable security.txt integration, set this to an empty string
securitytxt:
'# If you would like to report a security issue\n# you may report it to:\nContact: https://github.com/Chocobozzz/PeerTube/blob/develop/SECURITY.md\nContact: mailto:'
services:
# Cards configuration to format video in Twitter
twitter:
username: '@Chocobozzz' # Indicates the Twitter account for the website or platform on which the content was published
# If true, a video player will be embedded in the Twitter feed on PeerTube video share
# If false, we use an image link card that will redirect on your PeerTube instance
# Change it to `true`, and then test on https://cards-dev.twitter.com/validator to see if you are whitelisted
whitelisted: false
followers:
instance:
# Allow or not other instances to follow yours
enabled: true
# Whether or not an administrator must manually validate a new follower
manual_approval: false
followings:
instance:
# If you want to automatically follow back new instance followers
# If this option is enabled, use the mute feature instead of deleting followings
# /!\ Don't enable this if you don't have a reactive moderation team /!\
auto_follow_back:
enabled: false
# If you want to automatically follow instances of the public index
# If this option is enabled, use the mute feature instead of deleting followings
# /!\ Don't enable this if you don't have a reactive moderation team /!\
auto_follow_index:
enabled: false
# Host your own using https://framagit.org/framasoft/peertube/instances-peertube#peertube-auto-follow
index_url: ''
theme:
default: 'default'
broadcast_message:
enabled: false
message: '' # Support markdown
level: 'info' # 'info' | 'warning' | 'error'
dismissable: false
search:
# Add ability to fetch remote videos/actors by their URI, that may not be federated with your instance
# If enabled, the associated group will be able to "escape" from the instance follows
# That means they will be able to follow channels, watch videos, list videos of non followed instances
remote_uri:
users: false
anonymous: false
# Use a third party index instead of your local index, only for search results
# Useful to discover content outside of your instance
# If you enable search_index, you must enable remote_uri search for users
# If you do not enable remote_uri search for anonymous user, your instance will redirect the user on the origin instance
# instead of loading the video locally
search_index:
enabled: false
# URL of the search index, that should use the same search API and routes
# than PeerTube: https://docs.joinpeertube.org/api-rest-reference.html
# You should deploy your own with https://framagit.org/framasoft/peertube/search-index,
# and can use https://search.joinpeertube.org/ for tests, but keep in mind the latter is an unmoderated search index
url: ''
# You can disable local search, so users only use the search index
disable_local_search: false
# If you did not disable local search, you can decide to use the search index by default
is_default_search: false
# PeerTube client/interface configuration
client:
videos:
miniature:
# By default PeerTube client displays author username
prefer_author_display_name: false
display_author_avatar: false
resumable_upload:
# Max size of upload chunks, e.g. '90MB'
# If null, it will be calculated based on network speed
max_chunk_size: null
menu:
login:
# If you enable only one external auth plugin
# You can automatically redirect your users on this external platform when they click on the login button
redirect_on_single_external_auth: false
Tests for Peertube software release
############################################################################## ##############################################################################
# #
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved. # Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
# #
# WARNING: This program as such is intended to be used by professional # WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential # programmers who take the whole responsibility of assessing all potential
...@@ -24,29 +24,29 @@ ...@@ -24,29 +24,29 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# #
############################################################################## ##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe from setuptools import setup, find_packages
import binascii
import os
import sys
class Recipe(GenericBaseRecipe): version = '0.0.1.dev0'
""" name = 'slapos.test.peertube'
novnc instance configuration. with open("README.md") as f:
""" long_description = f.read()
def install(self): setup(
return self.createWrapper( name=name,
self.options['path'], version=version,
( description="Test for SlapOS' peertube",
self.options['websockify-path'], long_description=long_description,
'--web', long_description_content_type='text/markdown',
self.options['novnc-location'], maintainer="Nexedi",
'--key=%s' % self.options['ssl-key-path'], maintainer_email="info@nexedi.com",
'--cert=%s' % self.options['ssl-cert-path'], url="https://lab.nexedi.com/nexedi/slapos",
'--ssl-only', packages=find_packages(),
'%s:%s' % (self.options['ip'], self.options['port']), install_requires=[
'%s:%s' % (self.options['vnc-ip'], self.options['vnc-port']), 'slapos.core',
), 'slapos.libnetworkcache',
wait_list=(self.options['ssl-key-path'], 'erp5.util',
self.options['ssl-cert-path']), 'requests',
) ],
zip_safe=True,
test_suite='test',
)
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import requests
import re
from mimetypes import guess_type
from json.decoder import JSONDecodeError
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestPeerTube(SlapOSInstanceTestCase):
def setUp(self):
self.connection_parameters = self.computer_partition.getConnectionParameterDict()
def test_get(self):
url = self.connection_parameters['frontend-url']
response = requests.get(url, verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
self.assertIn('PeerTube', response.text)
response = requests.get(url + "/feeds/videos.xml?sort=-trending", verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
self.assertIn('rss', response.text)
def test_video_upload(self):
api_url = self.connection_parameters['frontend-url']
# api_url: https://[2001:67c:1254:fd::9ee2]:9443
# self.connection_parameters
# {'backend-url': 'https://[2001:67c:1254:fd::9ee2]:9443', 'frontend-hostname': '[2001:67c:1254:fd::9ee2]:9443', 'frontend-url': 'https://[2001:67c:1254:fd::9ee2]:9443', 'password': '8ydTfRpv', 'username': 'root'}
response = requests.get(api_url + '/api/v1/oauth-clients/local', verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
try:
data = response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
client_id = data['client_id']
client_secret = data['client_secret']
username = self.connection_parameters['username']
password = self.connection_parameters['password']
auth_data = {
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'password',
'response_type': 'code',
'username': username,
'password': password
}
auth_result = requests.post(api_url + '/api/v1/users/token', data=auth_data, verify=False)
self.assertEqual(requests.codes['OK'], auth_result.status_code)
try:
auth_result_json = auth_result.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
token_type = auth_result_json['token_type']
access_token = auth_result_json['access_token']
headers = {
'Authorization': token_type + ' ' + access_token
}
video_name = "Small test video"
file_path = "./small.mp4"
file_mime_type = guess_type(file_path)[0]
with open(file_path, 'rb') as f:
video_data = {
'channelId': 1,
'name': video_name,
'commentEnabled': False,
}
upload_response = requests.post(
api_url + '/api/v1/videos/upload',
headers=headers,
data=video_data,
files={'videofile': (os.path.basename(file_path), f, file_mime_type)},
verify=False
)
try:
video_ids = upload_response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
# {'video': {'id': 7, 'shortUUID': 'nrnKJNCsRP7NkwRr51TK3e', 'uuid': 'ad9ae99d-07db-4e4c-adc3-73566d59a4c5'}}
self.assertIn('video', video_ids)
id = video_ids['video']['id']
# Check the video is uploaded, we can get its stats
response = requests.get(api_url + '/api/v1/videos/' + str(id) + '/stats/overall', headers=headers, verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
try:
result = response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
self.assertIn('totalWatchTime', response.json())
# Check the transcoding is enabled
response = requests.get(api_url + '/api/v1/config', headers=headers, verify=False)
try:
result = response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
# {
# 'hls': {'enabled': True},
# 'webtorrent': {'enabled': False},
# 'enabledResolutions': [144, 240, 360, 480, 720, 1080, 1440, 2160],
# 'profile': 'default',
# 'availableProfiles': ['default']
# }
self.assertIn("hls", result['transcoding'])
self.assertIn("True", str(result['transcoding']['hls']))
...@@ -242,6 +242,11 @@ setup = ${slapos-repository:location}/software/erp5testnode/test/ ...@@ -242,6 +242,11 @@ setup = ${slapos-repository:location}/software/erp5testnode/test/
egg = slapos.test.beremiz_ide egg = slapos.test.beremiz_ide
setup = ${slapos-repository:location}/software/beremiz-ide/test/ setup = ${slapos-repository:location}/software/beremiz-ide/test/
[slapos.test.peertube-setup]
<= setup-develop-egg
egg = slapos.test.peertube
setup = ${slapos-repository:location}/software/peertube/test/
[slapos.core-repository] [slapos.core-repository]
<= git-clone-repository <= git-clone-repository
repository = https://lab.nexedi.com/nexedi/slapos.core.git repository = https://lab.nexedi.com/nexedi/slapos.core.git
...@@ -316,6 +321,7 @@ eggs += ...@@ -316,6 +321,7 @@ eggs +=
${slapos.test.nextcloud-setup:egg} ${slapos.test.nextcloud-setup:egg}
${slapos.test.nginx-push-stream-setup:egg} ${slapos.test.nginx-push-stream-setup:egg}
${slapos.test.ors-amarisoft-setup:egg} ${slapos.test.ors-amarisoft-setup:egg}
${slapos.test.peertube-setup:egg}
${slapos.test.plantuml-setup:egg} ${slapos.test.plantuml-setup:egg}
${slapos.test.powerdns-setup:egg} ${slapos.test.powerdns-setup:egg}
${slapos.test.proftpd-setup:egg} ${slapos.test.proftpd-setup:egg}
...@@ -406,6 +412,7 @@ tests = ...@@ -406,6 +412,7 @@ tests =
nextcloud ${slapos.test.nextcloud-setup:setup} nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup} nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup} ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
peertube ${slapos.test.peertube-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup} plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup} powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup} proftpd ${slapos.test.proftpd-setup:setup}
......
...@@ -38,6 +38,8 @@ import requests ...@@ -38,6 +38,8 @@ import requests
from datetime import datetime, timedelta from datetime import datetime, timedelta
from six.moves.urllib.parse import urljoin from six.moves.urllib.parse import urljoin
from mimetypes import guess_type
from json.decoder import JSONDecodeError
from slapos.testing.testcase import installSoftwareUrlList from slapos.testing.testcase import installSoftwareUrlList
...@@ -48,6 +50,9 @@ from test import SlapOSInstanceTestCase, theia_software_release_url ...@@ -48,6 +50,9 @@ from test import SlapOSInstanceTestCase, theia_software_release_url
erp5_software_release_url = os.path.abspath( erp5_software_release_url = os.path.abspath(
os.path.join( os.path.join(
os.path.dirname(__file__), '..', '..', 'erp5', 'software.cfg')) os.path.dirname(__file__), '..', '..', 'erp5', 'software.cfg'))
peertube_software_release_url = os.path.abspath(
os.path.join(
os.path.dirname(__file__), '..', '..', 'peertube', 'software.cfg'))
def setUpModule(): def setUpModule():
...@@ -232,3 +237,209 @@ class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience): ...@@ -232,3 +237,209 @@ class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience):
# Check that the mariadb catalog was properly restored # Check that the mariadb catalog was properly restored
out = subprocess.check_output((mysql_bin, 'erp5', '-e', query), universal_newlines=True) out = subprocess.check_output((mysql_bin, 'erp5', '-e', query), universal_newlines=True)
self.assertIn(self._erp5_new_title, out, 'Mariadb catalog is not properly restored') self.assertIn(self._erp5_new_title, out, 'Mariadb catalog is not properly restored')
class TestTheiaResiliencePeertube(test_resiliency.TestTheiaResilience):
test_instance_max_retries = 12
backup_max_tries = 480
backup_wait_interval = 60
_connexion_parameters_regex = re.compile(r"{.*}", re.DOTALL)
_test_software_url = peertube_software_release_url
def _getPeertubeConnexionParameters(self, instance_type='export'):
out = self.captureSlapos(
'request', 'test_instance', self._test_software_url,
stderr=subprocess.STDOUT,
text=True,
)
print(out)
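# (illustrative) the captured output embeds a dict-like block such as
# {'frontend-url': 'https://[...]:9443', 'username': 'root', ...}, which the regex extracts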
return json.loads(self._connexion_parameters_regex.search(out).group(0).replace("'", '"'))
def test_twice(self):
# do nothing
pass
def _prepareExport(self):
super(TestTheiaResiliencePeertube, self)._prepareExport()
postgresql_partition = self._getPeertubePartitionPath('export', 'postgres')
postgresql_bin = os.path.join(postgresql_partition, 'bin', 'psql')
postgres_bin = os.path.join(postgresql_partition, 'bin', 'postgres')
postgresql_srv = os.path.join(postgresql_partition, 'srv', 'postgresql')
peertube_connection_info = self._getPeertubeConnexionParameters()
frontend_url = peertube_connection_info['frontend-url']
response = requests.get(frontend_url + '/api/v1/oauth-clients/local', verify=False)
self.assertEqual(requests.codes['OK'], response.status_code)
try:
data = response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
client_id = data['client_id']
client_secret = data['client_secret']
username = peertube_conenction_info['username']
password = peertube_conenction_info['password']
auth_data = {
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'password',
'response_type': 'code',
'username': username,
'password': password
}
auth_result = requests.post(frontend_url + '/api/v1/users/token', data=auth_data, verify=False)
try:
auth_result_json = auth_result.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
token_type = auth_result_json['token_type']
access_token = auth_result_json['access_token']
headers = {
'Authorization': token_type + ' ' + access_token
}
video_name = "Small test video"
file_path = "../../peertube/test/small.mp4"
pwd_file_path = os.path.realpath(__file__)
print(pwd_file_path)
file_mime_type = guess_type(file_path)[0]
with open(file_path, 'rb') as f:
video_data = {
'channelId': 1,
'name': video_name,
'commentEnabled': False,
'privacy': 1,
}
upload_response = requests.post(
frontend_url + '/api/v1/videos/upload',
headers=headers,
data=video_data,
files={'videofile': (os.path.basename(file_path), f, file_mime_type)},
verify=False
)
try:
video_ids = upload_response.json()
except JSONDecodeError:
self.fail("No json file returned! Maybe your Peertube API is incorrect.")
# e.g: {'video': {'id': 7, 'shortUUID': 'nrnKJNCsRP7NkwRr51TK3e', 'uuid': 'ad9ae99d-07db-4e4c-adc3-73566d59a4c5'}}
self.assertIn('video', video_ids)
# Check that the uploaded video has been recorded in the database
output = subprocess.check_output(
(postgresql_bin, '-h', postgresql_srv, '-U', 'peertube', '-d', 'peertube_prod',
'-c', 'SELECT * FROM "video"'),
universal_newlines=True)
self.assertIn("Small test video", output)
# Do a fake periodic update
# Compute backup date in the near future
soon = (datetime.now() + timedelta(minutes=4)).replace(second=0)
frequency = "%d * * * *" % soon.minute
params = 'frequency=%s' % frequency
# Update Peertube parameters
print('Requesting Peertube with parameters %s' % params)
self.checkSlapos('request', 'test_instance', self._test_software_url, '--parameters', params)
self.checkSlapos('node', 'instance')
self.callSlapos('node', 'restart', 'all')
# Wait until after the programmed backup date, and a bit more
t = (soon - datetime.now()).total_seconds()
self.assertLess(0, t)
time.sleep(t + 120)
self.callSlapos('node', 'status')
# Check that postgresql backup has started
postgresql_backup = os.path.join(postgresql_partition, 'srv', 'backup')
self.assertIn('peertube_prod-dump.db', os.listdir(postgresql_backup))
def _checkTakeover(self):
super(TestTheiaResiliencePeertube, self)._checkTakeover()
postgresql_partition = self._getPeertubePartitionPath('export', 'postgres')
postgresql_bin = os.path.join(postgresql_partition, 'bin', 'psql')
postgres_bin = os.path.join(postgresql_partition, 'bin', 'postgres')
postgresql_srv = os.path.join(postgresql_partition, 'srv', 'postgresql')
peertube_connection_info = self._getPeertubeConnexionParameters()
frontend_url = peertube_connection_info['frontend-url']
storage_path = os.path.join(postgresql_partition, 'var', 'www', 'peertube', 'storage')
# Wait for PeerTube to become reachable
for _ in range(5):
try:
response = requests.get(frontend_url, verify=False, allow_redirects=False)
except Exception:
time.sleep(20)
continue
if response.status_code != 200:
time.sleep(20)
continue
break
else:
self.fail('Failed to connect to Peertube')
# Get a video path; part of this path is used in the video URL
# e.g: var/www/peertube/storage/streaming-playlists/hls/XXXX/YYYY.mp4
# the hls directory holding the per-video folders
hls_path = os.path.join(storage_path, 'streaming-playlists', 'hls')
# Choose only one video path
video_path = None
for root, dirs, files in os.walk(hls_path):
for a_file in files:
if a_file.endswith('.mp4'):
video_path = os.path.join(root, a_file)
break
else:
continue
break
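# (The nested for/else with break above is the usual Python idiom to stop both
# loops as soon as the first .mp4 file is found.)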
# path like "streaming-playlists/hls/XXXX/YYYY.mp4"
self.assertIn('streaming-playlists', video_path)
streaming_video_path = video_path[video_path.index('streaming-playlists'):]
video_url = frontend_url + '/static/' + streaming_video_path
response = requests.get(video_url, verify=False)
# The video mp4 file is accessible through the URL
self.assertEqual(requests.codes['OK'], response.status_code)
video_feeds_url = frontend_url + '/feeds/videos.json'
response = requests.get(video_feeds_url, verify=False)
# The video feeds endpoint returns the correct status code
self.assertEqual(requests.codes['OK'], response.status_code)
try:
video_data = response.json()
except JSONDecodeError:
self.fail("No JSON returned! Maybe the PeerTube feeds URL is incorrect.")
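# For reference, the feeds payload relied on below is shaped roughly like
# (illustrative only): {"items": [{"title": "Small test video", ...}, ...]}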
# Check the first video title is in the response content
video_title = video_data['items'][0]['title']
self.assertIn("Small test video", video_title)
def _getPeertubePartition(self, servicename):
p = subprocess.Popen(
(self._getSlapos(), 'node', 'status'),
stdout=subprocess.PIPE, universal_newlines=True)
out, _ = p.communicate()
found = set()
for line in out.splitlines():
if servicename in line:
found.add(line.split(':')[0])
if not found:
raise Exception("Peertube %s partition not found" % servicename)
elif len(found) > 1:
raise Exception("Found several partitions for Peertube %s" % servicename)
return found.pop()
def _getPeertubePartitionPath(self, instance_type, servicename, *paths):
partition = self._getPeertubePartition(servicename)
return self.getPartitionPath(
instance_type, 'srv', 'runner', 'instance', partition, *paths)
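For reference, the PeerTube API flow exercised by the test above (fetch the local OAuth client, exchange username/password for a token, upload a video) can be reproduced outside the test harness. The following is a minimal sketch and not part of the software release: FRONTEND_URL, USERNAME and PASSWORD are placeholders that would normally come from the instance connection parameters.

import mimetypes
import os
import requests

FRONTEND_URL = "https://peertube.example"  # placeholder frontend URL
USERNAME = "root"                          # placeholder credentials
PASSWORD = "secret"

# 1. Fetch the local OAuth client id/secret.
client = requests.get(FRONTEND_URL + "/api/v1/oauth-clients/local", verify=False).json()

# 2. OAuth2 password grant: exchange the credentials for an access token.
token = requests.post(FRONTEND_URL + "/api/v1/users/token", data={
    'client_id': client['client_id'],
    'client_secret': client['client_secret'],
    'grant_type': 'password',
    'response_type': 'code',
    'username': USERNAME,
    'password': PASSWORD,
}, verify=False).json()
headers = {'Authorization': token['token_type'] + ' ' + token['access_token']}

# 3. Upload a small video file to channel 1.
file_path = "small.mp4"
with open(file_path, 'rb') as f:
    upload = requests.post(
        FRONTEND_URL + "/api/v1/videos/upload",
        headers=headers,
        data={'channelId': 1, 'name': 'Small test video', 'privacy': 1},
        files={'videofile': (os.path.basename(file_path), f, mimetypes.guess_type(file_path)[0])},
        verify=False,
    ).json()
print(upload)  # e.g. {'video': {'id': ..., 'uuid': ...}}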
...@@ -136,18 +136,19 @@ zc.buildout = 2.7.1+slapos019 ...@@ -136,18 +136,19 @@ zc.buildout = 2.7.1+slapos019
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2) # Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 2.0.3+slapos003 zc.recipe.egg = 2.0.3+slapos003
traitlets = 4.3.3 apache-libcloud = 2.4.0
Jinja2 = 2.11.3
Importing = 1.10
MarkupSafe = 2.0.1
PyYAML = 5.4.1
Werkzeug = 2.0.2
ZConfig = 2.9.3
asn1crypto = 1.3.0 asn1crypto = 1.3.0
atomicwrites = 1.4.0 atomicwrites = 1.4.0
atomize = 0.2.0
attrs = 22.1.0
backports.functools-lru-cache = 1.6.1:whl backports.functools-lru-cache = 1.6.1:whl
backports.lzma = 0.0.14 backports.lzma = 0.0.14
bcrypt = 3.1.4
CacheControl = 0.12.6:whl
certifi = 2022.6.15
cffi = 1.14.0 cffi = 1.14.0
chardet = 3.0.4
charset-normalizer = 2.1.1
click = 8.1.3 click = 8.1.3
cliff = 2.8.3:whl cliff = 2.8.3:whl
cmd2 = 0.7.0 cmd2 = 0.7.0
...@@ -155,44 +156,75 @@ collective.recipe.shelloutput = 0.1 ...@@ -155,44 +156,75 @@ collective.recipe.shelloutput = 0.1
collective.recipe.template = 2.0 collective.recipe.template = 2.0
configparser = 4.0.2:whl configparser = 4.0.2:whl
contextlib2 = 0.6.0.post1 contextlib2 = 0.6.0.post1
croniter = 0.3.25
cryptography = 3.3.2 cryptography = 3.3.2
dateparser = 0.7.6 dateparser = 0.7.6
decorator = 4.3.0 decorator = 4.3.0
distro = 1.7.0
dnspython = 1.16.0
enum34 = 1.1.10
erp5.util = 0.4.74
feedparser = 5.2.1
Flask = 1.1.2
funcsigs = 1.0.2 funcsigs = 1.0.2
functools32 = 3.2.3.post2
gevent = 20.9.0 gevent = 20.9.0
geventmp = 0.0.1 geventmp = 0.0.1
gitdb2 = 2.0.5
GitPython = 2.1.11
greenlet = 0.4.17 greenlet = 0.4.17
idna = 2.9 idna = 2.9
igmp = 1.0.4 igmp = 1.0.4
Importing = 1.10
importlib-metadata = 1.7.0:whl importlib-metadata = 1.7.0:whl
inotify-simple = 1.1.1 inotify-simple = 1.1.1
ipaddress = 1.0.23
itsdangerous = 0.24 itsdangerous = 0.24
Jinja2 = 2.11.3
jsonschema = 3.0.2:whl
lock-file = 2.0 lock-file = 2.0
lockfile = 0.12.2:whl
lxml = 4.9.1 lxml = 4.9.1
MarkupSafe = 2.0.1
meld3 = 1.0.2 meld3 = 1.0.2
mock = 3.0.5 mock = 3.0.5
more-itertools = 5.0.0 more-itertools = 5.0.0
msgpack = 0.6.2
netaddr = 0.7.19 netaddr = 0.7.19
netifaces = 0.10.7
packaging = 16.8
paramiko = 2.11.0
passlib = 1.7.1
pathlib2 = 2.3.5 pathlib2 = 2.3.5
pbr = 2.0.0 pbr = 2.0.0
pim-dm = 1.4.0nxd001 pim-dm = 1.4.0nxd001
pkgconfig = 1.5.1
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pluggy = 0.13.1:whl
prettytable = 0.7.2 prettytable = 0.7.2
psutil = 5.8.0 psutil = 5.8.0
pluggy = 0.13.1:whl
py = 1.11.0:whl py = 1.11.0:whl
py-mld = 1.0.3
pyasn1 = 0.4.5
pycparser = 2.20
pycurl = 7.43.0
PyNaCl = 1.3.0
pyOpenSSL = 19.1.0 pyOpenSSL = 19.1.0
pyparsing = 3.0.9:whl pyparsing = 3.0.9:whl
py-mld = 1.0.3
pyroute2 = 0.6.9 pyroute2 = 0.6.9
pyrsistent = 0.18.1
PyRSS2Gen = 1.1
pytest-runner = 5.2:whl
python-dateutil = 2.7.3:whl
pytz = 2022.2.1 pytz = 2022.2.1
PyYAML = 5.4.1
regex = 2020.9.27 regex = 2020.9.27
requests = 2.28.1 requests = 2.28.1
charset-normalizer = 2.1.1 rpdb = 0.1.5
rubygemsrecipe = 0.4.3
scandir = 1.10.0 scandir = 1.10.0
setproctitle = 1.1.10 setproctitle = 1.1.10
setuptools-dso = 1.7 setuptools-dso = 1.7
rubygemsrecipe = 0.4.3
six = 1.16.0 six = 1.16.0
slapos.cookbook = 1.0.291 slapos.cookbook = 1.0.291
slapos.core = 1.8.4 slapos.core = 1.8.4
...@@ -203,70 +235,38 @@ slapos.recipe.build = 0.56 ...@@ -203,70 +235,38 @@ slapos.recipe.build = 0.56
slapos.recipe.cmmi = 0.19 slapos.recipe.cmmi = 0.19
slapos.recipe.template = 5.0 slapos.recipe.template = 5.0
slapos.toolbox = 0.128 slapos.toolbox = 0.128
smmap2 = 2.0.5
stevedore = 1.21.0:whl stevedore = 1.21.0:whl
subprocess32 = 3.5.4 subprocess32 = 3.5.4
supervisor = 4.1.0
traitlets = 4.3.3
tzlocal = 1.5.1
unicodecsv = 0.14.1 unicodecsv = 0.14.1
uritemplate = 3.0.0
urllib3 = 1.26.12
wcwidth = 0.2.5 wcwidth = 0.2.5
Werkzeug = 2.0.2
wheel = 0.35.1:whl wheel = 0.35.1:whl
xml-marshaller = 1.0.2 xml-marshaller = 1.0.2
zc.lockfile = 1.4 zc.lockfile = 1.4
ZConfig = 2.9.3
zdaemon = 4.2.0 zdaemon = 4.2.0
zipp = 1.2.0:whl zipp = 1.2.0:whl
zodburi = 2.5.0 zodburi = 2.5.0
zope.event = 3.5.2 zope.event = 3.5.2
paramiko = 2.11.0
PyNaCl = 1.3.0
bcrypt = 3.1.4
CacheControl = 0.12.6:whl
msgpack = 0.6.2
Flask = 1.1.2
GitPython = 2.1.11
gitdb2 = 2.0.5
smmap2 = 2.0.5
PyRSS2Gen = 1.1
apache-libcloud = 2.4.0
atomize = 0.2.0
croniter = 0.3.25
dnspython = 1.16.0
enum34 = 1.1.10
erp5.util = 0.4.74
feedparser = 5.2.1
functools32 = 3.2.3.post2
attrs = 22.1.0
pyrsistent = 0.18.1
pytest-runner = 5.2:whl
ipaddress = 1.0.23
jsonschema = 3.0.2:whl
lockfile = 0.12.2:whl
netifaces = 0.10.7
packaging = 16.8
passlib = 1.7.1
pyasn1 = 0.4.5
pycparser = 2.20
pycurl = 7.43.0
python-dateutil = 2.7.3:whl
rpdb = 0.1.5
supervisor = 4.1.0
tzlocal = 1.5.1
uritemplate = 3.0.0
zope.interface = 5.4.0 zope.interface = 5.4.0
certifi = 2022.6.15
chardet = 3.0.4
urllib3 = 1.26.12
pkgconfig = 1.5.1
distro = 1.7.0
[versions:python2] [versions:python2]
attrs = 18.2.0 attrs = 18.2.0
certifi = 2020.4.5.1
charset-normalizer = 2.0.12
click = 6.7 click = 6.7
distro = 1.6.0 distro = 1.6.0
Werkzeug = 1.0.1
requests = 2.27.1
charset-normalizer = 2.0.12
pyparsing = 2.2.0 pyparsing = 2.2.0
certifi = 2020.4.5.1
pyrsistent = 0.16.1 pyrsistent = 0.16.1
requests = 2.27.1
Werkzeug = 1.0.1
[versions:sys.version_info < (3,8)] [versions:sys.version_info < (3,8)]
MarkupSafe = 1.0 MarkupSafe = 1.0