Commit f619403c authored by Julien Muchembled, committed by Xavier Thompson

version up: slapos.cookbook 1.0.213

Adapt all promise sections to changes in plugin promise recipe.
parent a759248b
Pipeline #17806 failed
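Every hunk below applies the same mechanical change. Sections inheriting from [monitor-promise-base] now name the promise class with a `promise =` option instead of `module =`, and sections using the slapos.cookbook:promise.plugin recipe drop the inline `content = from ... import RunPromise` (together with `file =` and `mode =`) in favour of a single `module =` option naming the promise module, keeping the `output =` path of the generated plugin. A condensed before/after sketch assembled from the hunks below; the [example-cpu-load-promise] part name is illustrative, everything else is copied from the diff:

# before
[nginx-listen-promise]
<= monitor-promise-base
module = check_socket_listening
name = nginx_listen.py
config-host = $${nginx-configuration:ip}
config-port = $${nginx-configuration:port}

# after
[nginx-listen-promise]
<= monitor-promise-base
promise = check_socket_listening
name = nginx_listen.py
config-host = $${nginx-configuration:ip}
config-port = $${nginx-configuration:port}

# before (promise.plugin recipe; part name illustrative)
[example-cpu-load-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
  slapos.toolbox
file = ${monitor-conf-parameters:promise-output-file}
content =
  from slapos.promise.plugin.check_server_cpu_load import RunPromise
output = ${directory:plugins}/system-CPU-load-check.py
mode = 600
config-cpu-load-threshold = ${slap-parameter:cpu-load-threshold}

# after (promise.plugin recipe; part name illustrative)
[example-cpu-load-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
  slapos.toolbox
module = slapos.promise.plugin.check_server_cpu_load
output = ${directory:plugins}/system-CPU-load-check.py
config-cpu-load-threshold = ${slap-parameter:cpu-load-threshold}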
......@@ -46,7 +46,7 @@ md5sum = ae4a0043414336a521b524d9c95f1c68
[template-pullrdiffbackup]
filename = instance-pullrdiffbackup.cfg.in
md5sum = f2e6f30a0e8228cbfb93eaaae10fe884
md5sum = 45a4faa217ea5b83ecf271791e1632dd
[template]
filename = instance.cfg.in
......
......@@ -107,8 +107,7 @@ recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
output = $${directory:plugin}/{{ slave_reference }}_check_backup.py
content =
from slapos.promise.plugin.backupserver_check_backup import RunPromise
module = slapos.promise.plugin.backupserver_check_backup
config-status_dirbasename = $${variables:status_dirbasename}
config-status_name = {{ '$${' ~ slave_reference }}-backup-script:status_name}
config-status_fullpath = {{ '$${' ~ slave_reference }}-backup-script:status_log}
......@@ -117,7 +116,6 @@ config-cron_frequency = {{ frequency }}
config-monitor_url = $${monitor-publish:monitor-base-url}
config-statistic_dirbasename = $${variables:statistic_dirbasename}
config-statistic_name = {{ '$${' ~ slave_reference }}-backup-script:statistic_name}
mode = 600
{% do part_list.append("%s-promise-check-backup" % slave_reference) -%}
[{{ slave_reference }}-backup-script]
......@@ -194,7 +192,7 @@ virtual-depends =
[nginx-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx_listen.py
config-host = $${nginx-configuration:ip}
config-port = $${nginx-configuration:port}
......
......@@ -22,15 +22,15 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 385674eb3e79bdd773ef6cec1d1281e8
md5sum = 0950e09ad1f03f0789308f5f7a7eb1b8
[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 99741e618b1c249bd17c9e02778d74ee
md5sum = 7c2e52b76c42bed95702763c344e41dd
[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = 9bb51f663f69d66b5b3708bf892dd3e6
md5sum = 313671d343ceccfca5af1baa642132c5
[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
......@@ -102,7 +102,7 @@ md5sum = 38792c2dceae38ab411592ec36fff6a8
[profile-kedifa]
filename = instance-kedifa.cfg.in
md5sum = eab5ae579471ca86b40bd2da3b53fefa
md5sum = dfb4dabd1e4094de1276d171f998ef47
[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
......
......@@ -539,7 +539,7 @@ context =
[trafficserver-promise-listen-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = trafficserver-port-listening.py
config-host = ${trafficserver-variable:local-ip}
config-port = ${trafficserver-variable:input-port}
......@@ -552,7 +552,7 @@ environment = TS_ROOT=${buildout:directory}
[trafficserver-promise-cache-availability]
<= monitor-promise-base
module = trafficserver_cache_availability
promise = trafficserver_cache_availability
name = trafficserver-cache-availability.py
config-wrapper-path = ${trafficserver-ctl:wrapper-path}
......@@ -653,48 +653,48 @@ context =
[promise-frontend-caddy-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = frontend-caddy-configuration-promise.py
config-verification-script = ${promise-helper-last-configuration-state:rendered}
[promise-caddy-frontend-v4-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv4_https.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v4-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv4_http.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${configuration:plain_http_port}
[promise-caddy-frontend-v6-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv6_https.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v6-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = caddy_frontend_ipv6_http.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${configuration:plain_http_port}
[promise-backend-haproxy-http]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = backend_haproxy_http.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${backend-haproxy-configuration:http-port}
[promise-backend-haproxy-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = backend_haproxy_https.py
config-host = {{ instance_parameter_dict['ipv4-random'] }}
config-port = ${backend-haproxy-configuration:https-port}
......@@ -798,7 +798,7 @@ extra-context =
[promise-backend-haproxy-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = backend-haproxy-configuration.py
config-verification-script = ${promise-backend-haproxy-configuration-helper:rendered}
......@@ -888,7 +888,7 @@ context =
[monitor-verify-re6st-connectivity]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = re6st-connectivity.py
config-url = ${configuration:re6st-verification-url}
......@@ -924,7 +924,7 @@ return = domain secure_access
[backend-haproxy-statistic-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = backend-haproxy-statistic-frontend.py
config-http-code = 401
config-url =
......@@ -976,7 +976,7 @@ extra-context =
[promise-slave-introspection-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
promise = validate_frontend_configuration
name = slave-introspection-configuration.py
config-verification-script = ${promise-slave-introspection-configuration-helper:rendered}
......@@ -993,7 +993,7 @@ context =
[promise-slave-introspection-https]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slave_introspection_https.py
config-host = {{ instance_parameter_dict['ipv6-random'] }}
config-port = ${frontend-configuration:slave-introspection-https-port}
......@@ -1008,7 +1008,7 @@ delaycompress =
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
......
......@@ -382,7 +382,7 @@ kedifa-csr_id-certificate = ${request-kedifa:connection-csr_id-certificate}
{% set section_part = '${request-' + frontend %}
[{{ part_name }}]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check-backend-haproxy-statistic-url-{{ frontend }}.py
config-url =
{{ section_part }}:connection-backend-haproxy-statistic-url}
......@@ -865,15 +865,15 @@ rendered = ${directory:etc}/nginx-rejected-slave.conf
[promise-rejected-slave-publish-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = rejected-slave-publish-ip-port-listening.py
config-host = ${rejected-slave-publish-configuration:ip}
config-port = ${rejected-slave-publish-configuration:port}
[rejected-slave-promise]
<= monitor-promise-base
module = check_socket_listening
module = check_file_state
promise = check_socket_listening
promise = check_file_state
name = rejected-slave.py
config-filename = ${rejected-slave-json:rendered}
config-state = empty
......
......@@ -193,7 +193,7 @@ template = inline:
[promise-expose-csr_id-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
......@@ -250,7 +250,7 @@ extra-context =
[promise-kedifa-http-reply]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = kedifa-http-reply.py
# Kedifa replies 400 on /, so use it to be sure that Kedifa replied
config-http-code = 400
......@@ -331,7 +331,7 @@ monitor-base-url = ${monitor-instance-parameter:monitor-base-url}
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
......
......@@ -353,7 +353,7 @@ filename = {{ '%s.conf' % slave_reference }}
{%- do part_list.append(monitor_ipv6_section_title) %}
[{{ monitor_ipv6_section_title }}]
<= monitor-promise-base
module = check_icmp_packet_lost
promise = check_icmp_packet_lost
name = {{ monitor_ipv6_section_title }}.py
config-address = {{ dumps(monitor_ipv6_test) }}
# promise frequency in minutes (2 times/day)
......@@ -365,7 +365,7 @@ config-frequency = 720
{%- do part_list.append(monitor_ipv4_section_title) %}
[{{ monitor_ipv4_section_title }}]
<= monitor-promise-base
module = check_icmp_packet_lost
promise = check_icmp_packet_lost
name = {{ monitor_ipv4_section_title }}.py
config-address = {{ dumps(monitor_ipv4_test) }}
config-ipv4 = true
......@@ -632,7 +632,7 @@ template = inline:
[promise-expose-csr_id-ip-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
......@@ -655,7 +655,7 @@ commands =
[promise-logrotate-setup]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
${logrotate:wrapper-path} -d
\ No newline at end of file
......@@ -18,4 +18,4 @@ md5sum = e986de01a57161b32425f1cd3ccac924
[template-cloudooo-instance]
filename = instance-cloudooo.cfg.in
md5sum = 6e4bdb1df02aed5c96ccf7b9c3c71b89
md5sum = 3e6830c21c206b3ae1140375e5e63b46
......@@ -105,7 +105,7 @@ ssl-session-cache = ${directory:log}/apache-ssl-session-cache
[apache-promise]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = apache.py
config-url = https://{{ ipv4 }}:{{ apache_dict.values()[0][0] }}
# XXX cloudooo replies "400 Bad Request" for GET on / but what we want to check
......
[instance]
filename = instance.cfg
md5sum = 646e50cfa93681e8bd85767621c7a39d
md5sum = ddd17fab15afa5a27cdc0761fbc8f34c
......@@ -41,7 +41,7 @@ wrapper-path = $${directory:service}/dream_platform
[dream-platform-url-available]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${dream_platform_parameter:url}
......
......@@ -18,4 +18,4 @@ md5sum = 6dcbe21f99aa6675e8e3b74bc9cbb0e6
[template-default]
filename = instance-default.cfg.jinja.in
md5sum = ffb6b74b55e7ca01666254353ae1cebe
md5sum = 536a28ff250c691ca374f75aa5f0aa76
......@@ -261,7 +261,7 @@ instance-promises =
[shellinabox-frontend-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${shellinabox-frontend:hostname}
config-port = $${shellinabox-frontend:port}
......@@ -278,6 +278,6 @@ config-port = $${shellinabox-frontend:port}
[testnode-log-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${testnode-log-frontend:connection-secure_access}
......@@ -18,4 +18,4 @@ md5sum = c265bf7ec199190ba9f77960cac5da38
[template-fluentd]
filename = instance-fluentd.cfg
md5sum = 49c6f3b9edda5225d141220b15197845
md5sum = 35f9d95f6a75e28bfeafc3568ca16f05
......@@ -12,7 +12,7 @@ environment =
{% do part_list.append(promise_section_title) -%}
[{{ promise_section_title }}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = {{ promise_section_title }}.py
config-host = $${slap-configuration:ipv6-random}
config-port = {{ port }}
......
......@@ -54,7 +54,7 @@ md5sum = 0f1ec4077dab586cc003ae13f689eda2
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = 6b34d4b96ae0067977fa509046d71231
md5sum = f099d01baefe41c8f0944c2437b30881
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
......
......@@ -430,7 +430,7 @@ tune-command =
# [promise-<something>] to check <something> by url
[promise-byurl]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-http-code = 200
......@@ -472,7 +472,7 @@ depend =
[promise-postgresql]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-postgresql.py
config-command =
{{ postgresql_location }}/bin/psql \
......@@ -515,7 +515,7 @@ depend =
[promise-redis]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-redis.py
config-command = ${service-redis:promise_wrapper}
......@@ -769,7 +769,7 @@ depend =
# XXX this depends on gitlab-workhorse being up
# (nginx is configured to proxy all requests to gitlab-workhorse)
config-url = ${backend-info:url}/users/sign_in
module = check_url_available
promise = check_url_available
[logrotate-entry-nginx]
<= logrotate-entry-base
......
......@@ -4,7 +4,7 @@ md5sum = 5dfeeb5eca125dcaa5f9e537f941dd41
[instance-headless-chromium]
_update_hash_filename_ = instance-headless-chromium.cfg.in
md5sum = fad685238b26ca20537c12ce7432e7e7
md5sum = 7392d20e48dbc599eb8e9d02e8095bbf
[template-nginx-conf]
_update_hash_filename_ = templates/nginx.conf.in
......
......@@ -174,7 +174,7 @@ monitor-httpd-port = {{ parameter_dict['monitor-httpd-port'] }}
# queried with the correct credentials.
[frontend-ok-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = headless-chromium-frontend-ok.py
url = ${remote-debugging-frontend:connection-secure_access}
config-url = ${:url}
......@@ -185,7 +185,7 @@ config-password = ${frontend-instance-password:passwd}
# when queried with no credentials.
[frontend-secure-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = headless-chromium-frontend-secure.py
url = ${remote-debugging-frontend:connection-secure_access}
config-url = ${:url}
......
......@@ -14,4 +14,4 @@
# not need these here).
[instance-profile]
filename = instance.cfg.in
md5sum = c771dee1ef9aedad7c6ebf9418afe08e
md5sum = 483b76d8e6bf72d72a38a3f7bf66fe08
......@@ -87,7 +87,7 @@ wrapper-path = ${directory:service}/helloweb-${:kind}
# promise, that checks that helloweb service is alive
[helloweb-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = helloweb-${:kind}.py
{# macro to instantiate service of `kind` to listen on `port` #}
......
......@@ -21,7 +21,7 @@ md5sum = 9e486efe4ab1aba8cb72b04f6c6da8ad
[instance_html5as]
_update_hash_filename_ = instance_html5as.cfg.in
md5sum = 283440057c659bde2ae7fcc2c4c5b781
md5sum = f86b2f37c0acd21ca1f41d90c5477d75
[template_nginx_conf]
_update_hash_filename_ = templates/nginx_conf.in
......
......@@ -185,7 +185,7 @@ context =
# Port Listening checking promise
[port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx-port-listening.py
config-host = ${html5as:ip}
config-port = ${html5as:port}
......@@ -235,7 +235,7 @@ return = domain secure_access
# Add a promise to make sure the cdn is properly configured
[html5as-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = html5as-http-frontend.py
url = ${html5as-frontend:connection-secure_access}
config-url = ${:url}
......@@ -22,7 +22,7 @@ md5sum = 87781e6bcb523bb8434888d5f984f36c
[template-validator]
filename = instance-validator.cfg.in
md5sum = 9d12472bb2e337d3cc18f2cc6f235425
md5sum = dc8b8d03b0af9cd32398d1fe86267bb7
[template]
filename = instance.cfg.in
......
......@@ -55,7 +55,7 @@ scheme = https
[tomcat-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = tomcat_listen.py
config-host = $${tomcat-configuration:ip}
config-port = $${tomcat-configuration:port}
......
......@@ -15,7 +15,7 @@
[template-cfg]
filename = instance.cfg.in
md5sum = 9653104b2217dc26b23f9c1b997124ca
md5sum = 2e4bed8c7b78c410b28f8becf81da596
[template_nginx_conf]
_update_hash_filename_ = templates/nginx_conf.in
......
......@@ -124,7 +124,7 @@ hash-files =
[hugo-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = hugo-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:hugo-port}
......@@ -144,7 +144,7 @@ return = domain secure_access
[hugo-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = hugo-http-frontend.py
url = ${hugo-frontend:connection-secure_access}
config-url = ${:url}
......@@ -212,7 +212,7 @@ template =
[nginx-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = nginx-port-listening.py
config-host = ${hugo:ip}
config-port = ${hugo:nginx-port}
......@@ -238,7 +238,7 @@ return = domain secure_access
[nginx-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = nginx-http-frontend.py
url = ${nginx-frontend:connection-secure_access}
config-url = ${:url}
......
......@@ -19,7 +19,7 @@ md5sum = 6c17361a49cfc47564063b867aab6e8c
[template-jscrawler]
filename = instance-jscrawler.cfg.jinja2.in
md5sum = f61e0507717447e47c76a2b2712f17f4
md5sum = 33bfddbc23fa794ab97770ef7776b390
[template-jscrawler-builder]
filename = template-jscrawler.builder.sh.in
......
......@@ -47,7 +47,7 @@ return = secure_access domain
[jscrawler-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = jscrawler_frontend.py
config-url = ${request-jscrawler-frontend:connection-secure_access}
......@@ -59,7 +59,7 @@ log = ${httpd-wrapper:log-file}
[httpd-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = httpd-listen.py
config-host = ${httpd-wrapper:host}
config-port = ${httpd-wrapper:port}
......
......@@ -19,7 +19,7 @@ md5sum = de37ec3d4adb0be4c67bcc7397f27c91
[instance-jupyter]
filename = instance-jupyter.cfg.in
md5sum = cbc90e517ae3680ab8bef04c6f503af5
md5sum = 95e3da48abdd257fb9d5dbdf14ea87b9
[jupyter-notebook-config]
filename = jupyter_notebook_config.py.jinja
......
......@@ -146,7 +146,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid}
[frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-parameter:url}
......@@ -160,7 +160,7 @@ sla-instance_guid = ${slap-parameter:frontend-additional-instance-guid}
[frontend-additional-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_additional_promise.py
config-url = ${publish-connection-parameter:url-additional}
{% endif %}
......
......@@ -19,7 +19,7 @@ md5sum = b129c9b2a614563d3f7c3f9e906d59f2
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = d0f96be4e80b96e6ac33f6d474767b13
md5sum = 78c3b69a2b18106cd6e289732116881d
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
......@@ -27,11 +27,11 @@ md5sum = 59b92e1300aad4e9b116c532caf7d042
[template-kvm-resilient]
filename = instance-kvm-resilient.cfg.jinja2
md5sum = 7de5756f59ef7d823cd8ed33e6d15230
md5sum = a0fd4911401cbbda74323e8d1c7b18ad
[template-kvm-import]
filename = instance-kvm-import.cfg.jinja2.in
md5sum = 7b15fdc6f19b1f44ff5a56586102ffe2
md5sum = 0415353c961ece593dd5d6457dab5200
[template-kvm-import-script]
filename = template/kvm-import.sh.jinja2
......@@ -47,7 +47,7 @@ md5sum = b617d64de73de1eed518185f310bbc82
[template-nbd]
filename = instance-nbd.cfg.jinja2
md5sum = 259e06f289f68297e0609e4ab1af8e86
md5sum = 4bcb07c1a9223e2d956651aa25d23654
[template-ansible-promise]
filename = template/ansible-promise.in
......@@ -75,7 +75,7 @@ md5sum = fb330a796fadb6cd5c85217f80a42af3
[template-httpd]
filename = instance-kvm-http.cfg.in
md5sum = f4bcde62e008c2da9c65617ba7f73f08
md5sum = d57764bb7135037b4d21543b2f56ce1d
[image-download-controller]
_update_hash_filename_ = template/image-download-controller.py
......
......@@ -65,7 +65,7 @@ stop-on-error = true
[httpd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache-httpd.py
config-host = ${apache-conf:ip}
config-port = ${apache-conf:port}
......@@ -91,6 +91,6 @@ mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
......@@ -102,13 +102,13 @@ mode = 700
[kvm-frontend-url-promise]
# Check that url parameter is complete
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-frontend-url.py
config-command = ${kvm-frontend-url-bin:output}
[kvm-backend-url-promise]
# Check that backend url is reachable
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
......@@ -91,7 +91,7 @@ bytes = 8
## boot-image-url-select support BEGIN
[empty-file-state-base-select-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -190,7 +190,7 @@ config-filename = ${boot-image-url-select-download-wrapper:error-state-file}
## boot-image-url-list support BEGIN
[empty-file-state-base-list-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -289,7 +289,7 @@ config-filename = ${boot-image-url-list-download-wrapper:error-state-file}
## virtual-hard-drive-url support BEGIN
[empty-file-state-base-virtual-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = ${:_buildout_section_name_}.py
config-state = empty
# It's very hard to put the username and password correctly, after schema://
......@@ -586,7 +586,7 @@ command-line = ${kvm-controller:rendered}
[kvm-vnc-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = vnc_promise.py
config-host = ${kvm-parameter-dict:vnc-ip}
config-port = ${kvm-parameter-dict:vnc-port}
......@@ -614,7 +614,7 @@ mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
......@@ -643,7 +643,7 @@ context =
[kvm-started-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = qemu-virtual-machine-is-ready.py
config-command = ${kvm-started-bin:rendered}
......@@ -708,7 +708,7 @@ wrapper = ${directory:bin}/websockify
[novnc-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = novnc_promise.py
config-host = ${novnc-instance:ip}
config-port = ${novnc-instance:port}
......@@ -764,7 +764,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid}
[frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
......@@ -778,7 +778,7 @@ sla-instance_guid = ${slap-parameter:frontend-additional-instance-guid}
[frontend-additional-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = frontend_additional_promise.py
config-url = ${publish-connection-information:url-additional}
{% endif %}
......@@ -801,7 +801,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[httpd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = httpd.py
config-host = ${httpd:host}
config-port = ${httpd:port}
......@@ -1029,7 +1029,7 @@ context =
[ansible-vm-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ansible_{{ name }}.py
config-command = ${ansible-vm-bin:rendered}
......
......@@ -43,7 +43,7 @@ rendered = ${rootdirectory:bin}/check-nbd-running.sh
[nbd-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = nbd_promise.py
config-command = ${nbd-checker-bin:rendered}
......@@ -65,7 +65,7 @@ key = ${gen-passwd:passwd}
[onetimeupload-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = onetimeupload_promise.py
config-host = ${onetimeupload-instance:ip}
config-port = ${onetimeupload-instance:port}
......
[instance-profile]
filename = instance.cfg.in
md5sum = 8e48fa7c66a59b3d5faf0216922a574f
md5sum = 143f46b125389f39905226ec9482ce2a
......@@ -45,7 +45,7 @@ promises =
[metabase-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-instance:url}/api/session/properties
......@@ -91,7 +91,7 @@ command-line =
[postgresql-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = promise-postgresql.py
config-command = $${postgresql-psql:wrapper-path} -c '\q'
......
......@@ -18,7 +18,7 @@ md5sum = b6c2df0d4a62473d6dae26b10c0a4adc
[template-monitor]
_update_hash_filename_ = instance-monitor.cfg.jinja2
md5sum = 165a15672fc85981f68b9af2d6253254
md5sum = f23c007d6d6aed137cfd54aaa7ba52ab
[json-test-template]
_update_hash_filename_ = json-test-template.json.in.jinja2
......@@ -26,7 +26,7 @@ md5sum = 2eb5596544d9c341acf653d4f7ce2680
[template-monitor-edgetest-basic]
_update_hash_filename_ = instance-monitor-edgetest-basic.cfg.jinja2
md5sum = 61309a48f7b0135cba21b09247a2d8fd
md5sum = 05c00ac393b50cfdef5d3bc5af93fe98
[template-monitor-edgetest]
_update_hash_filename_ = instance-monitor-edgetest.cfg.jinja2
......@@ -34,7 +34,7 @@ md5sum = 3c8ab4e78f66c974eb95afc595a13514
[template-monitor-edgebot]
_update_hash_filename_ = instance-monitor-edgebot.cfg.jinja2
md5sum = 2ac74559d6108ca0dbabb872f1071e44
md5sum = 436bb5251c8f1cd1e64bd5d3987d699c
[network-bench-cfg]
filename = network_bench.cfg.in
......
......@@ -36,7 +36,7 @@
{%- set safe_name = part_id.replace('_', '').replace('.', '-').replace(' ', '-') %}
[{{part_id}}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = {{ safe_name }}.py
config-report = http_query
config-url = {{ slave['url'] }}
......@@ -51,7 +51,7 @@ config-json-file = ${surykatka-config-{{ class }}:json}
[surykatka-bot-promise-{{ class }}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = surykatka-bot-promise-{{ class }}.py
config-report = bot_status
config-json-file = ${surykatka-config-{{ class }}:json}
......
......@@ -58,7 +58,7 @@
{%- do PART_LIST.append(part_name) %}
[{{part_name}}]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = {{ promise_name }}.py
config-report = http_query
config-url = {{ url }}
......@@ -75,7 +75,7 @@ config-json-file = ${surykatka-config-{{ class }}:json}
{%- do PART_LIST.append('surykatka-bot-%i-promise' % (class,)) %}
[surykatka-bot-{{ class }}-promise]
<= monitor-promise-base
module = check_surykatka_json
promise = check_surykatka_json
name = surykatka-bot-{{ class }}.py
config-report = bot_status
config-json-file = ${surykatka-config-{{ class }}:json}
......@@ -110,7 +110,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
{%- do PART_LIST.append('surykatka-json-%i-promise'% (class,)) %}
[surykatka-json-{{ class }}-promise]
<= monitor-promise-base
module = check_file_state
promise = check_file_state
name = surykatka-json-{{ class }}.py
config-filename = ${surykatka-config-{{ class }}:json}
config-state = not-empty
......
......@@ -82,7 +82,7 @@ wrapper-path = ${monitor-directory:bin}/monitor-collect-csv-dump
[monitor-check-memory-usage]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory-usage.py
config-command = {{ buildout_bin}}/check-computer-memory -db ${monitor-instance-parameter:collector-db} --threshold ${slap-parameter:memory-percent-threshold} --unit percent
......@@ -90,11 +90,8 @@ config-command = {{ buildout_bin}}/check-computer-memory -db ${monitor-instance-
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
file = ${monitor-conf-parameters:promise-output-file}
content =
from slapos.promise.plugin.check_server_cpu_load import RunPromise
module = slapos.promise.plugin.check_server_cpu_load
output = ${directory:plugins}/system-CPU-load-check.py
mode = 600
config-cpu-load-threshold = ${slap-parameter:cpu-load-threshold}
[publish-connection-information]
......
......@@ -14,4 +14,4 @@
# not need these here).
[template-instance]
filename = instance.cfg
md5sum = 0974248c0b0ad5da45670386a5301e47
md5sum = 14132bba14a1e66e7abb1a7c58b333e5
......@@ -76,7 +76,7 @@ command-line = sudo -V
[promise-sudo-on-host]
# assert sudo is installed, as it is required to enter the chroot 'cros_sdk'
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${promise-sudo-on-host-bin:wrapper-path}
......
......@@ -22,7 +22,7 @@ md5sum = c13b4f1a5aa526a8d3f8e02bf6baf785
[instance-neo-admin]
filename = instance-neo-admin.cfg.in
md5sum = dabc1e50475055b3ee9184dcace5e8d2
md5sum = b6e1ccb1d90160110202e5111eec2afa
[instance-neo-master]
filename = instance-neo-master.cfg.in
......
......@@ -18,7 +18,7 @@ plugin = ${:etc}/plugin
[monitor-neo-health]
<= monitor-promise-base
module = check_neo_health
promise = check_neo_health
name = ${:_buildout_section_name_}.py
config-neoctl = ${neoctl:wrapper-path}
{%- if bang_on_problem != None %}
......
......@@ -26,4 +26,4 @@ md5sum = 6f42f0a8c5e5c0c657541a65c4d9ee57
[template-nextcloud-instance]
filename = nextcloud-instance.cfg.in
md5sum = 86a92f542e516ac92802908b85354073
md5sum = 05f946a6523677e5dcf80e9fad230d1c
......@@ -28,7 +28,7 @@ depend =
[redis-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = redis.py
config-command = ${service-redis:promise_wrapper}
......@@ -106,7 +106,7 @@ depends =
[nextcloud-install-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-nextcloud-install.py
config-command = ${nc-install-wrapper:output}
......
[template]
filename = instance.cfg.in
md5sum = f9b6d01e29f2edddd9d6f99591976c33
md5sum = 56e986c74ef236f261834c57f5861ce0
[template-nginx-configuration]
filename = template-nginx.cfg.in
......
......@@ -75,7 +75,7 @@ promises =
[nginx-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${nginx-configuration:base-url}/status
......
......@@ -15,7 +15,7 @@
[instance]
filename = instance.cfg.in
md5sum = c962079a88a6ce97d8ce20fa4e8edfd1
md5sum = de38ed0348a9d50e01dbf383a661d53e
[tomcat-server-xml]
filename = server.xml.in
......
......@@ -87,7 +87,7 @@ instance-promises =
[tomcat-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${tomcat-instance:ip}
config-port = $${tomcat-instance:port}
......
......@@ -18,7 +18,7 @@ md5sum = fddea033e1aa9d6147a1a47bd7cc4b62
[template-powerdns]
filename = instance-powerdns.cfg
md5sum = c04c3b490e7f9f35af3d204a9df51f35
md5sum = a6fcfcef942cd9b57c2b0c69e318362c
[template-pdns-configuration]
_update_hash_filename_ = template/pdns.conf.jinja2
......@@ -26,7 +26,7 @@ md5sum = 851353e1d4dd562ace58b3345c2da515
[template-dns-replicate]
_update_hash_filename_ = instance-powerdns-replicate.cfg.jinja2
md5sum = bad5bcf578ad9700281419bbf3c9fe32
md5sum = 5b4b46136c6547c27508c4789ac5d0ee
[iso-list]
_update_hash_filename_ = template/zz.countries.nexedi.dk.rbldnsd
......
......@@ -73,7 +73,7 @@ sla-{{ parameter }} = {{ slapparameter_dict.pop( sla_key + parameter ) }}
[{{promise_section_title}}]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = {{promise_section_title}}.py
config-host = {{ '${' ~ request_section_title ~ ':connection-powerdns-ipv6}' }}
config-port = {{ '${' ~ request_section_title ~ ':connection-powerdns-port}' }}
......
......@@ -137,7 +137,7 @@ extra-context =
# Promises
[pdns-promise-listen-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = pdns-port-listening.py
config-host = $${pdns:ipv4}
config-port = $${pdns:port}
......
......@@ -19,7 +19,7 @@ md5sum = efb4238229681447aa7fe73898dffad4
[instance-default]
filename = instance-default.cfg.in
md5sum = c6dce31a36e4e13de62687e9888aeb77
md5sum = f6c583d24940a3a6838bd421dbb84a20
[proftpd-config-file]
filename = proftpd-config-file.cfg.in
......
......@@ -86,7 +86,7 @@ template = inline:{{ slapparameter_dict['ssh-key'] | indent }}
[proftpd-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = ${:_buildout_section_name_}.py
config-host = ${proftpd:ipv6}
config-port = ${proftpd:sftp-port}
......
......@@ -15,4 +15,4 @@
[instance-profile]
filename = instance.cfg.in
md5sum = 500b773d1a63a6a895f9b8038a582b05
md5sum = 9c4336f1f5143d3281c6706ff14abdd3
......@@ -33,7 +33,7 @@ pureftpd-dir = ${:srv}/pureftpd/
[check-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = check_nginx_port.py
[pureftpd-listen-promise]
......
......@@ -18,7 +18,7 @@ md5sum = 71531ed9c9b79fa769ab367e7ea2d2a5
[template-re6stnet]
filename = instance-re6stnet.cfg.in
md5sum = 870c34cf58acaaee21c71182dd3cb0cf
md5sum = 98f86d2a10d909215ae88ba6a602da27
[template-apache-conf]
filename = apache.conf.in
......
......@@ -170,14 +170,14 @@ context =
[re6st-registry-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = re6st-registry.py
config-host = ${re6st-registry:ipv4}
config-port = ${re6st-registry:port}
[apache-registry-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache-re6st-registry.py
config-host = ${apache-conf:ipv6}
config-port = ${apache-conf:port}
......
......@@ -18,7 +18,7 @@ md5sum = 8a08be95a04f1a47098c4fdef80bdfed
[instance-repman.cfg]
_update_hash_filename_ = instance-repman.cfg.jinja2.in
md5sum = 839642d7a56447b3f08fa69729faca61
md5sum = 697a1b546c883da45c14dbcd2d73b2b9
[config-toml.in]
_update_hash_filename_ = templates/config.toml.in
......@@ -34,7 +34,7 @@ md5sum = 0eeb24c6aa0760f0d33c4cc2828ddf30
[template-mariadb.cfg]
_update_hash_filename_ = instance-mariadb.cfg.jinja2.in
md5sum = 21a29a41768b2370d671d3086b3ef2bb
md5sum = a5c204cac552754520aee0570d379723
[template-my-cnf]
_update_hash_filename_ = templates/my.cnf.in
......
......@@ -327,13 +327,13 @@ dash = {{ dumps(dash) }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[{{ section('promise') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = mariadb.py
config-command = "{{ parameter_dict['bin-directory'] }}/is-local-tcp-port-opened" "{{ ip }}" "{{ port }}"
......
......@@ -216,21 +216,21 @@ depends =
[proxysql-{{ name }}-admin-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-admin-port-listening.py
config-host = {{ ipv4 }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-admin-port}' }}
[proxysql-{{ name }}-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-port-listening.py
config-host = {{ ipv4 }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-port}' }}
[proxysql-{{ name }}-ipv6-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = proxysql-{{ name }}-ipv6-port-listening.py
config-host = {{ ip }}
config-port = {{ '${' ~ name ~ '-cluster-parameter:proxy-port}' }}
......@@ -403,14 +403,14 @@ context =
[repman-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = repman_service_listen.py
config-host = ${repman-parameter:ipv4}
config-port = ${repman-parameter:port}
[repman-listen-ssl-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = repman_service_ssl_listen.py
config-host = ${repman-parameter:ipv4}
config-port = ${repman-parameter:secure-port}
......@@ -508,13 +508,13 @@ return = domain secure_access
[repman-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check_repman_frontend.py
config-url = https://${repman-frontend:connection-domain}
[repman-backend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = check_repman_backend.py
config-url = ${nginx-parameter:backend-ssl-url}
......
......@@ -19,4 +19,4 @@ md5sum = 0084214fae4ee1aad2c878aa393757af
[template-selenium]
filename = instance-selenium.cfg.in
md5sum = 884196ea35de35fa9159517912441ce6
md5sum = 35ba19f7cb4fe7fc9469611f2446c94e
......@@ -283,7 +283,7 @@ instance-promises =
[check-port-listening-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
[sshd-listen-promise]
......@@ -304,7 +304,7 @@ config-port = $${selenium-server-hub-instance:port}
# Promise waiting for all nodes to be registered
[selenium-server-hub-nodes-registered-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = $${:_buildout_section_name_}.py
config-command =
$${selenium-server-check-nodes-registered:rendered} $${selenium-server-hub-instance:api-url} $${:expected-node-count}
......
......@@ -18,7 +18,7 @@ md5sum = 84f099cc9852c4f53a075dccbb3880f0
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = c7c0bb9abbd0f8cc6c7956d83a61c4b3
md5sum = f565956476c31881b6e51ae1c27793ad
[template-apache-backend-conf]
filename = apache-backend.conf.in
......
......@@ -203,7 +203,7 @@ input = inline:
[{{ section('apache-promise') }}]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = apache.py
config-host = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
......@@ -297,13 +297,13 @@ promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
......
......@@ -18,7 +18,7 @@ md5sum = 8d6878ff1d2e75010c50a1a2b0c13b24
[template-runner]
filename = instance-runner.cfg
md5sum = 2a09b11c7dbade65d50e66287bf4c7b9
md5sum = 384285ab789396b6e674a8125ce2d030
[template-runner-import-script]
filename = template/runner-import.sh.jinja2
......@@ -26,7 +26,7 @@ md5sum = f2e2493bc5da90a53f86e5bcf64d2d57
[instance-runner-import]
filename = instance-runner-import.cfg.in
md5sum = ea7667f9af952bc4bdf43aad4520759f
md5sum = a4ebf6918a2c68c02898b2142357f490
[instance-runner-export]
filename = instance-runner-export.cfg.in
......
......@@ -134,7 +134,7 @@ mode = 755
[importer-consistency-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = importer-consistency-promise.py
config-command = ${importer-consistency-promise-bin:output}
......@@ -158,7 +158,7 @@ mode = 755
[software-release-deployment-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = software-release-deployment-promise.py
config-command =${software-release-deployment-bin:output}
......
......@@ -87,7 +87,7 @@ return = site_url domain
[custom-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = custom_frontend_promise.py
config-url = https://$${request-custom-frontend:connection-domain}
{% if slapparameter_dict.get('custom-frontend-basic-auth') -%}
......@@ -111,7 +111,7 @@ template = inline:
[custom-frontend-url-ready-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = custom_frontend_ready_promise.py
config-command = $${custom-frontend-url-ready-promise-bin:rendered}
......@@ -436,7 +436,7 @@ mode = 700
[apache-httpd-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:filename}.py
filename = apache-httpd-listening-on-tcp
config-url = $${apache-httpd:access-url}
......@@ -537,7 +537,7 @@ return = site_url domain
[slaprunner-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = slaprunner_frontend.py
config-url = https://$${request-frontend:connection-domain}/login
......@@ -556,7 +556,7 @@ return = secure_access domain
[httpd-frontend-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = slaprunner-apache-http-frontend.py
config-url = $${request-httpd-frontend:connection-secure_access}
......@@ -619,14 +619,14 @@ monitor-password = $${monitor-publish-parameters:monitor-password}
[slaprunner-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slaprunner.py
config-host = $${slaprunner:ipv6}
config-port = $${slaprunner:runner_port}
[runner-sshd-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = runner-sshd.py
config-host = $${slap-network-information:global-ipv6}
config-port = $${runner-sshd-port:port}
......@@ -863,20 +863,20 @@ log = $${runnerdirectory:home}/instance/*/.slapgrid/log/instance.log $${runnerdi
[supervisord-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = supervisord.py
config-host = $${slaprunner:ipv4}
config-port = $${supervisord:port}
[slapos-supervisord-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = instance_supervisord.py
config-command = ${buildout:bin-directory}/slapos node supervisorctl --cfg=$${slaprunner:slapos.cfg} pid
[slapos-proxy-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = slaproxy.py
config-host = $${slaprunner:ipv4}
config-port = $${slaprunner:proxy_port}
......
......@@ -15,7 +15,7 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = e39925b69a8bc17d17be54c075ae2f88
md5sum = f396d9a0780f4fb17016dbd32b56d7b8
[instance]
_update_hash_filename_ = instance.cfg.in
......@@ -23,11 +23,11 @@ md5sum = a7d78b4002266c69ece05a476df82791
[instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in
md5sum = 861ef130f27175c2978a9b946b138dd5
md5sum = 57b707cf0ed83be1959d26a88c131906
[instance-export]
_update_hash_filename_ = instance-export.cfg.jinja.in
md5sum = b3cedaa1603ca8ed83fdd94ef4b35cc8
md5sum = 190a736471f0e0cffcb2838968e01d84
[instance-resilient]
_update_hash_filename_ = instance-resilient.cfg.jinja
......
......@@ -69,7 +69,7 @@ export-promises =
[export-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resiliency-export-promise.py
config-command = $${export-promise-script:rendered}
......
......@@ -127,7 +127,7 @@ import-promises =
[import-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = resiliency-import-promise.py
config-command = $${import-promise-script:rendered}
......
......@@ -82,21 +82,21 @@ instance-promises =
[theia-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${theia-instance:ip}
config-port = $${theia-instance:port}
[frontend-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = $${:_buildout_section_name_}.py
config-host = $${frontend-instance:ip}
config-port = $${frontend-instance:port}
[frontend-authentification-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
username = $${frontend-instance-password:username}
password = $${frontend-instance-password:passwd}
......@@ -106,7 +106,7 @@ config-url = https://$${:username}:$${:password}@[$${:ip}]:$${:port}
[remote-frontend-url-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${remote-frontend:connection-secure_access}
config-http-code = 401
......@@ -114,7 +114,7 @@ config-http-code = 401
{% if additional_frontend %}
[remote-additional-frontend-url-available-promise]
<= monitor-promise-base
module = check_url_available
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url = $${remote-additional-frontend:connection-secure_access}
config-http-code = 401
......@@ -122,7 +122,7 @@ config-http-code = 401
[slapos-standalone-listen-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
# XXX promise plugins can not contain "slapos" in their names
name = standalone-listen-promise.py
config-host = $${slapos-standalone-instance:hostname}
......@@ -130,13 +130,13 @@ config-port = $${slapos-standalone-instance:port}
[slapos-standalone-ready-promise]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = standalone-ready-promise.py
config-abstract = $${directory:runner}/standalone_ready
[slapos-autorun-promise]
<= monitor-promise-base
module = check_service_state
promise = check_service_state
name = autorun-state-promise.py
config-service = $${slapos-autorun:service-name}
config-expect = $${slapos-autorun:autorun}
......
......@@ -19,8 +19,8 @@ md5sum = b43d5e8d1fc2d0eeb54f91cefe6a5bae
[template-turnserver]
filename = instance-turnserver.cfg.jinja2.in
md5sum = 7af3318d7249e9afe22436d9fe200159
md5sum = 932c4d82faa8e28b62bfbfc3dfe31c02
[template-insecure-turnserver]
filename = instance-insecure-turnserver.cfg.jinja2.in
md5sum = 3db65c3a16eb76ab438ac3817d1a5fea
md5sum = 504f0f5ead8600b80ba43f828a0f82b6
......@@ -57,7 +57,7 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[promise-check-turnserver-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_port }}
......
......@@ -123,14 +123,14 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[promise-check-turnserver-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_port }}
[promise-check-turnserver-tls-port]
<= monitor-promise-base
module = check_socket_listening
promise = check_socket_listening
name = turnserver-tls-port-listening.py
config-host = {{ listening_ip }}
config-port = {{ turn_tls_port }}
......
......@@ -3,7 +3,7 @@ request = portal.REQUEST
reference = request['reference']
data_chunk = request['data_chunk']
module = portal.data_stream_module
promise = portal.data_stream_module
try:
data_stream = module[reference]
except KeyError:
......
......@@ -15,4 +15,4 @@
[caucase-jinja2-library]
filename = caucase.jinja2.library
md5sum = a5c7a46c6fb85aa22a371d9d2cd9e57e
md5sum = 1e3607e514320441ddccdb6d1a21f705
......@@ -37,7 +37,7 @@ command-line = '{{ buildout_bin_directory }}/caucased'
{% if promise -%}
[{{ prefix }}-promise]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = {{ prefix }}.py
config-command = '{{ buildout_bin_directory }}/caucase-probe' 'http://{{ netloc }}'
{%- endif %}
......@@ -119,7 +119,7 @@ command-line = '{{ buildout_bin_directory }}/caucase-updater'
{% if promise -%}
[{{ prefix }}-promise]
<= monitor-promise-base
module = check_certificate
promise = check_certificate
name = {{ prefix }}.py
config-certificate = {{ crt_path }}
config-key = {{ key_path }}
......
......@@ -26,11 +26,11 @@ md5sum = d10b8e35b02b5391cf46bf0c7dbb1196
[template-mariadb]
filename = instance-mariadb.cfg.in
md5sum = 7d064777c1c4e7b275b255db4f4b1da9
md5sum = c82ea00c4514b72fb97a6fa7ac36ec52
[template-kumofs]
filename = instance-kumofs.cfg.in
md5sum = fed6dd2bdc389b4fc7e7b7ca32c5d4b6
md5sum = cfe4696a67bf4886a5d8252a5274a941
[template-zope-conf]
filename = zope.conf.in
......@@ -50,7 +50,7 @@ md5sum = 1102c3e37a5a2e8aa2d8a2607ab633c8
[template-postfix]
filename = instance-postfix.cfg.in
md5sum = 2a68a3e7c5c509cbd4cfa9e670ac91c7
md5sum = 0f666e5e7e52afda433feb9f02452717
[template-postfix-master-cf]
filename = postfix_master.cf.in
......@@ -78,7 +78,7 @@ md5sum = fcc8470824c448a56e2282c43b870cb5
[template-zeo]
filename = instance-zeo.cfg.in
md5sum = 79b6b422df512b5a075eba54a6895a01
md5sum = 1f33f3b93da32b34e2fd11471648835d
[template-zodb-base]
filename = instance-zodb-base.cfg.in
......@@ -86,11 +86,11 @@ md5sum = bc821f9f9696953b10a03ad7b59a1936
[template-zope]
filename = instance-zope.cfg.in
md5sum = 58ca95f6e0c067702a03fc3be66d50c1
md5sum = 769e81946c346530cebfce6ad7553165
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = c6c1b3e4b2f3c6f256153dcfe9fbecad
md5sum = d6166515fda7b09df754672536b131be
[template-haproxy-cfg]
filename = haproxy.cfg.in
......@@ -102,4 +102,4 @@ md5sum = 5cf0316fdd17a940031e4083bbededd8
[instance-wcfs.cfg.in]
filename = instance-wcfs.cfg.in
md5sum = 945e8e4552a6bdf228b9609567b09399
md5sum = eb4be2669a9a56187cc4366272e11d18
......@@ -272,7 +272,7 @@ hash-files = ${rsyslogd-cfg:rendered}
[{{ section ('rsyslogd-listen-promise') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = rsyslogd_listen_promise.py
config-command = test -S ${rsyslogd-cfg-parameter-dict:log-socket}
......@@ -303,7 +303,7 @@ certificate-and-key = ${directory:etc}/certificate-and-key-generated.pem
[{{ section('haproxy-promise') }}]
<= monitor-promise-base
# Check any haproxy port in ipv4, expect other ports and ipv6 to behave consistently
module = check_socket_listening
promise = check_socket_listening
name = haproxy.py
config-host = {{ ipv4 }}
config-port = {{ haproxy_dict.values()[0][0] }}
......@@ -382,13 +382,13 @@ promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
<= monitor-promise-base
module = check_command_execute
promise = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
@@ -86,7 +86,7 @@ rendered = ${directory:srv}/exporter.exclude
# Deploy zope promises scripts
[promise-template]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
config-host = ${kumofs-instance:ip}
config-port = ${kumofs-instance:server-listen-port}
@@ -112,7 +112,7 @@ config-port = ${kumofs-instance:manager-port}
[promise-check-computer-memory]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
@@ -332,7 +332,7 @@ context =
{%if slapparameter_dict.get('max-slowqueries-threshold') and slapparameter_dict.get('slowest-query-threshold') %}
[{{ section('monitor-promise-slowquery-result') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-slow-query-pt-digest-result.py
config-command = "{{ parameter_dict['promise-check-slow-queries-digest-result'] }}" --ptdigest_path "${directory:slowquery}" --status_file ${monitor-directory:private}/mariadb_slow_query.report.json --max_queries_threshold "${:max_queries_threshold}" --slowest_query_threshold "${:slowest_queries_threshold}"
max_queries_threshold = {{ slapparameter_dict['max-slowqueries-threshold'] }}
@@ -341,13 +341,13 @@ slowest_queries_threshold = {{ slapparameter_dict['slowest-query-threshold'] }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[{{ section('promise') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = mariadb.py
config-command = "${binary-wrap-mysql:wrapper-path}" --execute ';' {% if database_list and database_list[0].get('user') %} --host="${my-cnf-parameters:ip}" --port="${my-cnf-parameters:port}" --user="{{ database_list[0]['user'] }}" --password="{{ database_list[0]['password'] }}" {% endif %}
@@ -80,7 +80,7 @@ wrapper-path = ${directory:run}/munnel
[{{ section('munnel-promise') }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = munnel.py
config-host = {{ ip }}
config-port = {{ milter_port }}
@@ -262,14 +262,14 @@ wrapper-path = ${directory:run}/postfix-master
[{{ section('postfix-promise') }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = postfix.py
config-host = {{ ip }}
config-port = {{ tcpv4_port }}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
@@ -52,7 +52,7 @@ wrapper-path = ${directory:service-on-watch}/wcfs
[wcfs-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ bin_directory }}/wcfs status {{ zurl }}
@@ -55,7 +55,7 @@ post = test ! -s {{ "${" ~ zeo_section_name ~":pid-path}" }} || {{ bin_directory
[{{ section(zeo_section_name ~ "-promise") }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = zeo-{{ family }}.py
config-host = {{ "${" ~ zeo_section_name ~ ":ip}" }}
config-port = {{ "${" ~ zeo_section_name ~ ":port}" }}
@@ -89,7 +89,7 @@ tidstorage-wrapper = ${directory:services}/tidstoraged
[{{ section("promise-tidstorage") }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = tidstorage.py
config-host = ${tidstorage:ip}
config-port = ${tidstorage:port}
@@ -177,7 +177,7 @@ mode = 755
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
@@ -358,7 +358,7 @@ hash-existing-files =
[{{ section("promise-" ~ name) }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = {{ name }}.py
config-host = {{ ipv4 }}
config-port = {{ port }}
@@ -373,7 +373,7 @@ ipv4-port = {{ port }}
[{{ section("promise-tunnel-" ~ name) }}]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = {{ zope_tunnel_base_name }}.py
config-host = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6}' }}
config-port = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6-port}' }}
@@ -384,7 +384,7 @@ config-port = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6-port}' }}
{% if longrequest_logger_interval > 0 -%}
[{{ section('promise-check-' ~name ~ '-longrequest-error-log') }}]
<= monitor-promise-base
-module = check_error_on_zope_longrequest_log
+promise = check_error_on_zope_longrequest_log
name = {{'check-' ~ name ~ '-longrequest-error-log.py'}}
config-log-file = {{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }}
config-error-threshold = {{ slapparameter_dict["zope-longrequest-logger-error-threshold"] }}
@@ -528,7 +528,7 @@ expected-value =
[{{ section("promise-test-runner-apache-url") }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${promise-test-runner-apache-url-executable:path}
@@ -537,7 +537,7 @@ config-command = ${promise-test-runner-apache-url-executable:path}
[{{ section('promise-check-computer-memory') }}]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
@@ -18,11 +18,11 @@ md5sum = e4e070f93adaf917f9427ae9f35573d9
[instance-apache-php]
filename = instance-apache-php.cfg.in
-md5sum = 4afee4377fa9cbc1e4ff80647b2f279c
+md5sum = e7a14c01e6314e2bffebd7d80cf1c488
[instance-lamp]
filename = instance-lamp.cfg.jinja2.in
-md5sum = 79f562260895df2665a85df5cb442193
+md5sum = e0e2e88b6deeb011b998b78e4e468555
[template-apache.conf]
filename = apache.conf.in
@@ -207,7 +207,7 @@ backend-url = ${apache-php-configuration:url}
[promise]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = apache-httpd-port-listening.py
config-host = ${apache-php-configuration:ip}
config-port = ${apache-php-configuration:port}
@@ -77,7 +77,7 @@ return = domain secure_access
[lamp-frontend-promise]
<= monitor-promise-base
-module = check_url_available
+promise = check_url_available
name = lamp-http-frontend.py
url = ${request-frontend:connection-secure_access}
config-url = ${:url}
@@ -14,7 +14,7 @@
# not need these here).
[monitor2-template]
filename = instance-monitor.cfg.jinja2.in
-md5sum = d4185c191e8b9df20e1f98cd8c556b1d
+md5sum = 3cba541a8b0b22c2648848ed1d259174
[monitor-httpd-conf]
_update_hash_filename_ = templates/monitor-httpd.conf.in
@@ -302,14 +302,12 @@ pre = {{ monitor_statistic }} --history_folder ${monitor-directory:public}
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
-content =
-  from slapos.promise.plugin.${:module} import RunPromise
-mode = 600
+module = slapos.promise.plugin.${:promise}
output = ${directory:plugins}/${:name}
[monitor-httpd-promise]
<= monitor-promise-base
-module = check_url_available
+promise = check_url_available
name = monitor-httpd-listening-on-tcp.py
config-url = ${monitor-httpd-conf-parameter:url}
config-http-code = 401
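
(Illustration only, not part of the diff: with the updated [monitor-promise-base] above, a promise section just names the plugin through the new `promise` option and the base section expands it into the dotted module path. The [example-http-promise] section below is hypothetical; its option names follow the sections shown in this commit.)

[monitor-promise-base]
recipe = slapos.cookbook:promise.plugin
eggs =
  slapos.toolbox
# ${:promise} is supplied by each extending section, e.g. check_url_available
module = slapos.promise.plugin.${:promise}
output = ${directory:plugins}/${:name}

[example-http-promise]
# hypothetical section, sketched to show the new convention
<= monitor-promise-base
promise = check_url_available
name = example-http.py
config-url = ${monitor-httpd-conf-parameter:url}
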
@@ -360,7 +358,7 @@ return = domain secure_access
# credentials.
[check-monitor-password-promise]
<= monitor-promise-base
-module = check_url_available
+promise = check_url_available
name = check-monitor-frontend-password.py
url = ${monitor-frontend:connection-secure_access}
config-url = ${:url}
@@ -371,7 +369,7 @@ config-password = ${monitor-instance-parameter:password}
# supplied.
[monitor-frontend-promise]
<= monitor-promise-base
-module = check_url_available
+promise = check_url_available
name = monitor-http-frontend.py
url = ${monitor-frontend:connection-secure_access}
config-url = ${:url}
@@ -379,7 +377,7 @@ config-http-code = 401
[monitor-bootstrap-promise]
<= monitor-promise-base
-module = monitor_bootstrap_status
+promise = monitor_bootstrap_status
name = monitor-bootstrap-status.py
config-process-pid-file = ${monitor-conf-parameters:pid-file}
config-process-name = ${start-monitor:name}
@@ -387,13 +385,13 @@ config-status-file = ${monitor-conf-parameters:promise-output-file}
[promise-check-slapgrid]
<= monitor-promise-base
-module = check_partition_deployment_state
+promise = check_partition_deployment_state
name = buildout-${slap-connection:partition-id}-status.py
config-monitor-url = ${monitor-instance-parameter:monitor-base-url}
[promise-check-free-disk-space]
<= monitor-promise-base
-module = check_free_disk_space
+promise = check_free_disk_space
name = check-free-disk-space.py
config-collectordb = ${monitor-instance-parameter:collector-db}
config-threshold-file = ${directory:etc}/min-free-disk-size
@@ -14,23 +14,23 @@
# not need these here).
[pbsready]
filename = pbsready.cfg.in
-md5sum = 005125621d157b3ae04c428ea6060e37
+md5sum = 1d3aba1ba770ad3fcc2ab6c0b9266409
[pbsready-import]
filename = pbsready-import.cfg.in
-md5sum = dd8f0728e53b49822eed5d613839558f
+md5sum = a8c9821951425bedbdea30a870fb5138
[pbsready-export]
filename = pbsready-export.cfg.in
-md5sum = 2b0c71b085cfe8017f28098c160b1f49
+md5sum = 8f15263c4a27ec315eb3a12dbf7a7b34
[template-pull-backup]
filename = instance-pull-backup.cfg.in
-md5sum = b240dc76a663190304d8bcb9cabcda8f
+md5sum = 4425db50d551fb8a974e547308990bac
[template-replicated]
filename = template-replicated.cfg.in
-md5sum = 41aee09e9f9abbae59b0442e1e76387f
+md5sum = c4012ccc2c473ae5c7cad9dcac61e0f1
[template-parts]
filename = template-parts.cfg.in
@@ -217,7 +217,7 @@ wrapper-path = $${rootdirectory:bin}/stalled-pull-push
[pull-push-stalled-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = stalled-pull-push.py
config-command = $${pull-push-stalled-promise-bin:wrapper-path}
@@ -233,7 +233,7 @@ context =
[notifier-feed-status-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = notifier-feed-check-malformed-or-failure.py
config-command = $${notifier-feed-status-promise-bin:rendered}
@@ -60,7 +60,7 @@ rendered = ${rootdirectory:bin}/exporter-status
[notifier-exporter-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = exporter-status.py
config-command = ${notifier-exporter-promise-bin:rendered}
@@ -81,7 +81,7 @@ mode = 700
[backup-checksum-integrity-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = backup-checksum-integrity.py
config-command = $${backup-checksum-integrity-promise-bin:rendered}
@@ -159,7 +159,7 @@ wrapper-path = $${basedirectory:services}/resilient-web-takeover-httpd
[resilient-web-takeover-httpd-promise]
<= monitor-promise-base
-module = check_url_available
+promise = check_url_available
name = resilient-web-takeover-httpd.py
config-url = http://[$${resilient-web-takeover-httpd-configuration-file:listening-ip}]:$${resilient-web-takeover-httpd-configuration-file:listening-port}/
@@ -165,7 +165,7 @@ wrapper-path = $${rootdirectory:bin}/stalled-notifier-callbacks
[notifier-stalled-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = stalled-notifier-callbacks.py
config-command = $${notifier-stalled-promise-bin:wrapper-path}
@@ -217,7 +217,7 @@ wrapper-path = $${basedirectory:scripts}/sshd-graceful
[sshd-promise]
<= monitor-promise-base
-module = check_socket_listening
+promise = check_socket_listening
name = sshd.py
config-host = $${slap-network-information:global-ipv6}
config-port = $${sshd-port:port}
@@ -261,7 +261,7 @@ mode = 700
[resilient-sshkeys-sshd-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = public-key-existence.py
config-command = $${resilient-sshkeys-sshd-promise-bin:output}
@@ -281,7 +281,7 @@ context =
[notifier-feed-status-promise]
<= monitor-promise-base
-module = check_command_execute
+promise = check_command_execute
name = notifier-feed-check-malformed-or-failure.py
config-command = $${notifier-feed-status-promise-bin:rendered}
#----------------
(Two further file diffs in this commit are collapsed and not shown.)