Commit a0e53d46 authored by Joanne Hugé

Update Release Candidate

parents fdc04b69 4968d55c
# The lightweight log processor https://fluentbit.io/
#
# This file is used for packaging with OBS.
# "-nodep" means "no build dependencies", it is faster and therefore useful for development
[buildout]
# delete slapos.rebootstrap as we use python from the host
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
buildout.cfg
parts =
fluent-bit
[python2.7]
recipe =
environment =
location = /usr
[python3]
recipe =
environment =
location = /usr
[cmake]
recipe =
environment =
location = /usr
# The lightweight log processor https://fluentbit.io/
#
# This file is used for packaging with OBS.
[buildout]
# delete slapos.rebootstrap as we use python from the host
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
buildout.cfg
parts =
fluent-bit
@@ -12,8 +12,8 @@ parts =
[fluentbit-plugin-wendelin]
recipe = slapos.recipe.cmmi
share = true
-url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/v0.2/fluentbit-plugin-wendelin-v0.2.tar.gz
+url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/0.2.1/fluentbit-plugin-wendelin-0.2.1.tar.gz
-md5sum = 4985017f5575528240bf5cd5b148396b
+md5sum = 3a4ab7f11f3d0fb8b3312b86e77c6597
configure-command = echo "No configure command."
environment =
  PATH=${golang1.17:location}/bin:%(PATH)s
# This is part of the OCEAN project.
-# MCA stands for Metadata Collect Agent
-# "dep--" means it does not compile all the building dependencies
-# "--static" indicates that it is the statically linked version of it
+# fluentbit-plugin-wendelin is a .so plugin for fluent-bit.
+# It is compiled for fluent-bit to send data to Wendelin.
+# https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin
+#
+# This file is used for packaging with OBS.
[buildout]
+# delete slapos.rebootstrap as we use python from the host
+extensions =
+  slapos.extension.strip
+  slapos.extension.shared
extends =
-  mca--static.cfg
+  obs.cfg
parts =
  fluentbit-plugin-wendelin
-  mca
[python2.7]
recipe =
# This is part of the OCEAN project.
# fluentbit-plugin-wendelin is a .so plugin for fluent-bit.
# It is compiled for fluent-bit to send data to Wendelin.
# https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin
#
# This file is used for packaging with OBS.
[buildout]
# delete slapos.rebootstrap as we use python from the host in the packaging process with OBS
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
buildout.cfg
parts =
fluentbit-plugin-wendelin
[golang1.17]
# Using "./make.bash" instead of "./all.bash" disables golang tests. Some of these tests attempt to use the network, which fails on OBS' VM.
# A less radical way to fix the issue may be investigated in the future.
make-targets = cd src && unset GOBIN && ./make.bash && cp -alf .. ${:location}
post-install =
${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true
# This line relocates the golang cached modules directory so that the modules are sent to OBS along with the code to compile.
${:location}/bin/go env -w GOPATH=${buildout:directory}/go
@@ -12,7 +12,7 @@ parts =
[c-astral-headers]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/c-astral-c-library
-revision = v0.1
+revision = v1.0
git-executable = ${git:location}/bin/git
[gcc]
@@ -21,7 +21,7 @@ min_version = 7.1
[mavsdk-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/mavlink/MAVSDK.git
-revision = v0.37.0
+revision = v0.39.0
git-executable = ${git:location}/bin/git
ignore-cloning-submodules = true
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
-# "dep--" means it does not compile all the building dependencies
+# https://lab.nexedi.com/nexedi/metadata-collect-agent
+#
+# This file is used for packaging with OBS.
+# "-nodep" means "no build dependencies", it is faster and therefore useful for development
[buildout]
# delete slapos.rebootstrap as we use python from the host
@@ -9,10 +12,9 @@ extensions =
  slapos.extension.shared
extends =
-  software.cfg
+  obs.cfg
parts =
-  fluentbit-plugin-wendelin
  mca
[python2.7]
@@ -25,12 +27,6 @@ recipe =
environment =
location = /usr
-[golang1.17]
-recipe = plone.recipe.command
-command = ${:location}/bin/go env -w GOPATH=${buildout:directory}/go
-environment =
-location = /usr
[cmake]
recipe =
environment =
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
#
# This file is used for packaging with OBS.
# "-static" indicates that it is the statically linked version
# "-nodep" means "no build dependencies", it is faster and therefore useful for development
[buildout]
# delete slapos.rebootstrap as we use python from the host
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
obs-static.cfg
parts =
mca
[python2.7]
recipe =
environment =
location = /usr
[python3]
recipe =
environment =
location = /usr
[cmake]
recipe =
environment =
location = /usr
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
-# "--static" indicates that it is the statically linked version of it
+#
+# This file is used for packaging with OBS.
+# "-static" indicates that it is the statically linked version
[buildout]
extends =
-  software.cfg
+  obs.cfg
parts =
-  fluentbit-plugin-wendelin
  mca
# ENABLE STATIC LINKING
# note: as a general matter, one has to tweak each dependency by hand to get a static executable
[mca]
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
#
# This file is used for packaging with OBS.
[buildout]
extends =
-  ../../component/fluentbit-plugin-wendelin/buildout.cfg
-  ../../component/mca/buildout.cfg
+  buildout.cfg
parts =
-  fluentbit-plugin-wendelin
  mca
[golang1.17]
@@ -10,8 +10,8 @@ parts = qjs-wrapper
recipe = slapos.recipe.cmmi
shared = true
configure-command = true
-url = https://lab.nexedi.com/nexedi/qjs-wrapper/-/archive/v0.1/qjs-wrapper-v0.1.tar.gz
+url = https://lab.nexedi.com/nexedi/qjs-wrapper/-/archive/v1.0/qjs-wrapper-v1.0.tar.gz
-md5sum = 4f0eb6f3bc5719a8ec043ce4f4d20747
+md5sum = 0f1393fa15d46b2b551836197af9de46
environment =
  C_INCLUDE_PATH=include:${open62541:location}/include:${open62541:location}/deps:${open62541:location}/src/pubsub:${quickjs:location}/include
  CPLUS_INCLUDE_PATH=include:${mavsdk:location}/include:${mavsdk:location}/include/mavsdk
@@ -66,6 +66,10 @@ post-install =
  ${patchelf:location}/bin/patchelf --set-rpath \
  ${libsecret:location}/lib:${gettext:location}/lib:${glib:location}/lib \
  keytar.node
+  # remove some binaries using musl that are confusing check software
+  # and anyway not used once the software is installed
+  rm -f %(location)s/node_modules/@msgpackr-extract/*/*.node
+  rm -rf $HOME/.cache/yarn/

[theia-wrapper]
recipe = slapos.recipe.template
@@ -19,7 +19,7 @@ md5sum = 8157c22134200bd862a07c6521ebf799
[yarn.lock]
_update_hash_filename_ = yarn.lock
-md5sum = 3b8ac3a6933a1d52b31c74fad8af2d8e
+md5sum = d3fae8f0a2c911611d96a5dd870674c6
[ms-python-disable-jedi-buildout.patch]
_update_hash_filename_ = ms-python-disable-jedi-buildout.patch
@@ -96,9 +96,9 @@ eggs +=
[beremiz]
recipe = slapos.recipe.build:download-unpacked
-# download beremiz at revision f703a6206832e14d7545d88428a7c81335a75004
-url = https://github.com/beremiz/beremiz/archive/f703a6206832e14d7545d88428a7c81335a75004.tar.gz
-md5sum = 01e191324837c9365121a31438b0d350
+# download beremiz at revision caee3ad3b7bb2865c77328de5ffdaeec61dbf49f
+url = https://github.com/beremiz/beremiz/archive/caee3ad3b7bb2865c77328de5ffdaeec61dbf49f.tar.gz
+md5sum = 52f9407e1706cdecf01fabfc61090276
[beremiz-setup]
recipe = zc.recipe.egg:develop
@@ -26,6 +26,8 @@ configure-options =
  -DUA_ENABLE_PUBSUB=ON
  -DUA_ENABLE_SUBSCRIPTIONS=ON
  -DUA_NAMESPACE_ZERO=REDUCED
+  -DUA_ENABLE_ENCRYPTION=OPENSSL
+  -DOPENSSL_ROOT_DIR=${openssl:location}

# Beremiz need it to be in folder parts/open62541
# as Beremiz search for open62541 to BEREMIZ_PATH/../open62541
@@ -37,7 +39,7 @@ post-install =
[beremiz-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/beremiz/beremiz.git
-revision = c3847f87bde2d520b856e353498cb2bad9d83911
+revision = caee3ad3b7bb2865c77328de5ffdaeec61dbf49f
git-executable = ${git:location}/bin/git
[beremiz]
@@ -22,11 +22,11 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-frontend]
filename = instance-frontend.cfg.in
-md5sum = ae04e2c6137cb5bd5ef1a9b3c6738c49
+md5sum = 1ec349747487b45dd6b30bb2aa45acbe
[profile-master]
filename = instance-master.cfg.in
-md5sum = 2c599c22f6ab9bf2bbc48da8a12ae6e0
+md5sum = 50dbd9bd25fbc608b576ac83f4e13b0f
[profile-slave-list]
filename = instance-slave-list.cfg.in
@@ -82,15 +82,15 @@ md5sum = 905e395b67c0a2b214b9f0099c8dc939
[template-validate-script]
_update_hash_filename_ = templates/validate-script.sh.in
-md5sum = 53e5d7ba2827bff003051f74f24ffe4f
+md5sum = ffa38c0110cf7a91cfc514e013b37a6d
[template-configuration-state-script]
_update_hash_filename_ = templates/configuration-state-script.sh.in
-md5sum = 4d2537d2698d32a7e909989f8778d144
+md5sum = b16a6ebdfc0be3aa7cdeaf47f47751ec
[template-rotate-script]
_update_hash_filename_ = templates/rotate-script.sh.in
-md5sum = 8c150e1e6c993708d31936742f3a7302
+md5sum = b6fda7e51b6847199c3d142fc3f9a671
[software-setup]
filename = setup.py
@@ -102,7 +102,7 @@ md5sum = e82ccdb0b26552a1c88ff523d8fae24a
[profile-kedifa]
filename = instance-kedifa.cfg.in
-md5sum = 40fe46a1fe653406b18ca46351d195b8
+md5sum = d790e23ebf7b07bb245322629d402551
[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
@@ -644,6 +644,7 @@ pattern = *.old
keep_days = 365
extra-context =
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key log_dir trafficserver-directory:log
  key rotate_dir trafficserver-directory:logrotate-backup
  key xz_binary :xz_binary
@@ -669,6 +670,8 @@ path_list = ${caddy-configuration:frontend-configuration} ${caddy-directory:slav
sha256sum = {{ software_parameter_dict['sha256sum'] }}
extra-context =
+  import os_module os
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key path_list :path_list
  key sha256sum :sha256sum
  key signature_file :signature_file
@@ -697,6 +700,7 @@ output = ${directory:bin}/frontend-caddy-validate
last_state_file = ${directory:run}/caddy_configuration_last_state
validate_command = ${caddy-wrapper:output} -validate
extra-context =
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key validate_command :validate_command
  key configuration_state_command frontend-caddy-configuration-state-validate:output
  key last_state_file :last_state_file
@@ -832,6 +836,8 @@ path_list = ${backend-haproxy-configuration:file} ${backend-client-login-config:
sha256sum = {{ software_parameter_dict['sha256sum'] }}
extra-context =
+  import os_module os
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key path_list :path_list
  key sha256sum :sha256sum
  key signature_file :signature_file
@@ -860,6 +866,7 @@ output = ${directory:bin}/backend-haproxy-validate
last_state_file = ${directory:run}/backend_haproxy_configuration_last_state
validate_command = {{ software_parameter_dict['haproxy_executable'] }} -f ${backend-haproxy-configuration:file} -c
extra-context =
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key validate_command :validate_command
  key configuration_state_command backend-haproxy-configuration-state-validate:output
  key last_state_file :last_state_file
@@ -1001,6 +1008,8 @@ path_list = ${frontend-configuration:slave-introspection-configuration} ${fronte
sha256sum = {{ software_parameter_dict['sha256sum'] }}
extra-context =
+  import os_module os
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key path_list :path_list
  key sha256sum :sha256sum
  key signature_file :signature_file
@@ -1029,6 +1038,7 @@ output = ${directory:bin}/slave-introspection-validate
last_state_file = ${directory:run}/slave_introspection_configuration_last_state
validate_command = {{ software_parameter_dict['nginx'] }} -c ${frontend-configuration:slave-introspection-configuration} -t
extra-context =
+  raw find_executable {{ software_parameter_dict['findutils'] }}/bin/find
  key validate_command :validate_command
  key configuration_state_command slave-introspection-configuration-state-validate:output
  key last_state_file :last_state_file
@@ -288,7 +288,7 @@ directory = ${directory:auth-ready}
init =
  import os
  if os.path.isdir(options['directory']):
-    os.system("find %s -type f -name *-auth-random -size 0 -delete" % (options['directory'],))
+    os.system("{{ software_parameter_dict['findutils'] }}/bin/find %s -type f -name *-auth-random -size 0 -delete" % (options['directory'],))

[auth-random]
recipe = slapos.recipe.build
@@ -335,7 +335,7 @@ config-command =
recipe = plone.recipe.command
output = ${directory:var}/${:_buildout_section_name_}.txt
update-command = ${:command}
-command = find ${directory:auth-ready} -type f > ${:output}
+command = {{ software_parameter_dict['findutils'] }}/bin/find ${directory:auth-ready} -type f > ${:output}

[promise-kedifa-auth-ready]
<= monitor-promise-base
@@ -856,7 +856,7 @@ key = ${:certificate}
stop-on-error = True
update-command = ${:command}
command =
-  [ -f ${:certificate} ] && find ${:certificate} -type f -mtime +3 -delete
+  [ -f ${:certificate} ] && {{ software_parameter_dict['findutils'] }}/bin/find ${:certificate} -type f -mtime +3 -delete
  if ! [ -f ${:certificate} ] ; then
    openssl req -new -newkey rsa:2048 -sha256 -subj \
    "/CN=${rejected-slave-publish-configuration:ip}" \
@@ -13,6 +13,7 @@ extends =
  ../../component/numpy/buildout.cfg
  ../../component/haproxy/buildout.cfg
  ../../component/nginx/buildout.cfg
+  ../../component/findutils/buildout.cfg
  ../../stack/caucase/buildout.cfg
@@ -125,6 +126,7 @@ xz_location = ${xz-utils:location}
htpasswd = ${:bin_directory}/htpasswd
smart_caucase_signer = ${:bin_directory}/smart-caucase-signer
caucase_csr_sign_check = ${:bin_directory}/caucase-csr-sign-check
+findutils = ${findutils:location}

[template]
recipe = slapos.recipe.template:jinja2
@@ -5,7 +5,16 @@ SIGNATURE_FILE={{ signature_file }}
NSIGNATURE_FILE={{ signature_file }}.tmp
touch $SIGNATURE_FILE
-{{ sha256sum }} {{ path_list }} 2> /dev/null | sort -k 66 > $NSIGNATURE_FILE
+rm -f $NSIGNATURE_FILE $NSIGNATURE_FILE.wrk
+set +e
+{%- for path in path_list.split() %}
+{%- set directory = os_module.path.dirname(path) %}
+{%- set name = os_module.path.basename(path) %}
+{{ find_executable }} {{ directory }} -type f -name '{{ name }}' -exec {{ sha256sum }} {} \; >> $NSIGNATURE_FILE.wrk
+{%- endfor %}
+set -e
+sort -k 2,2 $NSIGNATURE_FILE.wrk > $NSIGNATURE_FILE
+rm -f $NSIGNATURE_FILE.wrk
if diff "$SIGNATURE_FILE" "$NSIGNATURE_FILE" > /dev/null ; then
  # No changes since last run just propagate information
@@ -9,10 +9,10 @@ COMPRESS={{ xz_binary }}
KEEP_DAYS={{ keep_days }}
# Move out ${PATTERN} files
-find ${LOG_DIR} -maxdepth 1 -type f -name ${PATTERN} -exec mv {} ${LOGROTATE_DIR}/ \;
+{{ find_executable }} ${LOG_DIR} -maxdepth 1 -type f -name ${PATTERN} -exec mv {} ${LOGROTATE_DIR}/ \;
# Compress
-find ${LOGROTATE_DIR} -maxdepth 1 -type f -name ${PATTERN} -exec ${COMPRESS} -9 {} \;
+{{ find_executable }} ${LOGROTATE_DIR} -maxdepth 1 -type f -name ${PATTERN} -exec ${COMPRESS} -9 {} \;
# Retent old files
-find ${LOGROTATE_DIR} -maxdepth 1 -type f -mtime +${KEEP_DAYS} -delete
+{{ find_executable }} ${LOGROTATE_DIR} -maxdepth 1 -type f -mtime +${KEEP_DAYS} -delete
@@ -7,7 +7,7 @@ LAST_STATE_FILE={{ last_state_file }}
# force validation each 2 hours
old_found=1
if [ -f $LAST_STATE_FILE ] ; then
-  old_found=$(find $LAST_STATE_FILE -mmin +120 | wc -l)
+  old_found=$({{ find_executable }} $LAST_STATE_FILE -mmin +120 | wc -l)
fi
if [ "$old_found" -eq 1 ] || {{ configuration_state_command }} ; then
@@ -14,8 +14,8 @@ parts =
[dufs]
recipe = slapos.recipe.cmmi
shared = true
-url = https://github.com/sigoden/dufs/archive/refs/tags/v0.30.0.tar.gz
+url = https://github.com/sigoden/dufs/archive/refs/tags/v0.31.0.tar.gz
-md5sum = a146cb32028d025143667c3a70e2b696
+md5sum = 4340e59915605e30dcdb70aa9eb06acb
configure-command = :
make-binary = cargo install --root=%(location)s --path .
make-targets =
@@ -525,6 +525,46 @@
"default": true,
"type": "boolean"
},
"coverage": {
"type": "object",
"title": "Coverage",
"description": "Coverage configuration",
"additionalProperties": false,
"properties": {
"enabled": {
"description": "Collect python coverage data during test run.",
"default": false,
"type": "boolean"
},
"include": {
"description": "File name patterns to include in coverage data, relative to software buildout's directory. Default to all repositories defined in software by ${erp5_repository_list:repository_id_list}.",
"type": "array",
"items": {
"type": "string"
},
"examples": [
[
"parts/erp5/*",
"parts/custom-repository/*",
"develop-eggs/custom-egg/*"
]
]
},
"branch": {
"description": "Enable branch coverage",
"type": "boolean",
"default": false
},
"upload-url": {
"description": "URL to upload coverage data. This is interpreted as a RFC 6570 URI Template, with the following parameters: test_name, test_result_id and test_result_revision. The request will be a PUT request with the coverage file content as body, suitable for WebDav servers. If the URL contains user and password, they will be used to attempt authentication using Digest and Basic authentication schemes.",
"type": "string",
"format": "uri",
"examples": [
"https://user:password@example.com/{test_result_id}/{test_name}.coverage.sqlite3"
]
}
}
},
"node-count": {
"description": "Number of tests this instance can execute in parallel. This must be at least equal to the number of nodes configured on testnode running the test",
"default": 3,
{
"$schema": "https://json-schema.org/draft/2019-09/schema",
"description": "Parameters to instantiate Fluent-bit",
"type": "object",
"configuration": {
"input": {
"description": "The desired fluent-bit input, which defines in fluent-bit's configuration the inputs.",
"type": "object",
"default": ""
},
"output": {
"description": "The desired fluent-bit output, which defines in fluent-bit's configuration the outputs.",
"type": "object",
"default": ""
}
}
}
{
"$schema": "https://json-schema.org/draft/2019-09/schema",
"description": "Values returned by Fluent-bit instantiation.",
"additionalProperties": false,
"properties": {},
"type": "object"
}
{
"name": "Fluent-bit",
"description": "Fluent-bit",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"description": "Fluent-bit",
"request": "instance-input-schema.json",
"response": "instance-output-schema.json",
"index": 0
}
}
}
@@ -2,7 +2,7 @@
## Presentation ##
-* Deploy 3 different scripts (`cli.js`, `demo.js` and `manual-flight.js`) on a drone to fly it
+* Deploy `user.js` script on a drone to fly it
* Compile all required libraries to run flight scripts
@@ -14,12 +14,14 @@
* is-a-simulation: Must be set to 'true' to automatically take off during simulation
-* leader-id: Id of the drone chosen to be the leader of the swarm
* multicast-ipv6: IPv6 of the multicast group of the swarm
-* drone-id-list: Comma seperated list of the drone IDs of the swarm (recommanded to add the current drone ID)
+* net-if: Network interface used for multicast traffic
+* drone-id-list: List of the drone IDs of the swarm (recommended to add the current drone ID)
+* flight-script: User script to execute to fly drone swarm
## How it works ##
-Run `quickjs binary location` `desired script location`
+Run `quickjs binary location` `scripts location`/main.js `scripts location`/user.js
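For illustration only, a minimal sketch of what such a `user.js` flight script could look like; it relies on the callback/API names that `worker.js` in this commit exposes to user scripts (`me.onStart`, `me.onUpdate`, `me.setTargetCoordinates`, `me.getAltitudeAbs`, `me.landed`, `me.exit`), and the coordinates are invented for the example:

```js
// Hypothetical user.js sketch, not shipped with this software release.
// worker.js evaluates this file, then calls me.onStart() once and
// me.onUpdate(timestamp) on every update tick driven by main.js.
me.onStart = function () {
  // Illustrative target: latitude, longitude, absolute altitude, yaw.
  me.setTargetCoordinates(45.64, 14.25, me.getAltitudeAbs() + 10, 0);
};

me.onUpdate = function (timestamp) {
  if (me.landed()) {
    // Ends the worker and, through main.js, the whole quickjs process.
    me.exit(0);
  }
};
```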
@@ -14,24 +14,16 @@
# not need these here).
[instance-profile]
filename = instance.cfg
-md5sum = 99017d061d3be30746f6daef3a7bd8c4
+md5sum = 7d4969239eb9d46bb44d57fc32b68c44

-[cli]
-filename = cli.js
-md5sum = 33271aeec124301604fdd406f0b339d1
+[main]
+filename = main.js
+md5sum = 4b1b27ea3e06b8d40cbc33f0ec617601

-[common]
-filename = common.js
-md5sum = bacc70f2683c279ba4e0751de616d4ff

-[demo]
-filename = demo.js
-md5sum = 31d8511e6d297643e65febe9a3ed2428

-[manual-flight]
-filename = manual-flight.js
-md5sum = 175813fc8b2f19f91dae27ad4e14ab03

[pubsub]
filename = pubsub.js
-md5sum = d949c9a6cdaaa94e7bdd22df5e52fbf4
+md5sum = 4a0c63f9e088fa525a3699484d193c4d

+[worker]
+filename = worker.js
+md5sum = 5ed534e9ca56b9c0ee321b96b5d7c458
/*jslint indent2 */
/*global console, std */
import {
loiter,
setAirspeed,
setAltitude,
setTargetLatLong,
reboot
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {
connect,
displayDronePositions,
land,
quit,
startPubsub,
takeOff
} from "{{ common }}"; //jslint-quiet
/*jslint-disable*/
import * as std from "std";
/*jslint-enable*/
var running = false;
const wrongParameters = displayMessage.bind(null, "Wrong parameters");
function checkNumber(value, toExecute) {
return (
Number.isNaN(value)
? wrongParameters
: toExecute.bind(null, value)
);
}
function displayMessage(message) {
console.log(message);
return 0;
}
function exit() {
running = false;
quit();
return 0;
}
function getInput() {
let undefined_cmd;
let altitude;
let cmd;
let latitude;
let longitude;
let s;
let speed;
const help = `
connect
takeoff
land
goto(point)
gotoCoord(latitude, longitude)
altitude(altitude)
speed(speed)
positions
reboot
exit
help
`;
const f = std.fdopen(std.in, "r");
running = true;
while (running) {
std.printf("> ");
s = f.getline();
undefined_cmd = false;
switch (s) {
case "altitude":
std.printf("Altitude: ");
altitude = parseFloat(f.getline());
cmd = checkNumber(altitude, setAltitude);
break;
case "connect":
cmd = connect;
startPubsub();
break;
case "exit":
cmd = exit;
break;
case "gotoCoord":
std.printf("Latitude: ");
latitude = parseFloat(f.getline());
std.printf("Longitude: ");
longitude = parseFloat(f.getline());
cmd = checkNumber(longitude, checkNumber(latitude, setTargetLatLong));
break;
case "help":
cmd = displayMessage.bind(null, help);
break;
case "land":
cmd = land;
break;
case "loiter":
cmd = loiter;
break;
case "positions":
cmd = displayDronePositions;
break;
case "reboot":
cmd = reboot;
break;
case "speed":
std.printf("Speed: ");
speed = parseFloat(f.getline());
cmd = checkNumber(speed, setAirspeed);
break;
case "takeoff":
cmd = takeOff.bind(null, 60);
break;
default:
undefined_cmd = true;
cmd = displayMessage.bind(null, " Undefined command");
}
let ret = cmd();
if (ret) {
console.log(" [ERROR] function:\n", cmd, "\nreturn value:", ret);
}
else if (s !== "help" && !undefined_cmd) {
console.log(" Command successful");
}
};
f.close();
}
getInput();
/*jslint-disable*/
{% set comma_separated_drone_id_list = ', '.join(drone_id_list.split()) -%}
/*jslint-enable*/
import {
arm,
doParachute,
getAltitude,
getLatitude,
getLongitude,
getYaw,
initPubsub,
setAltitude,
setTargetLatLong,
start,
stop,
stopPubsub,
takeOffAndWait,
Drone
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {exit} from "std";
import {sleep, Worker} from "os";
const IP = "{{ autopilot_ip }}";
const PORT = "7909";
export const IS_LEADER = {{ is_leader }};
export const LEADER_ID = {{ leader_id }};
export const SIMULATION = {{ is_a_simulation }};
export const EPSILON = 105;
const EPSILON_YAW = 6;
const EPSILON_ALTITUDE = 5;
const TARGET_YAW = 0;
export const ALTITUDE_DIFF = 30;
const URL = "udp://" + IP + ":" + PORT;
const LOG_FILE = "{{ log_dir }}/mavsdk-log";
const droneIdList = [{{ comma_separated_drone_id_list }}];
const droneDict = {};
var pubsubRunning = false;
var pubsubWorker;
export function connect() {
console.log("Will connect to", URL);
exit_on_fail(start(URL, LOG_FILE, 60), "Failed to connect to " + URL);
}
export function distance(lat1, lon1, lat2, lon2) {
const R = 6371e3; // meters
const la1 = lat1 * Math.PI/180; // la, lo in radians
const la2 = lat2 * Math.PI/180;
const lo1 = lon1 * Math.PI/180;
const lo2 = lon2 * Math.PI/180;
//haversine formula
const sinLat = Math.sin((la2 - la1)/2);
const sinLon = Math.sin((lo2 - lo1)/2);
const h = sinLat*sinLat + Math.cos(la1)*Math.cos(la2)*sinLon*sinLon
return 2*R*Math.asin(Math.sqrt(h));
}
export function displayDronePositions() {
if(!pubsubRunning)
console.log("You must start pubsub first !");
else {
for (const [id, drone] of Object.entries(droneDict)) {
console.log(id, drone.latitude, drone.longitude, drone.altitudeAbs, drone.altitudeRel);
}
}
return 0;
}
function exit_on_fail(ret, msg) {
if(ret) {
console.log(msg);
quit();
exit(-1);
}
}
export function quit() {
stop();
if(pubsubRunning) {
stopPubsub();
}
}
export function goToAltitude(target_altitude, wait, go) {
if(go) {
exit_on_fail(
setAltitude(target_altitude),
`Failed to go to altitude ${target_altitude} m`
);
}
if(wait) {
waitForAltitude(target_altitude);
}
}
export function land() {
var yaw;
while(true) {
yaw = getYaw();
console.log(`[DEMO] Waiting for yaw... (${yaw} , ${TARGET_YAW})`);
if(Math.abs(yaw - TARGET_YAW) < EPSILON_YAW) {
break;
}
sleep(250);
}
console.log("[DEMO] Deploying parachute...");
exit_on_fail(doParachute(2), "Failed to deploy parachute");
}
export function setLatLong(latitude, longitude, target_altitude) {
var cur_latitude;
var cur_longitude;
var d;
if(target_altitude !== 0) {
setAltitude(target_altitude, false, true);
}
console.log(`Going to (${latitude}, ${longitude}) from
(${getLatitude()}, ${getLongitude()})`);
exit_on_fail(
setTargetLatLong(latitude, longitude),
`Failed to go to (${latitude}, ${longitude})`
);
sleep(500);
while(true) {
cur_latitude = getLatitude();
cur_longitude = getLongitude();
d = distance(cur_latitude, cur_longitude, latitude, longitude);
console.log(`Waiting for drone to get to destination (${d} m),
(${cur_latitude} , ${cur_longitude}), (${latitude}, ${longitude})`);
if(d < EPSILON) {
sleep(6000);
return;
}
sleep(1000);
}
}
export function startPubsub() {
pubsubWorker = new Worker("{{ pubsub_script }}");
pubsubWorker.onmessage = function(e) {
if (!e.data.publishing)
pubsubWorker.onmessage = null;
}
initPubsub(droneIdList.length);
for (let i = 0; i < droneIdList.length; i++) {
let id = droneIdList[i]
droneDict[id] = new Drone(id);
droneDict[id].init(i);
}
pubsubWorker.postMessage({ action: "run" });
pubsubRunning = true;
return droneDict;
}
export function takeOff(altitude) {
exit_on_fail(arm(), "Failed to arm");
takeOffAndWait();
goToAltitude(altitude, true, true);
}
function waitForAltitude(target_altitude) {
var altitude = getAltitude();
while(Math.abs(altitude - target_altitude) > EPSILON_ALTITUDE) {
console.log(
`[DEMO] Waiting for altitude... (${altitude} , ${target_altitude})`);
sleep(1000);
altitude = getAltitude();
}
}
/*jslint indent2 */
/*global console */
import {
getAltitude,
getAltitudeRel,
getInitialAltitude,
getLatitude,
getLongitude,
landed,
loiter,
setCheckpoint,
setTargetCoordinates
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {sleep} from "os";
import {
connect,
distance,
goToAltitude,
land,
quit,
setLatLong,
startPubsub,
takeOff,
ALTITUDE_DIFF,
IS_LEADER,
LEADER_ID,
SIMULATION
} from "{{ common }}"; //jslint-quiet
const FLIGH_ALTITUDE = 100;
const PARACHUTE_ALTITUDE = 35;
const checkpointList = [
{
"latitude": 45.64492790560583,
"longitude": 14.25334942966329,
"altitude": 585.1806861589965
},
{
"latitude": 45.64316335436476,
"longitude": 14.26332880184475,
"altitude": 589.8802607573035
},
{
"latitude": 45.64911917196595,
"longitude": 14.26214792790128,
"altitude": 608.6648153348965
},
{
"latitude": 45.64122685351364,
"longitude": 14.26590493128597,
"altitude": 606.1448368129072
},
{
"latitude": 45.64543355564817,
"longitude": 14.27242391207985,
"altitude": 630.0829598206344
},
{
"latitude": 45.6372792927328,
"longitude": 14.27533492411138,
"altitude": 616.1839898415284
},
{
"latitude": 45.64061299543953,
"longitude": 14.26161958465814,
"altitude": 598.0603137354178
},
{
"latitude": 45.64032340702919,
"longitude": 14.2682896662383,
"altitude": 607.1243119862851
}
];
const landingPoint = [
{
"latitude": 45.6398451,
"longitude": 14.2699217
}
];
let INITIAL_ALTITUDE;
let START_ALTITUDE;
var nextCheckpoint = 0;
var distanceToLandingPoint = 100;
var leaderAltitudeAbs;
var leaderAltitudeRel;
var leaderLatitude;
var leaderLongitude;
function followLeader(leaderId, initialAltitude, altitudeDiff) {
goToAltitude(START_ALTITUDE + ALTITUDE_DIFF, false, true);
while(droneDict[leaderId].altitudeAbs == 0) {
console.log("[DEMO] Waiting for leader to send its altitude");
sleep(1000);
}
while(droneDict[leaderId].altitudeAbs < initialAltitude) {
console.log(`[DEMO] Waiting for leader to reach altitude ${initialAltitude} (currently ${droneDict[leaderId].altitudeAbs})`);
sleep(1000);
}
console.log("[DEMO] Switching to following mode...\n");
do {
leaderAltitudeAbs = droneDict[leaderId].altitudeAbs;
leaderAltitudeRel = droneDict[leaderId].altitudeRel;
leaderLatitude = droneDict[leaderId].latitude;
leaderLongitude = droneDict[leaderId].longitude;
setTargetCoordinates(
leaderLatitude,
leaderLongitude,
leaderAltitudeAbs + altitudeDiff,
0
);
sleep(500);
} while(leaderAltitudeRel > PARACHUTE_ALTITUDE);
console.log("[DEMO] Stop following...\n");
nextCheckpoint = droneDict[leaderId].lastCheckpoint + 1;
}
function waitForAltitude(altitude) {
var curAltitude;
do {
sleep(1000);
curAltitude = getAltitude();
console.log(
`[DEMO] Waiting for altitude... (${curAltitude} , ${altitude})`);
}
while(curAltitude < altitude);
}
function waitForLanding() {
while(!landed()) {
sleep(1000);
}
}
console.log("[DEMO] Connecting...\n");
connect();
const droneDict = startPubsub();
INITIAL_ALTITUDE = getInitialAltitude();
START_ALTITUDE = INITIAL_ALTITUDE + FLIGH_ALTITUDE;
if(SIMULATION) {
takeOff(START_ALTITUDE + 1);
}
waitForAltitude(START_ALTITUDE);
console.log("[DEMO] Setting loiter mode...\n");
loiter();
sleep(3000);
if(!IS_LEADER) {
followLeader(LEADER_ID, START_ALTITUDE, ALTITUDE_DIFF);
}
for (let i = nextCheckpoint; i < checkpointList.length; i++) {
console.log(`[DEMO] Going to Checkpoint ${i}\n`);
setLatLong(checkpointList[i].latitude, checkpointList[i].longitude, checkpointList[i].altitude + FLIGH_ALTITUDE);
console.log(`[DEMO] Reached Checkpoint ${i}\n`);
setCheckpoint(i);
sleep(30000);
}
console.log("[DEMO] Setting altitude...\n");
goToAltitude(getAltitude() - getAltitudeRel() + PARACHUTE_ALTITUDE, true, true);
if(!IS_LEADER) {
setLatLong(
checkpointList[checkpointList.length - 1].latitude,
checkpointList[checkpointList.length - 1].longitude,
0
);
}
while(distanceToLandingPoint > 20) {
console.log(`[DEMO] Waiting to reache landing point (current distance is ${distanceToLandingPoint})`);
distanceToLandingPoint = distance(getLatitude(), getLongitude(), landingPoint.latitude, landingPoint.longitude);
}
console.log("[DEMO] Landing...\n");
land();
waitForLanding();
quit();
{
"$schema": "http://json-schema.org/draft-06/schema",
"type": "object",
"description": "Parameters to instantiate JS drone",
"additionalProperties": false,
"properties": {
"autopilot-ip": {
"title": "IP address of the drone's autopilot",
"description": "IP used to create a connection with the autopilot.",
"type": "string",
"default": "192.168.27.1"
},
"id": {
"title": "Drone ID",
"description": "Unique identifier of the drone.",
"type": "integer",
"default": 1
},
"is-a-simulation": {
"title": "Set the flight as a simulation",
"description": "The option used to determine if the flight is real or if it is a simulation. This affects the context of the flight (e.g. if the take off is manual or automatic).",
"type": "boolean",
"default": false
},
"multicast-ipv6": {
"title": "IP of the multicast group",
"description": "IP address used to communicate with the other drones.",
"type": "string",
"default": "ff15::1111"
},
"net-if": {
"title": "Network interface",
"description": "Interface used for multicast traffic.",
"type": "string",
"default": "eth0"
},
"drone-id-list": {
"title": "List of drones IDs",
"description": "List of identifiers of drones.",
"type": "array",
"default": []
},
"flight-script": {
"title": "Script of the flight",
"description": "Script which will be executed for the flight",
"type": "string",
"textarea": true
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by drone swarm (default) instantiation",
"additionalProperties": false,
"properties": {},
"type": "object"
}
[buildout]
parts =
-  cli
-  demo
-  manual-flight
+  main
+  user
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
@@ -27,51 +26,71 @@ cert = $${slap_connection:cert_file}
recipe = slapos.recipe.build
slapparameter-dict = $${slap-configuration:configuration}
init =
-  options['autopilot-ip'] = options['slapparameter-dict'].get('autopilot_ip', '192.168.27.1')
-  options['id'] = options['slapparameter-dict'].get('id', '1')
-  options['is-a-simulation'] = options['slapparameter-dict'].get('is_a_simulation', 'false')
-  options['leader-id'] = options['slapparameter-dict'].get('leader_id', '1')
-  options['is-leader'] = 'true' if options['id'] == options['leader-id'] else 'false'
-  options['multicast-ipv6'] = options['slapparameter-dict'].get('multicast_ip', 'ff15::1111')
-  options['drone-id-list'] = options['slapparameter-dict'].get('drone_id_list', '')
+  options['autopilot-ip'] = options['slapparameter-dict'].get('autopilot-ip', '192.168.27.1')
+  options['id'] = options['slapparameter-dict'].get('id', 1)
+  options['is-a-simulation'] = options['slapparameter-dict'].get('is-a-simulation', False)
+  options['multicast-ipv6'] = options['slapparameter-dict'].get('multicast-ip', 'ff15::1111')
+  options['net-if'] = options['slapparameter-dict'].get('net-if', 'eth0')
+  options['drone-id-list'] = options['slapparameter-dict'].get('drone-id-list', [])
+  options['is-a-drone'] = 'flight-script' in options['slapparameter-dict']
+  subscription_script = '''
+  me.onStart = function() {
+  me.f = me.fdopen(me.in, "r");
+  console.log("Use q to quit");
+  };
+  me.onUpdate= function(timestamp) {
+  while(me.f.getline() != "q") {
+  continue;
+  }
+  try {
+  me.f.close();;
+  } catch (error) {
+  console.error(error);
+  }
+  me.exit(0);
+  };
+  '''
+  options['flight-script'] = options['slapparameter-dict'].get('flight-script', subscription_script)

[js-dynamic-template]
recipe = slapos.recipe.template:jinja2
-rendered = $${directory:etc}/$${:_buildout_section_name_}
+rendered = $${directory:etc}/$${:_buildout_section_name_}.js
template = ${buildout:directory}/$${:_buildout_section_name_}.js
extra-context =
context =
  raw qjs_wrapper ${qjs-wrapper:location}/lib/libqjswrapper.so
  $${:extra-context}

-[common]
+[main]
<= js-dynamic-template
extra-context =
  key autopilot_ip drone:autopilot-ip
-  key drone_id_list drone:drone-id-list
+  key id drone:id
  key is_a_simulation drone:is-a-simulation
-  key is_leader drone:is-leader
+  key is_a_drone drone:is-a-drone
-  key leader_id drone:leader-id
  key log_dir directory:log
  key pubsub_script pubsub:rendered
+  key worker_script worker:rendered

-[cli]
-<= js-dynamic-template
-extra-context =
-  key common common:rendered

-[demo]
-<= js-dynamic-template
-extra-context =
-  key common common:rendered
+[pubsub]
+<= js-dynamic-template
+extra-context =
+  key ipv6 drone:multicast-ipv6
+  key net_if drone:net-if

-[manual-flight]
-<= js-dynamic-template
-extra-context =
-  key common common:rendered
+[user]
+recipe = slapos.recipe.template:jinja2
+output = $${directory:etc}/user.js
+context =
+  key script drone:flight-script
+inline = {{ script }}

-[pubsub]
-<= js-dynamic-template
-extra-context =
-  key id drone:id
-  key ipv6 drone:multicast-ipv6
+[worker]
+<= js-dynamic-template
+extra-context =
+  key drone_id_list drone:drone-id-list
+  key id drone:id
+  key is_a_drone drone:is-a-drone
/* global console */
import {
arm,
start,
stop,
stopPubsub,
takeOffAndWait
} from "{{ qjs_wrapper }}";
import { setTimeout, Worker } from "os";
import { exit } from "std";
(function (console, setTimeout, Worker) {
"use strict";
const IP = "{{ autopilot_ip }}",
URL = "udp://" + IP + ":7909",
LOG_FILE = "{{ log_dir }}/mavsdk-log",
IS_A_DRONE = {{ 'true' if is_a_drone else 'false' }},
SIMULATION = {{ 'true' if is_a_simulation else 'false' }};
// Use a Worker to ensure the user script
// does not block the main script
// (preventing it to be stopped for example)
// Create the update loop in the main script
// to prevent it to finish (and so, exit the quickjs process)
var pubsubWorker,
worker = new Worker("{{ worker_script }}"),
user_script = scriptArgs[1],
// Use the same FPS than browser's requestAnimationFrame
FPS = 1000 / 60,
previous_timestamp,
can_update = false;
function connect() {
console.log("Will connect to", URL);
exitOnFail(start(URL, LOG_FILE, 60), "Failed to connect to " + URL);
}
function exitOnFail(ret, msg) {
if (ret) {
console.log(msg);
quit(1);
}
}
function quit(is_a_drone, exit_code) {
if (is_a_drone) {
stop();
}
stopPubsub();
exit(exit_code);
}
if (IS_A_DRONE) {
console.log("Connecting to aupilot\n");
connect();
}
pubsubWorker = new Worker("{{ pubsub_script }}");
pubsubWorker.onmessage = function(e) {
if (!e.data.publishing) {
pubsubWorker.onmessage = null;
}
}
worker.postMessage({type: "initPubsub"});
function takeOff() {
exitOnFail(arm(), "Failed to arm");
takeOffAndWait();
}
function load() {
if (IS_A_DRONE && SIMULATION) {
takeOff();
}
// First argument must provide the user script path
if (user_script === undefined) {
console.log('Please provide the user_script path.');
quit(1);
}
worker.postMessage({
type: "load",
path: user_script
});
}
function loop() {
let timestamp = Date.now(),
timeout;
if (can_update) {
if (FPS <= (timestamp - previous_timestamp)) {
// Expected timeout between every update
can_update = false;
worker.postMessage({
type: "update",
timestamp: timestamp
});
// Try to stick to the expected FPS
timeout = FPS - (timestamp - previous_timestamp - FPS);
previous_timestamp = timestamp;
} else {
timeout = FPS - (timestamp - previous_timestamp);
}
} else {
// If timeout occurs, but update is not yet finished
// wait a bit
timeout = FPS / 4;
}
// Ensure loop is not done with timeout < 1ms
setTimeout(loop, Math.max(1, timeout));
}
worker.onmessage = function (e) {
let type = e.data.type;
if (type === 'initialized') {
pubsubWorker.postMessage({
action: "run",
id: {{ id }},
interval: FPS,
publish: IS_A_DRONE
});
load();
} else if (type === 'loaded') {
previous_timestamp = -FPS;
can_update = true;
// Start the update loop
loop();
} else if (type === 'updated') {
can_update = true;
} else if (type === 'exited') {
worker.onmessage = null;
quit(IS_A_DRONE, e.data.exit);
} else {
console.log('Unsupported message type', type);
quit(IS_A_DRONE, 1);
}
};
}(console, setTimeout, Worker));
/*jslint indent2 */
/*global console */
import {
getAltitude,
getInitialAltitude,
landed,
loiter,
setTargetCoordinates
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {sleep} from "os";
import {
connect,
goToAltitude,
quit,
startPubsub,
takeOff,
ALTITUDE_DIFF,
IS_LEADER,
LEADER_ID,
SIMULATION
} from "{{ common }}"; //jslint-quiet
const FLIGH_ALTITUDE = 100;
const PARACHUTE_ALTITUDE = 35;
let INITIAL_ALTITUDE;
let START_ALTITUDE;
var leaderAltitudeAbs;
var leaderAltitudeRel;
var leaderLatitude;
var leaderLongitude;
function followLeader(leaderId, initialAltitude, altitudeDiff) {
goToAltitude(START_ALTITUDE + ALTITUDE_DIFF, false, true);
while(droneDict[leaderId].altitudeAbs == 0) {
console.log("[DEMO] Waiting for leader to send its altitude");
sleep(1000);
}
while(droneDict[leaderId].altitudeAbs < initialAltitude) {
console.log(`[DEMO] Waiting for leader to reach altitude ${initialAltitude} (currently ${droneDict[leaderId].altitudeAbs})`);
sleep(1000);
}
console.log("[DEMO] Switching to following mode...\n");
do {
leaderAltitudeAbs = droneDict[leaderId].altitudeAbs;
leaderAltitudeRel = droneDict[leaderId].altitudeRel;
leaderLatitude = droneDict[leaderId].latitude;
leaderLongitude = droneDict[leaderId].longitude;
setTargetCoordinates(
leaderLatitude,
leaderLongitude,
leaderAltitudeAbs + altitudeDiff,
0
);
sleep(500);
} while(leaderAltitudeRel > PARACHUTE_ALTITUDE);
console.log("[DEMO] Stop following...\n");
}
function waitForAltitude(altitude) {
var curAltitude;
do {
sleep(1000);
curAltitude = getAltitude();
console.log(
`[DEMO] Waiting for altitude... (${curAltitude} , ${altitude})`);
}
while(curAltitude < altitude);
}
function waitForLanding() {
while(!landed()) {
sleep(1000);
}
}
const droneDict = startPubsub();
console.log("[DEMO] Connecting...\n");
connect();
while(getInitialAltitude() == 0) {
console.log("[DEMO] Waiting for first telemetry\n");
sleep(1000);
}
INITIAL_ALTITUDE = getInitialAltitude();
START_ALTITUDE = INITIAL_ALTITUDE + FLIGH_ALTITUDE;
if(SIMULATION) {
takeOff(START_ALTITUDE + 1);
}
waitForAltitude(START_ALTITUDE);
console.log("[DEMO] Setting loiter mode...\n");
loiter();
sleep(3000);
if(!IS_LEADER) {
followLeader(LEADER_ID, START_ALTITUDE, ALTITUDE_DIFF);
}
console.log("[DEMO] Loitering until manual intructions are given\n")
waitForLanding();
quit();
@@ -4,12 +4,12 @@ import {Worker} from "os";
const PORT = "4840";
const IPV6 = "{{ ipv6 }}";
-var parent = Worker.parent;
+let parent = Worker.parent;
function handle_msg(e) {
  switch(e.data.action) {
  case "run":
-    runPubsub(IPV6, PORT, "eth0", {{ id }});
+    runPubsub(IPV6, PORT, "{{ net_if }}", e.data.id, e.data.interval, e.data.publish);
    parent.postMessage({running: false});
    parent.onmessage = null;
    break;
@@ -6,11 +6,9 @@ extends =
parts =
  instance-profile
-  common
+  main
-  cli
-  demo
-  manual-flight
  pubsub
+  worker
  slapos-cookbook

[download-file-base]
@@ -23,17 +21,11 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg

-[common]
+[main]
<= download-file-base

-[cli]
-<= download-file-base

-[demo]
-<= download-file-base

-[manual-flight]
-<= download-file-base

[pubsub]
<= download-file-base

+[worker]
+<= download-file-base
{
"name": "JS Drone",
"description": "JS Drone",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"software-type": "default",
"description": "Default",
"request": "instance-input-schema.json",
"response": "instance-output-schema.json",
"index": 0
}
}
}
/* global console, std */
import {
Drone,
triggerParachute,
getAltitude,
getAltitudeRel,
getInitialAltitude,
getLatitude,
getLongitude,
getYaw,
initPubsub,
isInManualMode,
landed,
loiter,
setAirspeed,
setAltitude,
setManualControlInput,
setMessage,
setTargetCoordinates
} from "{{ qjs_wrapper }}";
import * as std from "std";
import { Worker } from "os";
(function (console, Worker) {
// Every script is evaluated per drone
"use strict";
const drone_dict = {},
drone_id_list = [{{ drone_id_list }}],
IS_A_DRONE = {{ 'true' if is_a_drone else 'false' }};
let parent = Worker.parent,
user_me = {
//for debugging purpose
fdopen: std.fdopen,
in: std.in,
//required to fly
triggerParachute: triggerParachute,
drone_dict: {},
exit: function(exit_code) {
parent.postMessage({type: "exited", exit: exit_code});
parent.onmessage = null;
},
getAltitudeAbs: getAltitude,
getCurrentPosition: function() {
return {
x: getLatitude(),
y: getLongitude(),
z: getAltitudeRel()
};
},
getInitialAltitude: getInitialAltitude,
getYaw: getYaw,
id: {{ id }},
landed: landed,
loiter: loiter,
sendMsg: function(msg, id = -1) {
setMessage(JSON.stringify({ content: msg, dest_id: id }));
},
setAirspeed: setAirspeed,
setAltitude: setAltitude,
setTargetCoordinates: setTargetCoordinates
};
function loadUserScript(path) {
let script_content = std.loadFile(path);
if (script_content === null) {
console.log("Failed to load user script " + path);
std.exit(1);
}
try {
std.evalScript(
"function execUserScript(from, me) {" +
script_content +
"};"
);
} catch (e) {
console.log("Failed to evaluate user script", e);
std.exit(1);
}
execUserScript(null, user_me);
// Call the drone onStart function
if (user_me.hasOwnProperty("onStart")) {
user_me.onStart();
}
}
function handleMainMessage(evt) {
let type = evt.data.type,
message,
drone_id;
if (type === "initPubsub") {
initPubsub(drone_id_list.length);
for (let i = 0; i < drone_id_list.length; i++) {
drone_id = drone_id_list[i];
user_me.drone_dict[drone_id] = new Drone(drone_id);
user_me.drone_dict[drone_id].init(i);
}
parent.postMessage({type: "initialized"});
} else if (type === "load") {
loadUserScript(evt.data.path);
parent.postMessage({type: "loaded"});
} else if (type === "update") {
for (const [id, drone] of Object.entries(user_me.drone_dict)) {
message = drone.message
if (message.length > 0) {
message = JSON.parse(message);
if (user_me.id === id) {
continue;
}
if (user_me.hasOwnProperty("onGetMsg") &&
[-1, user_me.id].includes(message.dest_id)) {
user_me.onGetMsg(message.content);
}
}
}
// Call the drone onStart function
if (user_me.hasOwnProperty("onUpdate")) {
if (IS_A_DRONE && isInManualMode()) {
setManualControlInput();
}
user_me.onUpdate(evt.data.timestamp);
}
parent.postMessage({type: "updated"});
} else {
throw new Error("Unsupported message type", type);
}
}
parent.onmessage = function (evt) {
try {
handleMainMessage(evt);
} catch (error) {
// Catch all potential bug to exit the main process
// if it occurs
console.log(error);
std.exit(1);
}
};
}(console, Worker));
@@ -125,7 +125,7 @@ inline =
  exec "$basedir/bin/mysqld" --defaults-file='{{defaults_file}}' "$@"

[versions]
-coverage = 4.5.1
+coverage = 5.5
ecdsa = 0.13
mysqlclient = 1.3.12
pycrypto = 2.6.1
#!{{ python_path }}
import json
import logging
from logging.handlers import RotatingFileHandler
import time
from websocket import create_connection
class enbWebSocket:
def __init__(self):
log_file = "{{ log_file }}"
self.logger = logging.getLogger('logger')
self.logger.setLevel(logging.INFO)
handler = RotatingFileHandler(log_file, maxBytes=30000, backupCount=2)
formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
handler.setFormatter(formatter)
self.logger.addHandler(handler)
if {{ testing }}:
return
self.ws_url = "ws://127.0.1.2:9001"
self.ws = create_connection(self.ws_url)
def close(self):
if {{ testing }}:
return
self.ws.close()
def send(self, msg):
self.ws.send(json.dumps(msg))
def recv(self, message_type):
for i in range(1,20):
r = json.loads(self.ws.recv())
if r['message'] == message_type:
return r
def stats(self):
if {{ testing }}:
from random import randint
r = {
'message': 'stats',
'samples': {'rx': [
{'sat': 0, 'max': randint(-500,-100) / 10.0},
{'sat': 0, 'max': randint(-500,-100) / 10.0},
]}
}
else:
self.send({
"message": "stats",
"samples": True,
"rf": True
})
r = self.recv('stats')
self.logger.info('Samples stats', extra={'data': r})
if __name__ == '__main__':
ws = enbWebSocket()
try:
while True:
ws.stats()
time.sleep({{ stats_period }})
finally:
ws.close()
@@ -16,27 +16,31 @@
[template]
filename = instance.cfg
-md5sum = 6e5c63c35b13f8920ea13e24c27d7b50
+md5sum = 8a6e6e065e3eeffc3d5d4d8bb68f4a43

+[amarisoft-stats.jinja2.py]
+_update_hash_filename_ = amarisoft-stats.jinja2.py
+md5sum = 6e0a052bd0ca08cc0c7b4880d3deffcc

[template-lte-enb-epc]
_update_hash_filename_ = instance-enb-epc.jinja2.cfg
-md5sum = 42959a0525d2e06e61310836656d5de6
+md5sum = f15c81f0f4c2aa59bc794d855e7d01c0

[template-lte-enb]
_update_hash_filename_ = instance-enb.jinja2.cfg
-md5sum = 04dbc8396cb112e86d5d1564df033b82
+md5sum = 68362f58fccc534757b6bb566d285fa6

[template-lte-gnb-epc]
_update_hash_filename_ = instance-gnb-epc.jinja2.cfg
-md5sum = 2b589e23486661c52f379c4d9b35e7a9
+md5sum = dd98a2d0cb4fadc869638166bde6d9c8

[template-lte-epc]
_update_hash_filename_ = instance-epc.jinja2.cfg
-md5sum = 9ffc54bf10757403c4b1809d1102ea52
+md5sum = 48755eb0b2eb87f80130d42026ea8f7b

[template-lte-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg
-md5sum = 01f4de736594a0810a66f847753fadf1
+md5sum = 45ff1f5f4e98867449dcff7caae678f8

[template-lte-mme]
_update_hash_filename_ = instance-mme.jinja2.cfg
@@ -44,11 +48,11 @@ md5sum = 728f4d3ae248710c23e4b73eea4d628e
[template-lte-ue-lte]
_update_hash_filename_ = instance-ue-lte.jinja2.cfg
-md5sum = 4517baee6700afbf78de3b2d7dd4ba22
+md5sum = 38ef57bc93f53338187bd9c39fe4f0c5

[template-lte-ue-nr]
_update_hash_filename_ = instance-ue-nr.jinja2.cfg
-md5sum = 45e7ade19a579d6750d2733a0659dc1a
+md5sum = eaaf8fb3806a941f188eecdb90d38bda

[ue_db.jinja2.cfg]
filename = config/ue_db.jinja2.cfg
@@ -56,7 +60,7 @@ md5sum = dcaac06553a3222b14c0013a13f4a149
[enb.jinja2.cfg]
filename = config/enb.jinja2.cfg
-md5sum = 9fd6abb1e69fe59af0dafbb336f24f8d
+md5sum = 1c9909f691b5bb00d3fc30d41613e26a

[sib23.asn]
filename = config/sib23.asn
@@ -64,7 +68,7 @@ md5sum = b377dac7f1fcf94fb9ce9ebed617f36a
[gnb.jinja2.cfg]
filename = config/gnb.jinja2.cfg
-md5sum = 407c20d11c5417636b985fe4886bf135
+md5sum = 16698f0b4ea3e6cf40e9f070af2439e5

[ltelogs.jinja2.sh]
filename = ltelogs.jinja2.sh
@@ -88,8 +92,24 @@ md5sum = b3078deab008d7e81ddd88ac02b8b698
[sdr-busy-promise]
_update_hash_filename_ = promise/check_sdr_busy.py
-md5sum = 2e25144d1d952ed5bebb194360b60afb
+md5sum = b0c65aefa60a9d5b9f7e7b38383db48b

+[cell-gain-saturated-promise]
+_update_hash_filename_ = promise/check_cell_gain_saturated.py
+md5sum = 764ca8913ea40964382848cf3233c1f7

+[rx-saturated-promise]
+_update_hash_filename_ = promise/check_rx_saturated.py
+md5sum = 40cf5389fd91845f4426ef159fb20fc0

+[amarisoft-stats-log-promise]
+_update_hash_filename_ = promise/check_amarisoft_stats_log.py
+md5sum = 5bf57a9074ea5b054d999789cbbe2c87

+[cpu-temperature-promise]
_update_hash_filename_ = promise/check_cpu_temperature.py
md5sum = 8e8f836554d81e2979e39dfb50e05b88
[interface-up-promise] [interface-up-promise]
_update_hash_filename_ = promise/check_interface_up.py _update_hash_filename_ = promise/check_interface_up.py
md5sum = 7797aeaf02e055226249f2c08209775a md5sum = 44ae5693f62b7a4dbc98f700f68d8600
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
}, },
tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_lte_tx_gain']) }}, tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_lte_tx_gain']) }},
rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_lte_rx_gain']) }}, rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_lte_rx_gain']) }},
com_addr: "127.0.1.2:9001",
mme_list: [ mme_list: [
{% if slapparameter_dict.get('mme_list', '') %} {% if slapparameter_dict.get('mme_list', '') %}
......
...@@ -38,6 +38,7 @@ ...@@ -38,6 +38,7 @@
tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_nr_tx_gain']) }}, tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_nr_tx_gain']) }},
rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_nr_rx_gain']) }}, rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_nr_rx_gain']) }},
{% endif %} {% endif %}
com_addr: "127.0.1.2:9001",
amf_list: [ amf_list: [
{% if slapparameter_dict.get('amf_list', '') %} {% if slapparameter_dict.get('amf_list', '') %}
......
...@@ -14,6 +14,7 @@ parts = ...@@ -14,6 +14,7 @@ parts =
directory directory
lte-enb-request lte-enb-request
lte-mme-request lte-mme-request
cpu-temperature-promise
publish-connection-information publish-connection-information
{% for part in part_list -%} {% for part in part_list -%}
{{ ' %s' % part }} {{ ' %s' % part }}
...@@ -59,6 +60,7 @@ cert-file = {{ slap_connection['cert-file'] }} ...@@ -59,6 +60,7 @@ cert-file = {{ slap_connection['cert-file'] }}
sla-computer_guid = {{ slap_connection['computer-id'] }} sla-computer_guid = {{ slap_connection['computer-id'] }}
config-monitor-password = ${monitor-htpasswd:passwd} config-monitor-password = ${monitor-htpasswd:passwd}
config-sub-instance = true
[lte-mme-request] [lte-mme-request]
<= request-common-base <= request-common-base
...@@ -85,6 +87,14 @@ return = monitor-base-url ...@@ -85,6 +87,14 @@ return = monitor-base-url
lte-mme-request = ${lte-mme-request:connection-monitor-base-url} lte-mme-request = ${lte-mme-request:connection-monitor-base-url}
lte-enb-request = ${lte-enb-request:connection-monitor-base-url} lte-enb-request = ${lte-enb-request:connection-monitor-base-url}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-max-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
[publish-connection-information] [publish-connection-information]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
<= monitor-publish <= monitor-publish
......
...@@ -119,6 +119,24 @@ ...@@ -119,6 +119,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
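For illustration only (the values below are arbitrary), these new parameters could be passed in the instance parameter dict of a request as:

  {
    "enb_stats_fetch_period": 30,
    "max_rx_sample_db": -5,
    "promise_cpu_temperature_threshold": 75
  }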
...@@ -4,7 +4,14 @@ parts = ...@@ -4,7 +4,14 @@ parts =
ltelogs ltelogs
lte-enb-config lte-enb-config
lte-enb-service lte-enb-service
amarisoft-stats-service
sdr-busy-promise sdr-busy-promise
cell-gain-saturated-promise
rx-saturated-promise
amarisoft-stats-log-promise
{% if not slapparameter_dict.get("sub-instance", False) %}
cpu-temperature-promise
{% endif %}
monitor-base monitor-base
publish-connection-information publish-connection-information
...@@ -97,6 +104,30 @@ environment = ...@@ -97,6 +104,30 @@ environment =
LD_LIBRARY_PATH={{ openssl_location }}/lib LD_LIBRARY_PATH={{ openssl_location }}/lib
AMARISOFT_PATH=/opt/amarisoft/.amarisoft AMARISOFT_PATH=/opt/amarisoft/.amarisoft
[amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-stats.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
  raw python_path {{ buildout_directory }}/bin/pythonwitheggs
mode = 0775
url = {{ amarisoft_stats_template }}
output = ${directory:bin}/amarisoft-stats.py
### eNodeB (enb)
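# Run the rendered statistics script as a service; listing it in hash-files
# makes the service restart whenever the rendered script changes.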
[amarisoft-stats-service]
recipe = slapos.cookbook:wrapper
command-line = ${amarisoft-stats-template:output}
wrapper-path = ${directory:service}/amarisoft-stats
mode = 0775
hash-files =
${amarisoft-stats-template:output}
[config-base] [config-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do extensions = jinja2.ext.do
...@@ -130,7 +161,6 @@ monitor-title = {{ slapparameter_dict['name'] | string }} ...@@ -130,7 +161,6 @@ monitor-title = {{ slapparameter_dict['name'] | string }}
password = {{ slapparameter_dict['monitor-password'] | string }} password = {{ slapparameter_dict['monitor-password'] | string }}
{% endif %} {% endif %}
# Add custom promise to check if /dev/sdr0 is busy
[sdr-busy-promise] [sdr-busy-promise]
recipe = slapos.cookbook:promise.plugin recipe = slapos.cookbook:promise.plugin
eggs = slapos.core eggs = slapos.core
...@@ -138,3 +168,44 @@ file = {{ sdr_busy_promise }} ...@@ -138,3 +168,44 @@ file = {{ sdr_busy_promise }}
output = ${directory:plugins}/check-sdr-busy.py output = ${directory:plugins}/check-sdr-busy.py
config-testing = {{ slapparameter_dict.get("testing", False) }} config-testing = {{ slapparameter_dict.get("testing", False) }}
config-sdr = {{ sdr }} config-sdr = {{ sdr }}
[cell-gain-saturated-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ cell_gain_saturated_promise }}
output = ${directory:plugins}/check-cell-gain-saturated.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
[rx-saturated-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ rx_saturated_promise }}
output = ${directory:plugins}/check-rx-saturated.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
config-max-rx-sample-db = {{ slapparameter_dict.get("max_rx_sample_db", 0) }}
[amarisoft-stats-log-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ amarisoft_stats_log_promise }}
output = ${directory:plugins}/check-amarisoft-stats-log.py
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-max-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
...@@ -26,6 +26,12 @@ ...@@ -26,6 +26,12 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -13,6 +13,9 @@ info = Your SIM card has been attached to service ${slap-configuration:instance- ...@@ -13,6 +13,9 @@ info = Your SIM card has been attached to service ${slap-configuration:instance-
parts = parts =
directory directory
lte-mme-request lte-mme-request
{% if not slapparameter_dict.get("sub-instance", False) %}
cpu-temperature-promise
{% endif %}
publish-connection-information publish-connection-information
{% for part in part_list -%} {% for part in part_list -%}
{{ ' %s' % part }} {{ ' %s' % part }}
...@@ -73,6 +76,14 @@ config-slave-list = {{ dumps(slave_instance_list) }} ...@@ -73,6 +76,14 @@ config-slave-list = {{ dumps(slave_instance_list) }}
[monitor-base-url-dict] [monitor-base-url-dict]
lte-mme-request = ${lte-mme-request:connection-monitor-base-url} lte-mme-request = ${lte-mme-request:connection-monitor-base-url}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-max-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
[publish-connection-information] [publish-connection-information]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
<= monitor-publish <= monitor-publish
......
...@@ -14,6 +14,7 @@ parts = ...@@ -14,6 +14,7 @@ parts =
directory directory
lte-gnb-request lte-gnb-request
lte-mme-request lte-mme-request
cpu-temperature-promise
publish-connection-information publish-connection-information
{% for part in part_list -%} {% for part in part_list -%}
{{ ' %s' % part }} {{ ' %s' % part }}
...@@ -59,6 +60,7 @@ cert-file = {{ slap_connection['cert-file'] }} ...@@ -59,6 +60,7 @@ cert-file = {{ slap_connection['cert-file'] }}
sla-computer_guid = {{ slap_connection['computer-id'] }} sla-computer_guid = {{ slap_connection['computer-id'] }}
config-monitor-password = ${monitor-htpasswd:passwd} config-monitor-password = ${monitor-htpasswd:passwd}
config-sub-instance = true
[lte-mme-request] [lte-mme-request]
<= request-common-base <= request-common-base
...@@ -85,6 +87,14 @@ return = monitor-base-url ...@@ -85,6 +87,14 @@ return = monitor-base-url
lte-mme-request = ${lte-mme-request:connection-monitor-base-url} lte-mme-request = ${lte-mme-request:connection-monitor-base-url}
lte-gnb-request = ${lte-gnb-request:connection-monitor-base-url} lte-gnb-request = ${lte-gnb-request:connection-monitor-base-url}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-max-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
[publish-connection-information] [publish-connection-information]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
<= monitor-publish <= monitor-publish
......
...@@ -171,6 +171,24 @@ ...@@ -171,6 +171,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -4,7 +4,14 @@ parts = ...@@ -4,7 +4,14 @@ parts =
ltelogs ltelogs
lte-gnb-config lte-gnb-config
lte-enb-service lte-enb-service
amarisoft-stats-service
sdr-busy-promise sdr-busy-promise
cell-gain-saturated-promise
rx-saturated-promise
amarisoft-stats-log-promise
{% if not slapparameter_dict.get("sub-instance", False) %}
cpu-temperature-promise
{% endif %}
monitor-base monitor-base
publish-connection-information publish-connection-information
...@@ -98,6 +105,30 @@ environment = ...@@ -98,6 +105,30 @@ environment =
LD_LIBRARY_PATH={{ openssl_location }}/lib LD_LIBRARY_PATH={{ openssl_location }}/lib
AMARISOFT_PATH=/opt/amarisoft/.amarisoft AMARISOFT_PATH=/opt/amarisoft/.amarisoft
[amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-stats.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
  raw python_path {{ buildout_directory }}/bin/pythonwitheggs
mode = 0775
url = {{ amarisoft_stats_template }}
output = ${directory:bin}/amarisoft-stats.py
### eNodeB (enb)
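# Run the rendered statistics script as a service; listing it in hash-files
# makes the service restart whenever the rendered script changes.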
[amarisoft-stats-service]
recipe = slapos.cookbook:wrapper
command-line = ${amarisoft-stats-template:output}
wrapper-path = ${directory:service}/amarisoft-stats
mode = 0775
hash-files =
${amarisoft-stats-template:output}
[config-base] [config-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do extensions = jinja2.ext.do
...@@ -130,7 +161,6 @@ monitor-title = {{ slapparameter_dict['name'] | string }} ...@@ -130,7 +161,6 @@ monitor-title = {{ slapparameter_dict['name'] | string }}
password = {{ slapparameter_dict['monitor-password'] | string }} password = {{ slapparameter_dict['monitor-password'] | string }}
{% endif %} {% endif %}
# Add custom promise to check if /dev/sdr0 is busy
[sdr-busy-promise] [sdr-busy-promise]
recipe = slapos.cookbook:promise.plugin recipe = slapos.cookbook:promise.plugin
eggs = slapos.core eggs = slapos.core
...@@ -138,3 +168,44 @@ file = {{ sdr_busy_promise }} ...@@ -138,3 +168,44 @@ file = {{ sdr_busy_promise }}
output = ${directory:plugins}/check-sdr-busy.py output = ${directory:plugins}/check-sdr-busy.py
config-testing = {{ slapparameter_dict.get("testing", False) }} config-testing = {{ slapparameter_dict.get("testing", False) }}
config-sdr = {{ sdr }} config-sdr = {{ sdr }}
[cell-gain-saturated-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ cell_gain_saturated_promise }}
output = ${directory:plugins}/check-cell-gain-saturated.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
[rx-saturated-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ rx_saturated_promise }}
output = ${directory:plugins}/check-rx-saturated.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
config-max-rx-sample-db = {{ slapparameter_dict.get("max_rx_sample_db", 0) }}
[amarisoft-stats-log-promise]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.core
python-dateutil
file = {{ amarisoft_stats_log_promise }}
output = ${directory:plugins}/check-amarisoft-stats-log.py
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-max-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
...@@ -117,6 +117,24 @@ ...@@ -117,6 +117,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -99,6 +99,24 @@ ...@@ -99,6 +99,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -169,6 +169,24 @@ ...@@ -169,6 +169,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -151,6 +151,24 @@ ...@@ -151,6 +151,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -117,6 +117,24 @@ ...@@ -117,6 +117,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -99,6 +99,24 @@ ...@@ -99,6 +99,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -169,6 +169,24 @@ ...@@ -169,6 +169,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -151,6 +151,24 @@ ...@@ -151,6 +151,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -117,6 +117,24 @@ ...@@ -117,6 +117,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -99,6 +99,24 @@ ...@@ -99,6 +99,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -169,6 +169,24 @@ ...@@ -169,6 +169,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -151,6 +151,24 @@ ...@@ -151,6 +151,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -117,6 +117,24 @@ ...@@ -117,6 +117,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -99,6 +99,24 @@ ...@@ -99,6 +99,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -169,6 +169,24 @@ ...@@ -169,6 +169,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -151,6 +151,24 @@ ...@@ -151,6 +151,24 @@
"title": "Use IPv4", "title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses", "description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean" "type": "boolean"
},
"enb_stats_fetch_period": {
"title": "eNB statistics fetch period (seconds)",
"description": "Describes how often a call to Amarisoft remote API is made to get eNB statistics",
"type": "number",
"default": 60
},
"max_rx_sample_db": {
"title": "Maximum RX sample value (dB)",
"description": "Maximum RX sample threshold above which RX saturated promise will fail",
"type": "number",
"default": 0
},
"promise_cpu_temperature_threshold": {
"title": "CPU temperature promise threshold",
"description": "Temperature threshold above which CPU temperature promise will fail",
"type": "number",
"default": 80
} }
} }
} }
...@@ -5,6 +5,7 @@ parts = ...@@ -5,6 +5,7 @@ parts =
lte-ue-lte-config lte-ue-lte-config
lte-ue-service lte-ue-service
sdr-busy-promise sdr-busy-promise
cpu-temperature-promise
monitor-base monitor-base
publish-connection-information publish-connection-information
...@@ -128,3 +129,11 @@ file = {{ sdr_busy_promise }} ...@@ -128,3 +129,11 @@ file = {{ sdr_busy_promise }}
output = ${directory:plugins}/check-sdr-busy.py output = ${directory:plugins}/check-sdr-busy.py
config-testing = {{ slapparameter_dict.get("testing", False) }} config-testing = {{ slapparameter_dict.get("testing", False) }}
config-sdr = {{ sdr }} config-sdr = {{ sdr }}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-maxtemp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
...@@ -5,6 +5,7 @@ parts = ...@@ -5,6 +5,7 @@ parts =
lte-ue-nr-config lte-ue-nr-config
lte-ue-service lte-ue-service
sdr-busy-promise sdr-busy-promise
cpu-temperature-promise
monitor-base monitor-base
publish-connection-information publish-connection-information
...@@ -131,3 +132,11 @@ file = {{ sdr_busy_promise }} ...@@ -131,3 +132,11 @@ file = {{ sdr_busy_promise }}
output = ${directory:plugins}/check-sdr-busy.py output = ${directory:plugins}/check-sdr-busy.py
config-testing = {{ slapparameter_dict.get("testing", False) }} config-testing = {{ slapparameter_dict.get("testing", False) }}
config-sdr = {{ sdr }} config-sdr = {{ sdr }}
[cpu-temperature-promise]
recipe = slapos.cookbook:promise.plugin
eggs = slapos.core
file = {{ cpu_temperature_promise }}
output = ${directory:plugins}/check-cpu-temperature.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-maxtemp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 80) }}
...@@ -75,6 +75,7 @@ extensions = jinja2.ext.do ...@@ -75,6 +75,7 @@ extensions = jinja2.ext.do
extra-context = extra-context =
raw monitor_template ${monitor2-template:output} raw monitor_template ${monitor2-template:output}
key slave_instance_list slap-configuration:slave-instance-list key slave_instance_list slap-configuration:slave-instance-list
raw cpu_temperature_promise ${cpu-temperature-promise:target}
[dynamic-template-lte-gnb-epc] [dynamic-template-lte-gnb-epc]
< = jinja2-template-base < = jinja2-template-base
...@@ -84,6 +85,7 @@ extensions = jinja2.ext.do ...@@ -84,6 +85,7 @@ extensions = jinja2.ext.do
extra-context = extra-context =
raw monitor_template ${monitor2-template:output} raw monitor_template ${monitor2-template:output}
key slave_instance_list slap-configuration:slave-instance-list key slave_instance_list slap-configuration:slave-instance-list
raw cpu_temperature_promise ${cpu-temperature-promise:target}
[dynamic-template-lte-epc] [dynamic-template-lte-epc]
< = jinja2-template-base < = jinja2-template-base
...@@ -93,6 +95,7 @@ extensions = jinja2.ext.do ...@@ -93,6 +95,7 @@ extensions = jinja2.ext.do
extra-context = extra-context =
raw monitor_template ${monitor2-template:output} raw monitor_template ${monitor2-template:output}
key slave_instance_list slap-configuration:slave-instance-list key slave_instance_list slap-configuration:slave-instance-list
raw cpu_temperature_promise ${cpu-temperature-promise:target}
[dynamic-template-lte-enb] [dynamic-template-lte-enb]
< = jinja2-template-base < = jinja2-template-base
...@@ -106,7 +109,12 @@ extra-context = ...@@ -106,7 +109,12 @@ extra-context =
raw enb_template ${enb.jinja2.cfg:target} raw enb_template ${enb.jinja2.cfg:target}
raw sib23 ${sib23.asn:target} raw sib23 ${sib23.asn:target}
raw ltelogs_template ${ltelogs.jinja2.sh:target} raw ltelogs_template ${ltelogs.jinja2.sh:target}
raw amarisoft_stats_template ${amarisoft-stats.jinja2.py:target}
raw sdr_busy_promise ${sdr-busy-promise:target} raw sdr_busy_promise ${sdr-busy-promise:target}
raw cell_gain_saturated_promise ${cell-gain-saturated-promise:target}
raw rx_saturated_promise ${rx-saturated-promise:target}
raw amarisoft_stats_log_promise ${amarisoft-stats-log-promise:target}
raw cpu_temperature_promise ${cpu-temperature-promise:target}
raw openssl_location ${openssl:location} raw openssl_location ${openssl:location}
raw default_dl_earfcn ${default-params:default-dl-earfcn} raw default_dl_earfcn ${default-params:default-dl-earfcn}
raw default_lte_dl_freq ${default-params:default-lte-dl-freq} raw default_lte_dl_freq ${default-params:default-lte-dl-freq}
...@@ -129,7 +137,12 @@ extra-context = ...@@ -129,7 +137,12 @@ extra-context =
key sdr amarisoft:sdr key sdr amarisoft:sdr
raw gnb_template ${gnb.jinja2.cfg:target} raw gnb_template ${gnb.jinja2.cfg:target}
raw ltelogs_template ${ltelogs.jinja2.sh:target} raw ltelogs_template ${ltelogs.jinja2.sh:target}
raw amarisoft_stats_template ${amarisoft-stats.jinja2.py:target}
raw sdr_busy_promise ${sdr-busy-promise:target} raw sdr_busy_promise ${sdr-busy-promise:target}
raw cell_gain_saturated_promise ${cell-gain-saturated-promise:target}
raw rx_saturated_promise ${rx-saturated-promise:target}
raw amarisoft_stats_log_promise ${amarisoft-stats-log-promise:target}
raw cpu_temperature_promise ${cpu-temperature-promise:target}
raw openssl_location ${openssl:location} raw openssl_location ${openssl:location}
raw default_dl_nr_arfcn ${default-params:default-dl-nr-arfcn} raw default_dl_nr_arfcn ${default-params:default-dl-nr-arfcn}
raw default_nr_band ${default-params:default-nr-band} raw default_nr_band ${default-params:default-nr-band}
...@@ -141,6 +154,7 @@ extra-context = ...@@ -141,6 +154,7 @@ extra-context =
raw default_nr_inactivity_timer ${default-params:default-nr-inactivity-timer} raw default_nr_inactivity_timer ${default-params:default-nr-inactivity-timer}
raw min_frequency ${default-params:min-frequency} raw min_frequency ${default-params:min-frequency}
raw max_frequency ${default-params:max-frequency} raw max_frequency ${default-params:max-frequency}
raw python_path ${python3:location}
[dynamic-template-lte-mme] [dynamic-template-lte-mme]
< = jinja2-template-base < = jinja2-template-base
...@@ -172,6 +186,7 @@ extra-context = ...@@ -172,6 +186,7 @@ extra-context =
raw ue_lte_template ${ue-lte.jinja2.cfg:target} raw ue_lte_template ${ue-lte.jinja2.cfg:target}
raw ltelogs_template ${ltelogs.jinja2.sh:target} raw ltelogs_template ${ltelogs.jinja2.sh:target}
raw sdr_busy_promise ${sdr-busy-promise:target} raw sdr_busy_promise ${sdr-busy-promise:target}
raw cpu_temperature_promise ${cpu-temperature-promise:target}
raw openssl_location ${openssl:location} raw openssl_location ${openssl:location}
raw default_dl_earfcn ${default-params:default-dl-earfcn} raw default_dl_earfcn ${default-params:default-dl-earfcn}
raw default_lte_dl_freq ${default-params:default-lte-dl-freq} raw default_lte_dl_freq ${default-params:default-lte-dl-freq}
...@@ -194,6 +209,7 @@ extra-context = ...@@ -194,6 +209,7 @@ extra-context =
key ue amarisoft:ue key ue amarisoft:ue
key sdr amarisoft:sdr key sdr amarisoft:sdr
raw ue_nr_template ${ue-nr.jinja2.cfg:target} raw ue_nr_template ${ue-nr.jinja2.cfg:target}
raw cpu_temperature_promise ${cpu-temperature-promise:target}
raw ltelogs_template ${ltelogs.jinja2.sh:target} raw ltelogs_template ${ltelogs.jinja2.sh:target}
raw sdr_busy_promise ${sdr-busy-promise:target} raw sdr_busy_promise ${sdr-busy-promise:target}
raw openssl_location ${openssl:location} raw openssl_location ${openssl:location}
......
import json
import os
from datetime import datetime
from dateutil import parser
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
# Get latest timestamp from JSON log
def get_latest_timestamp(log):
log_number = 0
while True:
try:
f = open("{}.{}".format(log, log_number) if log_number else log, "rb")
except OSError:
return 0
try:
f.seek(0, os.SEEK_END)
try:
while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
pass
except OSError:
break
l = json.loads(f.readline().decode().replace("'", '"'))
return parser.parse(l['time'])
finally:
f.close()
log_number += 1
return 0
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=1)
def sense(self):
amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
stats_period = int(self.getConfig('stats-period'))
latest_timestamp = get_latest_timestamp(amarisoft_stats_log)
delta = (datetime.now() - latest_timestamp).total_seconds()
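    # The log is considered stale when its newest entry is older than two
    # fetch periods.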
if delta > stats_period * 2:
self.logger.error("Latest entry from amarisoft statistics log too "\
"old (%s seconds old)" % (delta,))
else:
self.logger.info("Latest entry from amarisoft statistics is "\
"%s seconds old" % (delta,))
def test(self):
"""
Called after sense() if the instance is still converging.
Returns success or failure based on sense results.
In this case, fail if the previous sensor result is negative.
"""
return self._test(result_count=1, failure_amount=1)
def anomaly(self):
"""
Called after sense() if the instance has finished converging.
Returns success or failure based on sense results.
Failure signals the instance has diverged.
In this case, fail if two out of the last three results are negative.
"""
return self._anomaly(result_count=1, failure_amount=1)
import json
import logging
import os
from dateutil import parser
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
# Get latest data and data from "interval" seconds ago from JSON log
def get_data_interval(log, interval):
log_number = 0
latest_timestamp = 0
latest_data = {}
while True:
try:
f = open("{}.{}".format(log, log_number) if log_number else log, "rb")
except OSError:
return latest_data, {}
try:
f.seek(0, os.SEEK_END)
while True:
try:
while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
pass
except OSError:
break
pos = f.tell()
l = json.loads(f.readline().decode().replace("'", '"'))
timestamp = parser.parse(l['time'])
if not latest_timestamp:
latest_timestamp = timestamp
latest_data = l['data']
if (latest_timestamp - timestamp).total_seconds() > interval:
return latest_data, l['data']
f.seek(pos, os.SEEK_SET)
finally:
f.close()
log_number += 1
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
self.__name = config.get('name', None)
self.__log_folder = config.get('log-folder', None)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=1)
self.__title = os.path.splitext(self.__name)[0]
self.__log_file = os.path.join(self.__log_folder, '%s.json.log' % self.__title)
self.json_logger = logging.getLogger('json_logger')
self.json_logger.setLevel(logging.INFO)
handler = logging.FileHandler(self.__log_file)
formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
handler.setFormatter(formatter)
self.json_logger.addHandler(handler)
def sense(self):
testing = self.getConfig('testing') == "True"
amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
stats_period = int(self.getConfig('stats-period'))
latest_data, previous_data = get_data_interval(amarisoft_stats_log, stats_period * 2)
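    # Sum the per-antenna saturation counters reported in one stats sample.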
def get_saturation_events(data):
if data:
return sum([x['sat'] for x in data['samples']['rx']])
return 0
saturation_events = get_saturation_events(latest_data) - \
get_saturation_events(previous_data)
self.json_logger.info("Saturation events",
extra={'data': {'recent_saturation_events': saturation_events}})
if saturation_events:
self.logger.error("Reception saturated, please lower cell_gain")
else:
self.logger.info("No saturation events on reception")
def test(self):
"""
Called after sense() if the instance is still converging.
Returns success or failure based on sense results.
In this case, fail if the previous sensor result is negative.
"""
return self._test(result_count=1, failure_amount=1)
def anomaly(self):
"""
Called after sense() if the instance has finished converging.
Returns success or failure based on sense results.
Failure signals the instance has diverged.
In this case, fail if two out of the last three results are negative.
"""
return self._anomaly(result_count=1, failure_amount=1)
import logging
import json
import os
import psutil
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
self.__name = config.get('name', None)
self.__log_folder = config.get('log-folder', None)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=2)
self.__title = os.path.splitext(self.__name)[0]
self.__log_file = os.path.join(self.__log_folder, '%s.json.log' % self.__title)
self.json_logger = logging.getLogger('json_logger')
self.json_logger.setLevel(logging.INFO)
handler = logging.FileHandler(self.__log_file)
formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
handler.setFormatter(formatter)
self.json_logger.addHandler(handler)
def sense(self):
max_temp = int(self.getConfig('max-temp', 80))
testing = self.getConfig('testing') == "True"
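    # In testing mode, report a random plausible temperature instead of
    # reading the hardware sensors.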
if testing:
from random import randint
cpu_temp = randint(40, 75)
else:
data = psutil.sensors_temperatures()
cpu_temp = data['coretemp'][0][1]
data = json.dumps({'cpu_temperature': cpu_temp})
if cpu_temp > max_temp:
self.logger.error("Temperature too high (%s > %s)" % (cpu_temp, max_temp))
self.json_logger.info("Temperature too high (%s > %s)" % (cpu_temp, max_temp), extra={'data': data})
else:
self.logger.info("Temperature OK")
self.json_logger.info("Temperature OK", extra={'data': data})
def test(self):
"""
Called after sense() if the instance is still converging.
Returns success or failure based on sense results.
In this case, fail if the previous sensor result is negative.
"""
return self._test(result_count=1, failure_amount=1)
def anomaly(self):
"""
Called after sense() if the instance has finished converging.
Returns success or failure based on sense results.
Failure signals the instance has diverged.
In this case, fail if two out of the last three results are negative.
"""
return self._anomaly(result_count=3, failure_amount=2)
...@@ -5,28 +5,17 @@ from zope.interface import implementer ...@@ -5,28 +5,17 @@ from zope.interface import implementer
from slapos.grid.promise import interface from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise) @implementer(interface.IPromise)
class RunPromise(GenericPromise): class RunPromise(GenericPromise):
def __init__(self, config): def __init__(self, config):
"""
Called when initialising the promise before testing.
Sets the configuration and the periodicity.
"""
super(RunPromise, self).__init__(config) super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=2) self.setPeriodicity(minute=2)
def sense(self): def sense(self):
"""
Called every time the promise is tested.
Signals a positive or negative result.
In this case, check whether the file exists.
"""
ifname = self.getConfig('ifname') ifname = self.getConfig('ifname')
testing = self.getConfig('testing') == "True" testing = self.getConfig('testing') == "True"
......
import json
import logging
import os
from dateutil import parser
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
# Get all data in the last "interval" seconds from JSON log
def get_data_interval(log, interval):
log_number = 0
latest_timestamp = 0
data_list = []
while True:
try:
f = open("{}.{}".format(log, log_number) if log_number else log, "rb")
except OSError:
return data_list
try:
f.seek(0, os.SEEK_END)
while True:
try:
while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
pass
except OSError:
break
pos = f.tell()
l = json.loads(f.readline().decode().replace("'", '"'))
timestamp = parser.parse(l['time'])
data_list.append(l['data'])
if not latest_timestamp:
latest_timestamp = timestamp
if (latest_timestamp - timestamp).total_seconds() > interval:
return data_list
f.seek(pos, os.SEEK_SET)
finally:
f.close()
log_number += 1
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
self.__name = config.get('name', None)
self.__log_folder = config.get('log-folder', None)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=1)
self.__title = os.path.splitext(self.__name)[0]
self.__log_file = os.path.join(self.__log_folder, '%s.json.log' % self.__title)
self.json_logger = logging.getLogger('json_logger')
self.json_logger.setLevel(logging.INFO)
handler = logging.FileHandler(self.__log_file)
formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
handler.setFormatter(formatter)
self.json_logger.addHandler(handler)
def sense(self):
testing = self.getConfig('testing') == "True"
amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
stats_period = int(self.getConfig('stats-period'))
max_rx_sample_db = float(self.getConfig('max-rx-sample-db'))
data_list = get_data_interval(amarisoft_stats_log, stats_period * 2)
max_rx = []
saturated = False
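    # Track the highest sample level seen per RX antenna over the window;
    # any value reaching max_rx_sample_db counts as saturation.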
for data in data_list:
rx_list = data['samples']['rx']
if not max_rx:
max_rx = [-99.9 for x in rx_list]
for i, rx in enumerate(rx_list):
max_rx[i] = max(max_rx[i], float(rx['max']))
if max_rx[i] >= max_rx_sample_db:
saturated = True
self.json_logger.info("RX maximum sample values (dB)",
extra={'data': max_rx})
if not max_rx:
self.logger.error("No RX samples data available")
elif saturated:
self.logger.error("RX antenna saturated, please lower rx_gain")
else:
self.logger.info("No saturation detected on RX antenna")
def test(self):
"""
Called after sense() if the instance is still converging.
Returns success or failure based on sense results.
In this case, fail if the previous sensor result is negative.
"""
return self._test(result_count=1, failure_amount=1)
def anomaly(self):
"""
Called after sense() if the instance has finished converging.
Returns success or failure based on sense results.
Failure signals the instance has diverged.
In this case, fail if two out of the last three results are negative.
"""
return self._anomaly(result_count=1, failure_amount=1)
...@@ -6,28 +6,17 @@ from zope.interface import implementer ...@@ -6,28 +6,17 @@ from zope.interface import implementer
from slapos.grid.promise import interface from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise) @implementer(interface.IPromise)
class RunPromise(GenericPromise): class RunPromise(GenericPromise):
def __init__(self, config): def __init__(self, config):
"""
Called when initialising the promise before testing.
Sets the configuration and the periodicity.
"""
super(RunPromise, self).__init__(config) super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=1) self.setPeriodicity(minute=1)
def sense(self): def sense(self):
"""
Called every time the promise is tested.
Signals a positive or negative result.
In this case, check whether the file exists.
"""
testing = self.getConfig('testing') == "True" testing = self.getConfig('testing') == "True"
sdr = self.getConfig('sdr') sdr = self.getConfig('sdr')
......
...@@ -6,6 +6,7 @@ extends = ...@@ -6,6 +6,7 @@ extends =
../../component/logrotate/buildout.cfg ../../component/logrotate/buildout.cfg
../../component/nghttp2/buildout.cfg ../../component/nghttp2/buildout.cfg
../../component/iperf3/buildout.cfg ../../component/iperf3/buildout.cfg
../../component/python3/buildout.cfg
parts += parts +=
template template
...@@ -21,6 +22,7 @@ parts += ...@@ -21,6 +22,7 @@ parts +=
ue-nr.jinja2.cfg ue-nr.jinja2.cfg
sib23.asn sib23.asn
iperf3 iperf3
eggs
# unimplemented parts - the http monitor and better log handling using logrotate # unimplemented parts - the http monitor and better log handling using logrotate
# apache-php # apache-php
# logrotate # logrotate
...@@ -35,6 +37,9 @@ output = ${buildout:directory}/template.cfg ...@@ -35,6 +37,9 @@ output = ${buildout:directory}/template.cfg
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_} url = ${:_profile_base_location_}/${:_update_hash_filename_}
[amarisoft-stats.jinja2.py]
<= download-base
[template-lte-enb-epc] [template-lte-enb-epc]
<= download-base <= download-base
...@@ -61,7 +66,14 @@ url = ${:_profile_base_location_}/${:_update_hash_filename_} ...@@ -61,7 +66,14 @@ url = ${:_profile_base_location_}/${:_update_hash_filename_}
[sdr-busy-promise] [sdr-busy-promise]
<= download-base <= download-base
[cell-gain-saturated-promise]
<= download-base
[rx-saturated-promise]
<= download-base
[amarisoft-stats-log-promise]
<= download-base
[cpu-temperature-promise]
<= download-base
[interface-up-promise] [interface-up-promise]
<= download-base <= download-base
...@@ -100,3 +112,13 @@ filename = ue-lte.jinja2.cfg ...@@ -100,3 +112,13 @@ filename = ue-lte.jinja2.cfg
[ue-nr.jinja2.cfg] [ue-nr.jinja2.cfg]
<= copy-config-to-instance <= copy-config-to-instance
filename = ue-nr.jinja2.cfg filename = ue-nr.jinja2.cfg
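# Interpreter with the extra eggs (websocket-client, python-dateutil) needed
# by the generated helper scripts such as amarisoft-stats.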
[eggs]
recipe = zc.recipe.egg
eggs =
websocket-client
python-dateutil
interpreter = pythonwitheggs
[versions]
websocket-client = 1.4.2
...@@ -72,6 +72,7 @@ parts += ...@@ -72,6 +72,7 @@ parts +=
mroonga-mariadb mroonga-mariadb
tesseract tesseract
zabbix-agent zabbix-agent
.coveragerc
# Buildoutish # Buildoutish
eggs-all-scripts eggs-all-scripts
...@@ -351,14 +352,52 @@ entry-points = ...@@ -351,14 +352,52 @@ entry-points =
runUnitTest=runUnitTest:main runUnitTest=runUnitTest:main
scripts = runUnitTest scripts = runUnitTest
initialization = initialization =
import glob, os, sys import glob, os, sys, json
buildout_directory = '''${buildout:directory}'''
parts_directory = '''${buildout:parts-directory}'''
repository_id_list = \
'''${erp5_repository_list:repository_id_list}'''.split()[::-1]
# read testrunner configuration from slapos instance parameters to
# configure coverage if enabled.
with open(os.environ['ERP5_TEST_RUNNER_CONFIGURATION']) as f:
test_runner_configuration = json.load(f)
test_runner_configuration.setdefault('coverage', {})
test_runner_configuration['coverage'].setdefault('enabled', False)
coverage_process = None
if test_runner_configuration['coverage']['enabled']:
test_runner_configuration['coverage'].setdefault(
'include', [os.path.join('parts', repo, '*') for repo in repository_id_list])
assets_directory = ''
test_name = sys.argv[-1].replace(':', '_')
if os.environ.get('SLAPOS_TEST_LOG_DIRECTORY'):
assets_directory = os.path.join(os.environ['SLAPOS_TEST_LOG_DIRECTORY'], test_name)
if not os.path.exists(assets_directory):
os.makedirs(assets_directory)
coverage_data_file = os.path.abspath(
os.path.join(assets_directory, 'coverage.sqlite3'))
curdir = os.path.abspath(os.curdir)
# change current directory when importing coverage so that it considers paths
# relative to the root of the software
os.chdir(buildout_directory)
import coverage
coverage_process = coverage.Coverage(
include=test_runner_configuration['coverage']['include'],
data_file=coverage_data_file,
branch=test_runner_configuration['coverage'].get('branch'),
)
coverage_process.set_option('run:relative_files', 'true')
coverage_process.set_option('run:plugins', ['erp5_coverage_plugin'])
coverage_process.start()
os.chdir(curdir)
import Products
Products.__path__[:0] = filter(None,
os.getenv('INSERT_PRODUCTS_PATH', '').split(os.pathsep))
os.environ['ZOPE_SCRIPTS'] = ''
parts_directory = '''${buildout:parts-directory}'''
repository_id_list = \
'''${erp5_repository_list:repository_id_list}'''.split()[::-1]
os.environ['erp5_tests_bt5_path'] = ','.join(
os.path.join(parts_directory, x, 'bt5') for x in repository_id_list)
extra_path_list = '''${:extra-paths}'''.split()
...@@ -371,6 +410,59 @@ initialization =
sys.path[:0] = sum((
glob.glob(os.path.join(x, 'Products', '*', 'tests'))
for x in os.getenv('INSERT_PRODUCTS_PATH', '').split(os.pathsep)), [])
import runUnitTest
try:
sys.exit(runUnitTest.main())
finally:
if coverage_process:
coverage_process.stop()
coverage_process.save()
# upload the coverage so that they can be combined from another machine
upload_url = test_runner_configuration['coverage'].get('upload-url')
if upload_url:
import requests
import time
import uritemplate
from six.moves.urllib.parse import urlparse
auth_list = (None, )
parsed_url = urlparse(upload_url)
if parsed_url.username:
# try Digest and Basic authentication and retry 5 times to tolerate transient errors
auth_list = (
requests.auth.HTTPDigestAuth(parsed_url.username, parsed_url.password),
requests.auth.HTTPBasicAuth(parsed_url.username, parsed_url.password),
) * 5
url = uritemplate.URITemplate(upload_url).expand(
test_name=test_name,
# Environment variables are set in parts/erp5/product/ERP5Type/tests/runTestSuite.py
test_result_id=os.environ.get('ERP5_TEST_RESULT_ID', 'unknown_test_result_id'),
test_result_revision=os.environ.get('ERP5_TEST_RESULT_REVISION', 'unknown_test_result_revision'),
)
for auth in auth_list:
with open(coverage_data_file, 'rb') as f:
resp = requests.put(url, data=f, auth=auth)
if resp.ok:
# print just the hostname, not to include the auth part
print('Uploaded coverage data to {parsed_url.hostname}'.format(parsed_url=parsed_url))
break
print('Error {resp.status_code} uploading coverage data to {parsed_url.hostname} with {auth.__class__.__name__}'.format(
resp=resp, parsed_url=parsed_url, auth=auth))
time.sleep(1)
else:
sys.stderr.write('Error uploading coverage data to {parsed_url.hostname}\n'.format(parsed_url=parsed_url))
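The upload-url value is treated as an RFC 6570 URI template, so the test name and test result identifiers are interpolated into the final upload location. A small hedged example (the template value itself is made up; only the expansion mechanism matches the code above):

import uritemplate

# Hypothetical upload-url taken from the test runner configuration.
upload_url = 'https://bot:secret@example.org/coverage/{test_result_id}/{test_result_revision}/{test_name}'
url = uritemplate.URITemplate(upload_url).expand(
    test_name='erp5_core_test',
    test_result_id='20230101-ABCDEF',
    test_result_revision='1-abc123',
)
# -> https://bot:secret@example.org/coverage/20230101-ABCDEF/1-abc123/erp5_core_test
print(url)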
[.coveragerc]
recipe = slapos.recipe.template
output = ${buildout:directory}/${:_buildout_section_name_}
inline =
# coverage configuration file, useful when making html report
[run]
plugins =
erp5_coverage_plugin
relative_files = true
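The coverage.sqlite3 files uploaded per test are meant to be merged later, typically on another machine; relative_files = true is what keeps the recorded paths portable. A minimal sketch of that merge step, assuming the data files have already been downloaded next to this .coveragerc (file names are illustrative, and the erp5_coverage_plugin egg must be importable):

import coverage

# Hypothetical per-test databases fetched back from the upload-url location.
data_files = ['coverage-testFoo.sqlite3', 'coverage-testBar.sqlite3']

cov = coverage.Coverage(config_file='.coveragerc', data_file='combined.sqlite3')
cov.combine(data_files)                      # merge the per-test databases
cov.save()
cov.html_report(directory='coverage_html')   # report over the relative, plugin-resolved paths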
[test-suite-runner]
# XXX: Workaround for fact ERP5Type is not a distribution and does not
...@@ -466,7 +558,6 @@ eggs = ${neoppod:eggs}
SOAPpy
chardet
collective.recipe.template
coverage
erp5diff
interval
ipdb
...@@ -606,6 +697,12 @@ Acquisition-patch-options = -p1
python-magic-patches = ${:_profile_base_location_}/../../component/egg-patch/python_magic/magic.patch#de0839bffac17801e39b60873a6c2068
python-magic-patch-options = -p1
# neoppod installs bin/coverage so we inject erp5 plugin here so that coverage script can use it in report
[neoppod]
eggs +=
erp5_coverage_plugin
[eggs-all-scripts]
recipe = zc.recipe.egg
eggs =
...@@ -683,6 +780,7 @@ PyStemmer = 1.3.0
Pympler = 0.4.3
StructuredText = 2.11.1
WSGIUtils = 0.7
erp5-coverage-plugin = 0.0.1
erp5diff = 0.8.1.8
five.formlib = 1.0.4
five.localsitemanager = 2.0.5
...