Commit c7abfb01 authored by Joanne Hugé

Update Release Candidate

parents 13898f68 784aa033
@@ -8,9 +8,9 @@ parts =
 [libffi]
 recipe = slapos.recipe.cmmi
 shared = true
-version = 3.4.3
-url = http://sourceware.org/pub/libffi/libffi-${:version}.tar.gz
-md5sum = b57b0ac1d1072681cee9148a417bd2ec
+version = 3.4.6
+url = https://github.com/libffi/libffi/releases/download/v${:version}/libffi-${:version}.tar.gz
+md5sum = b9cac6c5997dca2b3787a59ede34e0eb
 location = @@LOCATION@@
 patch-options = -p1
 patches =
...
@@ -58,7 +58,7 @@ md5sum = 35bb9f1d8f4fd6675bd768d8a7e1253c
 [instance-gitlab-export.cfg.in]
 _update_hash_filename_ = instance-gitlab-export.cfg.in
-md5sum = c8231583d04bf0d3fe2d26230b94d78d
+md5sum = 330e1b13f793303e069ec8b7b8c882ea
 [macrolib.cfg.in]
 _update_hash_filename_ = macrolib.cfg.in
@@ -82,7 +82,7 @@ md5sum = b1becd9ec4c2eeefe573af4bb53c9751
 [template-gitlab-resiliency-restore.sh.in]
 _update_hash_filename_ = template/template-gitlab-resiliency-restore.sh.in
-md5sum = 8ce31a27e814e750dfd38c92a278fb9e
+md5sum = c1b5f7ab33de40f301fa6931f84999a1
 [puma.rb.in]
 _update_hash_filename_ = template/puma.rb.in
...
@@ -76,6 +76,8 @@ context =
 raw redis_script ${service-redis:wrapper}
 raw puma_script ${service-puma:wrapper-path}
 raw sidekiq_script ${service-sidekiq:wrapper-path}
+raw gitaly_script ${service-gitaly:wrapper-path}
+raw gitaly_pid_file ${gitaly:pid}
 raw gitlab_backup_dir ${gitlab-backup-directory:backup-gitlab.git}
 raw redis_pid_file ${service-redis:pid-file}
 raw postgres_pid_file ${service-postgresql:pgdata-directory}/postmaster.pid
...
@@ -30,6 +30,8 @@ puma_script="{{ puma_script }}"
 puma_pid_file="{{ puma_pid_file }}"
 sidekiq_script="{{ sidekiq_script }}"
 var_location="{{ var_directory }}"
+gitaly_script="{{ gitaly_script }}"
+gitaly_pid_file="{{ gitaly_pid_file }}"
 # export GIT_EXEC_PATH=$git_location/libexec/git-core/
@@ -57,6 +59,7 @@ kill_process () {
 check_process $postgres_pid_file "Postgres"
 check_process $redis_pid_file "Redis"
 check_process $puma_pid_file "Puma"
+check_process $gitaly_pid_file "Gitaly"
 if [ -f "$postgres_pid_file" ]; then
   rm $postgres_pid_file
@@ -66,7 +69,7 @@ fi
 # restoration will created them at every run
 echo "Cleanup gitlab backup and old repositories folders..."
 rm -rf $var_location/backup/*
-rm -rf $var_location/repositories*
+rm -rf $var_location/repositories.*
 echo "Starting Postgres..."
 $postgres_executable &
@@ -77,6 +80,20 @@ echo "Starting Redis server..."
 $redis_executable &
 redis_pid=$!
 trap "kill $postgres_pid $redis_pid" EXIT TERM INT
+echo "Starting gitaly service..."
+$gitaly_script &
+gitaly_pid=$!
+trap "kill $gitaly_pid $postgres_pid $redis_pid" EXIT TERM INT
+echo "Starting Puma"
+$puma_script &
+puma_pid=$!
+trap "kill $gitaly_pid $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
+# wait until Puma is started and pid file exists
+sleep 200
+if [ -s "$puma_pid_file" ]; then
+  puma_pid=$(head -n 1 $puma_pid_file) > /dev/null 2>&1
+  trap "kill $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
+fi
 echo "[OK]"
 echo "Restoring gitlab data..."
@@ -90,15 +107,7 @@ echo "Checking gitlab promises..."
 echo "[info] Not all promises are checked!"
 $promise_check/gitlab-app
-echo "Starting Puma to check gitlab-shell promise..."
-$puma_script &
-puma_pid=$!
-trap "kill $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
-sleep 60
-if [ -s "$puma_pid_file" ]; then
-  puma_pid=$(head -n 1 $puma_pid_file) > /dev/null 2>&1
-  trap "kill $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
-fi
+echo "Check gitlab-shell promise..."
 $promise_check/gitlab-shell
 #echo "starting Sidekiq to check sidekiq promise..."
@@ -110,6 +119,7 @@ $promise_check/gitlab-shell
 kill_process $postgres_pid
 kill_process $redis_pid
 kill_process $puma_pid
+kill_process $gitaly_pid
 RESTORE_EXIT_CODE=$?
...
@@ -16,7 +16,7 @@
 [template]
 filename = instance.cfg
-md5sum = 9013df1ac77d35a1fc8df37bb5615dcd
+md5sum = c1588b160ca830cbd42a295e043d4405
 [template-ors]
 filename = instance-ors.cfg
@@ -60,7 +60,7 @@ md5sum = 52da9fe3a569199e35ad89ae1a44c30e
 [template-enb]
 _update_hash_filename_ = instance-enb.jinja2.cfg
-md5sum = 04b723fc2a3d5555243921823b0e087b
+md5sum = de757719d6cf4bd731394f50dbce8d99
 [template-ors-enb]
 _update_hash_filename_ = instance-ors-enb.jinja2.cfg
@@ -142,6 +142,10 @@ md5sum = f02fbfd31ba89cf243e2752adcae28d9
 _update_hash_filename_ = promise/check_frequency_outofbounds.py
 md5sum = 7c83eab2df4f5a5d519e3eb16e4077a3
+[gps_lock_promise]
+_update_hash_filename_ = promise/check_gps_lock.py
+md5sum = c79fb837cc32bc0182ebf15078115b10
 [nginx_conf.in]
 _update_hash_filename_ = config/nginx_conf.in
 md5sum = e2496564695fb76b242c3e0f8d0ab6c3
 Changelog
 =========
+Version 1.0.384 (2024-12-16)
+-------------
+* Add promise to check if GPS is synchronized when enabled
 Version 1.0.383 (2024-12-11)
 -------------
...
@@ -65,6 +65,9 @@ parts =
 xlog-fluentbit-service
 check-xlog-fluentbit-forward-host.py
 check-xlog-fluentbit-health.py
+{%- endif %}
+{%- if slapparameter_dict.get('gps_sync', False) %}
+gps-lock-promise
 {%- endif %}
 nginx-launcher
 nginx-graceful
@@ -385,3 +388,12 @@ config-amarisoft-stats-log = ${ru_amarisoft-stats-template:log-output}
 config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
 config-min-rxtx-delay = {{ slapparameter_dict.get("min_rxtx_delay", 0) }}
+{%- if slapparameter_dict.get('gps_sync', False) %}
+[gps-lock-promise]
+recipe = slapos.cookbook:promise.plugin
+eggs = slapos.core
+file = {{ gps_lock_promise }}
+output = ${directory:plugins}/check-gps-lock.py
+config-amarisoft-rf-info-log = ${ru_amarisoft-rf-info-template:log-output}
+config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
+{%- endif %}
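
As a side note on how the section above gets enabled: the Jinja guard keys off the gps_sync instance parameter. A minimal, purely illustrative sketch of request parameters that would activate it (parameter names taken from the template above; the values are assumptions, not defaults):

# Illustrative only: instance parameters that would enable the GPS lock promise
# according to the {%- if slapparameter_dict.get('gps_sync', False) %} guard above.
import json

parameters = {
    "gps_sync": True,               # enables the [gps-lock-promise] section
    "enb_stats_fetch_period": 60,   # also reused as the promise's stats-period
}
print(json.dumps(parameters, indent=2))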
@@ -243,6 +243,7 @@ extra-context =
 raw fluent_bit_location ${fluent-bit:location}
 raw openssh_location ${openssh:location}
 raw openssh_output_keygen ${openssh-output:keygen}
+raw gps_lock_promise ${gps_lock_promise:target}
 [dynamic-template-core-network]
 <= jinja2-template-base
...
import itertools
import json
import logging
import os
import re

from dateutil import parser as dateparser
from datetime import datetime

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


def iter_reverse_lines(f):
  """
  Read lines from the end of the file
  """
  f.seek(0, os.SEEK_END)
  while True:
    try:
      while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
        pass
    except OSError:
      return
    pos = f.tell()
    yield f.readline()
    f.seek(pos, os.SEEK_SET)


def iter_logrotate_file_handle(path, mode='r'):
  """
  Yield successive file handles for rotated logs
  (XX.log, XX.log.1, XX.log.2, ...)
  """
  for i in itertools.count():
    path_i = path + str(i or '')
    try:
      with open(path_i, mode) as f:
        yield f
    except OSError:
      break


def get_json_log_data_interval(json_log_file, interval):
  """
  Get all data in the last "interval" seconds from JSON log
  Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
  """
  current_time = datetime.now()
  data_list = []
  for f in iter_logrotate_file_handle(json_log_file, 'rb'):
    for line in iter_reverse_lines(f):
      l = json.loads(line)
      timestamp = dateparser.parse(l['time'])
      if (current_time - timestamp).total_seconds() > interval:
        return data_list
      data_list.append(l['data'])
  return data_list


class JSONPromise(GenericPromise):
  def __init__(self, config):
    self.__name = config.get('name', None)
    self.__log_folder = config.get('log-folder', None)
    super(JSONPromise, self).__init__(config)
    json_log_name = os.path.splitext(self.__name)[0] + '.json.log'
    self.__json_log_file = os.path.join(self.__log_folder, json_log_name)
    self.json_logger = self.__make_json_logger(self.__json_log_file)

  def __make_json_logger(self, json_log_file):
    logger = logging.getLogger('json-logger')
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(json_log_file)
    formatter = logging.Formatter(
      '{"time": "%(asctime)s", "log_level": "%(levelname)s"'
      ', "message": "%(message)s", "data": %(data)s}'
    )
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger


@implementer(interface.IPromise)
class RunPromise(JSONPromise):
  def __init__(self, config):
    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=1)
    self.amarisoft_rf_info_log = self.getConfig('amarisoft-rf-info-log')
    self.stats_period = int(self.getConfig('stats-period'))

  def sense(self):
    data_list = get_json_log_data_interval(self.amarisoft_rf_info_log, self.stats_period * 2)
    if len(data_list) < 1:
      self.logger.error("rf_info: stale data")
      return

    rf_info_text = data_list[0]['rf_info']
    if 'Sync: gps (locked)' in rf_info_text:
      self.logger.info("GPS locked")
    else:
      self.logger.error("GPS not locked")

  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)

  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if two out of the last three results are negative.
    """
    return self._anomaly(result_count=1, failure_amount=1)
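
For reference, a minimal sketch of the JSON log entry shape that get_json_log_data_interval() reads back: the field layout follows the formatter defined in JSONPromise above, while the file path, timestamp format, and rf_info sample text are illustrative assumptions rather than output captured from a real system.

# Sketch of one log entry in the shape produced by JSONPromise's formatter.
# The path and the rf_info sample text below are hypothetical.
import json
from datetime import datetime

entry = {
    "time": datetime.now().isoformat(),
    "log_level": "INFO",
    "message": "rf info",
    "data": {"rf_info": "Sync: gps (locked)"},
}

with open("/tmp/check-gps-lock.json.log", "a") as f:  # hypothetical location
    f.write(json.dumps(entry) + "\n")

# sense() considers GPS locked when this marker appears in the newest entry:
print("Sync: gps (locked)" in entry["data"]["rf_info"])  # True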
@@ -222,6 +222,9 @@ update-command = ${:command}
 <= setcap
 exe = ${dnsmasq:location}/sbin/dnsmasq
+[gps_lock_promise]
+<= download-base
 [versions]
 websocket-client = 1.4.2
 ncclient = 0.6.13
...