slapos / Commits / 0c612c2a

Commit 0c612c2a, authored Sep 19, 2022 by Jérome Perrin
Parent: df578b8a

    grafana

Showing 8 changed files with 408 additions and 84 deletions (+408, -84)
software/grafana/buildout.hash.cfg              +6    -2
software/grafana/instance.cfg.in                +190  -20
software/grafana/loki-config-file.cfg.in        +68   -37
software/grafana/loki-nginx-config-file.cfg.in  +77   -0
software/grafana/software.cfg                   +33   -9
software/grafana/test/test.py                   +30   -14
stack/erp5/buildout.hash.cfg                    +1    -1
stack/erp5/instance-mariadb.cfg.in              +3    -1
software/grafana/buildout.hash.cfg
@@ -15,7 +15,7 @@
[instance-profile]
filename = instance.cfg.in
-md5sum = eb2175a37e36bb486b586b709229958f
+md5sum = aae326b9fa18f77d5a397ac3fc0071b0

[influxdb-config-file]
filename = influxdb-config-file.cfg.in
@@ -39,4 +39,8 @@ md5sum = 5616679a9c5c2757540175ead3f5500a
[loki-config-file]
filename = loki-config-file.cfg.in
-md5sum = ad2baf4599a937d7352034a41fa24814
+md5sum = 19a7f5cb904b3287b0bc7cb3e8a27429
+
+[loki-nginx-config-file]
+filename = loki-nginx-config-file.cfg.in
+md5sum = b08ce1e4abb34eb79e26133459c27c3a
software/grafana/instance.cfg.in
(diff collapsed, not shown)
software/grafana/loki-config-file.cfg.in
# inspired by
# https://github.com/grafana/loki/blob/1489c1731277c327e3661da182bfc6c90d4559f4/tools/dev/loki-boltdb-storage-s3/docker-compose.yml
# and other configuration examples with microservices, because the single binary
# mode assumes running on 127.0.0.1, but in slapos we want to bind on the partition's
# addresses
auth_enabled: false
http_prefix:

server:
  http_listen_address: {{ loki['ip'] }}
  http_listen_port: {{ loki['port'] }}
  grpc_listen_address: {{ loki['ip'] }}
  grpc_listen_port: {{ loki['grpc-port'] }}
  grpc_server_max_recv_msg_size: 1.048576e+08
  grpc_server_max_send_msg_size: 1.048576e+08

ingester:
  lifecycler:
    address: {{ loki['ip'] }}
    ring:
      kvstore:
        store: inmemory
      replication_factor: 1
  chunk_idle_period: 15m
  # # TODO ?
  # wal:
  #   enabled: true
  #   dir: /loki/wal

common:
  compactor_address: http://{{ loki['ip'] }}:{{ loki['write-http-port'] }}

schema_config:
  configs:
-  - from: 2018-04-15
-    store: boltdb
+  - from: 2020-05-15
+    store: boltdb-shipper
    object_store: filesystem
-    schema: v9
+    schema: v11
    index:
      prefix: index_
-      period: 168h
+      period: 24h

storage_config:
-  boltdb:
-    directory: {{ loki['storage-boltdb-dir'] }}
+  boltdb_shipper:
+    active_index_directory: {{ loki['boltdb-shipper-active-index-directory'] }}
+    cache_location: {{ loki['boltdb-shipper-cache-location'] }}
  filesystem:
-    directory: {{ loki['storage-filesystem-dir'] }}
+    directory: {{ loki['storage-filesystem-directory'] }}

limits_config:
+  reject_old_samples: false
  enforce_metric_name: false
-  reject_old_samples: true
  reject_old_samples_max_age: 168h

chunk_store_config:
  max_look_back_period: 0

table_manager:
  chunk_tables_provisioning:
    inactive_read_throughput: 0
    inactive_write_throughput: 0
    provisioned_read_throughput: 0
    provisioned_write_throughput: 0
  index_tables_provisioning:
    inactive_read_throughput: 0
    inactive_write_throughput: 0
    provisioned_read_throughput: 0
    provisioned_write_throughput: 0
  retention_deletes_enabled: false
  retention_period: 0
  ingestion_rate_mb: 1024
  ingestion_burst_size_mb: 1024

ingester:
  lifecycler:
    address: {{ loki['ip'] }}
    ring:
      kvstore:
        store: memberlist
      replication_factor: 1

compactor:
  compaction_interval: 1m
  retention_enabled: true
  working_directory: {{ loki['compactor-working-directory'] }}

frontend:
  log_queries_longer_than: 5s
  compress_responses: true
  max_outstanding_per_tenant: 2048
  tail_proxy_url: http://{{ loki['ip'] }}:{{ loki['querier-http-port'] }}

frontend_worker:
  scheduler_address: {{ loki['ip'] }}:{{ loki['query-scheduler-grpc-port'] }}

#testERP5Type
memberlist:
  bind_addr:
  - {{ loki['ip'] }}
  join_members:
  # - {{ loki['ip'] }}:{{ loki['read-1-memberlist-port'] }}
  - {{ loki['ip'] }}:{{ loki['querier-memberlist-port'] }}
  # - {{ loki['ip'] }}:{{ loki['write-memberlist-port'] }}

query_scheduler:
  max_outstanding_requests_per_tenant: 1024

querier:
  query_ingesters_within: 2h
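Both this file and the nginx template below are Jinja2 templates (hence the {{ loki[...] }} expressions); the instance profile renders them with a loki mapping carrying the partition's addresses, ports and directories. A minimal sketch of that substitution, with made-up values rather than the ones the real instance profile provides:

# Minimal sketch of how such a template gets rendered; the values in the
# 'loki' mapping are made up here, the real ones come from instance.cfg.in.
import jinja2

template = jinja2.Template(
    "server:\n"
    "  http_listen_address: {{ loki['ip'] }}\n"
    "  http_listen_port: {{ loki['port'] }}\n"
)
print(template.render(loki={"ip": "10.0.0.1", "port": 3100}))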
software/grafana/loki-nginx-config-file.cfg.in
(new file, mode 100644)
daemon off;

events {
  worker_connections 1024;
}

error_log /dev/stdout;

http {
  default_type application/octet-stream;
  access_log /dev/stdout;
  sendfile on;
  tcp_nopush on;

  upstream read {
    server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
  }

  upstream write {
    server {{ loki['ip'] }}:{{ loki['write-http-port'] }};
  }

  upstream cluster {
    server {{ loki['ip'] }}:{{ loki['write-http-port'] }};
    server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
    server {{ loki['ip'] }}:{{ loki['querier-http-port'] }};
  }

  upstream query-frontend {
    server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
  }

  server {
    listen {{ loki['ip'] }}:{{ loki['nginx-port'] }};
    # XXX while debugging
    listen [{{ loki['ipv6'] }}]:{{ loki['nginx-port'] }};

    location / {
      return 200 'OK';
    }

    location = /ring {
      proxy_pass http://cluster$request_uri;
    }

    location = /memberlist {
      proxy_pass http://cluster$request_uri;
    }

    location = /config {
      proxy_pass http://cluster$request_uri;
    }

    location = /metrics {
      proxy_pass http://cluster$request_uri;
    }

    location = /ready {
      proxy_pass http://cluster$request_uri;
    }

    location = /loki/api/v1/push {
      proxy_pass http://write$request_uri;
    }

    location = /loki/api/v1/tail {
      proxy_pass http://read$request_uri;
      proxy_set_header Upgrade $http_upgrade;
      proxy_set_header Connection "upgrade";
    }

    location ~ /loki/api/.* {
      proxy_pass http://query-frontend$request_uri;
    }
  }
}
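The location blocks above map the standard Loki HTTP API onto the read, write and query-frontend upstreams, so a client only needs the nginx address. A minimal sketch of pushing one log line through that front using the regular Loki push API (the URL, port and label below are hypothetical placeholders, not values from this software release):

# Minimal sketch: push one log line through the nginx front with the
# standard Loki push API. The URL and the labels are hypothetical.
import json
import time
import urllib.request

LOKI_FRONT = "http://10.0.0.1:3100"  # hypothetical {{ loki['ip'] }}:{{ loki['nginx-port'] }}

payload = {
    "streams": [
        {
            "stream": {"app": "example"},
            # each value is [<unix time in nanoseconds, as a string>, <log line>]
            "values": [[str(time.time_ns()), "hello from slapos"]],
        }
    ]
}
request = urllib.request.Request(
    LOKI_FRONT + "/loki/api/v1/push",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
urllib.request.urlopen(request)  # nginx proxies this to the "write" upstream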
software/grafana/software.cfg
@@ -7,6 +7,7 @@ extends =
  ../../component/openssl/buildout.cfg
  ../../component/curl/buildout.cfg
  ../../component/dash/buildout.cfg
+  ../../component/nginx/buildout.cfg
  buildout.hash.cfg

parts =

@@ -19,21 +20,30 @@ parts =
  grafana-provisioning-datasources-config-file
  grafana-provisioning-dashboards-config-file
  loki-config-file
+  loki-nginx-config-file

[nodejs]
-<= nodejs-14.16.0
+<= nodejs-16.14.0

[gowork]
golang = ${golang1.19:location}
# XXX speed up development cycle by not rebuilding workspace on every software run
# XXX does not work ?
update-command =

[go_github.com_grafana_grafana]
<= go-git-package
go.importpath = github.com/grafana/grafana
repository = https://github.com/grafana/grafana
-revision = v7.5.2-0-gca413c612f
+revision = v9.1.5-0-gdf015a9301

[go_github.com_grafana_loki]
<= go-git-package
go.importpath = github.com/grafana/loki
repository = https://github.com/grafana/loki
-revision = v2.3.0-0-gf5fd02966
+revision = v2.1.0-2075-gafd63c598
+# tag helm-loki-3.1.0 which supports golang 1.19

[go_github.com_influxdata_influxdb]
<= go-git-package

@@ -45,7 +55,7 @@ revision = v1.8.4-0-gbc8ec4384e
<= go-git-package
go.importpath = github.com/influxdata/telegraf
repository = https://github.com/influxdata/telegraf
-revision = v1.20.2-0-gf721f53d
+revision = v1.24.0-0-g3c4a6516e

[go_github.com_perrinjerome_slapos_telegraf_input]
<= go-git-package

@@ -53,6 +63,12 @@ go.importpath = github.com/perrinjerome/telegraf-input-slapos
repository = https://github.com/perrinjerome/telegraf-input-slapos
revision = v0.0.1-0-gf8981f3

+# [go_github.com_jaegertracking_jaeger]
+# <= go-git-package
+# go.importpath = github.com/jaegertracking/jaeger
+# repository = https://github.com/jaegertracking/jaeger
+# revision = v1.20.0-623-gcac21f82

[gowork]
install =
  ${go_github.com_grafana_loki:location}:./cmd/loki

@@ -78,14 +94,18 @@ promtail-bin = ${:bin}/promtail
[grafana]
recipe = plone.recipe.command
-command = bash -c "
-  cd ${:homepath} &&
-  . ${gowork:env.sh} &&
+command = bash -ce "
+  cd ${:homepath} && \
+  . ${gowork:env.sh} && \
+  go install github.com/google/wire/cmd/wire@v0.5.0 && \
+  wire gen -tags oss ./pkg/server ./pkg/cmd/grafana-cli/runner && \
  # Unlike the loki, grafana _needs_ CGO_ENABLED, so we override here
-  export CGO_ENABLED=1 &&
+  export CGO_ENABLED=1 && \
  go run build.go setup && \
  go run build.go build && \
-  ${yarn:location}/bin/yarn install --pure-lockfile && \
+  export NODE_OPTIONS=--max_old_space_size=8192 && \
+  ${yarn:location}/bin/yarn install --immutable && \
+  ${yarn:location}/bin/yarn run themes:generate && \
  ${yarn:location}/bin/yarn run build && \
  ${yarn:location}/bin/yarn run plugins:build-bundled && \
  # Cleanup yarn and Cypress caches

@@ -117,6 +137,9 @@ url = ${:_profile_base_location_}/${:filename}
[loki-config-file]
<= download-file-base

+[loki-nginx-config-file]
+<= download-file-base

[instance-eggs]
recipe = zc.recipe.egg
eggs =

@@ -138,6 +161,7 @@ context =
  key grafana_bin gowork:grafana-bin
  key grafana_homepath gowork:grafana-homepath
  key loki_bin gowork:loki-bin
+  raw nginx_bin ${nginx:location}/sbin/nginx
  key promtail_bin gowork:promtail-bin
  key curl_bin :curl-bin
  key dash_bin :dash-bin
software/grafana/test/test.py
@@ -192,16 +192,32 @@ class TestLoki(GrafanaTestCase):
    cls._logfile = tempfile.NamedTemporaryFile(suffix='log')
    parameter_dict = {
        "applications": [
            {
                "name": "System",
                "instance-root": "/",
                "partitions": [
                    {
                        # no slapos for system application
                        # XXX example
                        "name": "syslog",
                        "reference": "syslog",
                        "files": [
                            "/srv/slapgrid/slappart15/grosgzip/bench.log",
                        ]
                    },
                ]
            },
            {
                "name": "ERP5",
-                "instance-root": "/srv/slapgrid/slappart4/srv/slapos/inst/",
+                "instance-root": "/srv/slapgrid/slappart15/srv/runner/instance/",
                "urls": [
                    "https://softinst12345-erp5.host.vifib.net/",
                    # TODO
                    # "https://XXX.host.vifib.net/erp5/",
                ],
                "partitions": [
                    {
                        "name": "jerome-dev-mariadb",
-                        "reference": "slappart6",
+                        "reference": "slappart3",
                        "type": "erp5/mariadb",
                        #"static-tags": {
                        #  "XXX": "needed?"
@@ -209,7 +225,7 @@ class TestLoki(GrafanaTestCase):
                    },
                    {
                        "name": "jerome-dev-zodb",
-                        "reference": "slappart7",
+                        "reference": "slappart4",
                        "type": "erp5/zeo",
                        #"static-tags": {
                        #  "XXX": "needed?"
@@ -217,7 +233,7 @@ class TestLoki(GrafanaTestCase):
                    },
                    {
                        "name": "jerome-dev-balancer",
-                        "reference": "slappart9",
+                        "reference": "slappart6",
                        "type": "erp5/balancer",
                        #"static-tags": {
                        #  "XXX": "needed?"
@@ -225,20 +241,20 @@ class TestLoki(GrafanaTestCase):
                    },
                    {
                        "name": "jerome-dev-zope-front",
-                        "reference": "slappart8",
+                        "reference": "slappart5",
                        "type": "erp5/zope-front",
                        #"static-tags": {
                        #  "XXX": "needed?"
                        #}
                    },
-                    {
-                        "name": "jerome-dev-zope-front",
-                        "reference": "slappart13",
-                        "type": "erp5/zope-activity",
-                        #"static-tags": {
-                        #  "XXX": "needed?"
-                        #}
-                    }
+                    # {
+                    #     "name": "jerome-dev-zope-front",
+                    #     "reference": "slappart13",
+                    #     "type": "erp5/zope-activity",
+                    #     #"static-tags": {
+                    #     #  "XXX": "needed?"
+                    #     #}
+                    # }
                ]
            }
        ],
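The applications/partitions structure above is the instance parameter telling promtail which log files to ship and how to label them; a test can then check that those lines come back from Loki. A rough sketch of such a query, where the endpoint URL and the label selector are illustrative assumptions rather than values taken from the test:

# Rough sketch: query Loki for lines shipped from the configured files.
# The URL and the label selector are illustrative assumptions only.
import json
import urllib.parse
import urllib.request

loki_url = "http://10.0.0.1:3100"  # hypothetical Loki endpoint
query = urllib.parse.urlencode({'query': '{app="System", name="syslog"}'})
with urllib.request.urlopen(loki_url + "/loki/api/v1/query_range?" + query) as response:
    result = json.load(response)
for stream in result["data"]["result"]:
    for timestamp_ns, line in stream["values"]:
        print(timestamp_ns, line)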
stack/erp5/buildout.hash.cfg
@@ -26,7 +26,7 @@ md5sum = d10b8e35b02b5391cf46bf0c7dbb1196
[template-mariadb]
filename = instance-mariadb.cfg.in
-md5sum = cee995829fbd138a8c2c9209d72d01a0
+md5sum = a730096498087e4b24c1a2018af5bd47

[template-kumofs]
filename = instance-kumofs.cfg.in
stack/erp5/instance-mariadb.cfg.in
@@ -6,6 +6,7 @@
{% for database_count in range(slapparameter_dict.get('test-database-amount', 1)) -%}
{% do test_database_list.append({'name': 'erp5_test_' ~ database_count, 'user': 'testuser_' ~ database_count, 'password': 'testpassword' ~ database_count}) -%}
{% endfor -%}
+{% set character_set_server = slapparameter_dict.get('character-set-server', 'utf8mb4') -%}
{% set catalog_backup = slapparameter_dict.get('catalog-backup', {}) -%}
{% set backup_periodicity = slapparameter_dict.get('backup-periodicity', 'daily') -%}
{% set full_backup_retention_days = catalog_backup.get('full-retention-days', 7) -%}

@@ -99,7 +100,8 @@ time = {{ dumps(backup_periodicity) }}
# can be fully restored.
# master-data: use value "2" as we are not in a replication case
#}
-command = "${binary-wrap-mysqldump:wrapper-path}" --all-databases --flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
+command = "${binary-wrap-mysqldump:wrapper-path}" --all-databases --default-character-set={{ character_set_server }} --flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
+xcommand = "${binary-wrap-mysqldump:wrapper-path}" --all-databases--flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
{# KEEP GLOB PATTERN IN SYNC with generated filenames above
# YYYYmmddHHMMSS -#}
file-glob = ??????????????.sql.gz
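The "KEEP GLOB PATTERN IN SYNC" comment refers to the dump filename produced with date "+%Y%m%d%H%M%S": it always expands to 14 digits, which is exactly what the ??????????????.sql.gz glob matches. A quick illustrative check (not part of the commit):

# Illustrative check that a backup filename built from %Y%m%d%H%M%S
# matches the file-glob ??????????????.sql.gz used above.
import datetime
import fnmatch

name = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + ".sql.gz"
print(name)                                             # e.g. 20220919143000.sql.gz
print(fnmatch.fnmatch(name, "??????????????.sql.gz"))   # True: 14 digits + suffix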