Commit 38b9ebab authored by Aurélien Vermylen

Merge branch 'master' into aurelien-test-wendelin

parents 57b5d1f4 3df26781
@@ -34,9 +34,9 @@ md5sum = 2202b18f269ad606d70e1864857ed93c
 [apache]
 # inspired on http://old.aclark.net/team/aclark/blog/a-lamp-buildout-for-wordpress-and-other-php-apps/
 recipe = slapos.recipe.cmmi
-version = 2.4.27
+version = 2.4.29
 url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
-md5sum = 97b6bbfa83c866dbe20ef317e3afd108
+md5sum = 0c599404ef6b69eee95bcd9fcd094407
 pre-configure =
   cp -ar ${apr:location}/apr-${apr:version} srclib/apr/ &&
   cp -ar ${apr-util:location}/apr-util-${apr-util:version} srclib/apr-util
...
From 8089fa02122fed0a6394eba14bbedcb1d18e2384 Mon Sep 17 00:00:00 2001
From: Kevin Atkinson <kevina@gnu.org>
Date: Thu, 29 Dec 2016 00:50:31 -0500
Subject: Compile Fixes for GCC 7.
Closes #519.
---
modules/filter/tex.cpp | 2 +-
prog/check_funs.cpp | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/filter/tex.cpp b/modules/filter/tex.cpp
index a979539..19ab63c 100644
--- a/modules/filter/tex.cpp
+++ b/modules/filter/tex.cpp
@@ -174,7 +174,7 @@ namespace {
if (c == '{') {
- if (top.in_what == Parm || top.in_what == Opt || top.do_check == '\0')
+ if (top.in_what == Parm || top.in_what == Opt || *top.do_check == '\0')
push_command(Parm);
top.in_what = Parm;
diff --git a/prog/check_funs.cpp b/prog/check_funs.cpp
index db54f3d..89ee09d 100644
--- a/prog/check_funs.cpp
+++ b/prog/check_funs.cpp
@@ -647,7 +647,7 @@ static void print_truncate(FILE * out, const char * word, int width) {
}
}
if (i == width-1) {
- if (word == '\0')
+ if (*word == '\0')
put(out,' ');
else if (word[len] == '\0')
put(out, word, len);
--
2.15.0.rc0
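Both hunks above make the same kind of change: a pointer was being compared against the character constant '\0', which tests the pointer itself rather than the first character of the string, and newer GCC (7+) diagnoses that comparison. A minimal, self-contained illustration of the before/after, assuming nothing beyond the standard library (this is not code from the aspell sources):

// gcc7-pointer-compare-demo.cpp -- illustrative only, not part of the patch above.
#include <cstdio>

static void print_first(const char *word) {
  // Old form: `if (word == '\0')` compared the pointer against zero, so it
  // was only true for a null pointer; GCC 7 reports it (-Wpointer-compare).
  if (*word == '\0')            // fixed form: test the first character
    std::puts("(empty word)");
  else
    std::printf("first letter: %c\n", word[0]);
}

int main() {
  print_first("");              // prints "(empty word)"
  print_first("aspell");        // prints "first letter: a"
  return 0;
}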
@@ -5,6 +5,7 @@ parts =
 extends =
   ../ncurses/buildout.cfg
+  ../patch/buildout.cfg
   ../perl/buildout.cfg

 [aspell-dictionary-common]
@@ -15,8 +16,11 @@ configure-command = ./configure --vars ASPELL=${aspell:location}/bin/aspell PREZ
 recipe = slapos.recipe.cmmi
 url = http://ftp.gnu.org/gnu/aspell/aspell-0.60.6.1.tar.gz
 md5sum = e66a9c9af6a60dc46134fdacf6ce97d7
+patch-options = -p1
+patches =
+  ${:_profile_base_location_}/Compile-Fixes-for-GCC-7.patch#0159a8841e1241153d2105d157fc8225
 environment =
-  PATH=${perl:location}/bin:%(PATH)s
+  PATH=${patch:location}/bin:${perl:location}/bin:%(PATH)s
   CPPFLAGS=-I${ncurses:location}/include
   LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
...
diff -N -u -r bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java
--- bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 1980-01-01 00:00:00.000000000 +0900
+++ bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 2017-01-02 22:19:09.326924214 +0900
@@ -172,7 +172,7 @@
@Param(
name = "use_default_shell_env",
type = Boolean.class,
- defaultValue = "False",
+ defaultValue = "True",
named = true,
positional = false,
doc = "whether the action should use the built in shell environment or not"
diff -u -r a/compile.sh b/compile.sh
--- a/compile.sh 1980-01-01 00:00:00.000000000 +0900
+++ b/compile.sh 2017-10-24 10:57:21.469702614 +0900
@@ -49,6 +49,8 @@
# a chance of overriding this in case they want to do so.
: ${VERBOSE:=no}
+export LDFLAGS=$LDFLAGS
+
source scripts/bootstrap/buildenv.sh
mkdir -p output
diff -u -r a/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java b/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java
--- a/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java 1980-01-01 00:00:00.000000000 +0900
+++ b/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java 2017-10-18 17:44:51.975049512 +0900
@@ -165,7 +165,7 @@
@Param(
name = "use_default_shell_env",
type = Boolean.class,
- defaultValue = "False",
+ defaultValue = "True",
named = true,
positional = false,
doc = "whether the action should use the built in shell environment or not."
# Description:
# The Bazel launcher.
package(
default_visibility = ["//visibility:public"],
)
WIN_LINK_OPTS = [
"-DEFAULTLIB:advapi32.lib", # GetUserNameW
"-DEFAULTLIB:ole32.lib", # CoTaskMemFree
"-DEFAULTLIB:shell32.lib", # SHGetKnownFolderPath
"-DEFAULTLIB:ws2_32.lib", # grpc
]
cc_library(
name = "blaze_util",
srcs = [
"blaze_util.cc",
"global_variables.h",
"startup_options.h",
] + select({
"//src:darwin": [
"blaze_util_darwin.cc",
"blaze_util_posix.cc",
],
"//src:darwin_x86_64": [
"blaze_util_darwin.cc",
"blaze_util_posix.cc",
],
"//src:freebsd": [
"blaze_util_freebsd.cc",
"blaze_util_posix.cc",
],
"//src:windows": [
"blaze_util_windows.cc",
],
"//src:windows_msys": [
"blaze_util_windows.cc",
],
"//src:windows_msvc": [
"blaze_util_windows.cc",
],
"//conditions:default": [
"blaze_util_linux.cc",
"blaze_util_posix.cc",
],
}),
hdrs = [
"blaze_util.h",
"blaze_util_platform.h",
],
linkopts = select({
"//src:darwin": [
"-framework CoreFoundation",
],
"//src:darwin_x86_64": [
"-framework CoreFoundation",
],
"//src:freebsd": [
],
"//src:windows": WIN_LINK_OPTS,
"//src:windows_msvc": WIN_LINK_OPTS,
"//conditions:default": [
"-lrt",
"{{ linkopts }}",
],
}),
deps = [
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
] + select({
"//src:windows": ["//src/main/native/windows:lib-file"],
"//src:windows_msys": ["//src/main/native/windows:lib-file"],
"//src:windows_msvc": ["//src/main/native/windows:lib-file"],
"//conditions:default": [],
}),
)
cc_binary(
name = "client",
srcs = [
"blaze.cc",
"blaze.h",
"global_variables.cc",
"global_variables.h",
"main.cc",
] + select({
"//src:windows": ["//src/main/native/windows:resources.o"],
"//src:windows_msvc": ["//src/main/native/windows:resources.o"],
"//conditions:default": [],
}),
copts = select({
"//src:windows": ["/wd4018"],
"//src:windows_msvc": ["/wd4018"],
"//conditions:default": ["-Wno-sign-compare"],
}),
linkopts = select({
"//src:darwin": [
],
"//src:darwin_x86_64": [
],
"//src:freebsd": [
"-lprocstat",
"-lm",
],
"//src:windows_msvc": [
],
"//conditions:default": [
"-lrt",
"-ldl",
],
}),
visibility = ["//src:__pkg__"],
deps = [
":blaze_util",
":option_processor",
":startup_options",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:errors",
"//src/main/cpp/util:logging",
"//src/main/cpp/util:strings",
"//src/main/protobuf:command_server_cc_proto",
"//third_party/ijar:zip",
],
)
cc_library(
name = "option_processor",
srcs = ["option_processor.cc"],
hdrs = [
"option_processor.h",
"option_processor-internal.h",
],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
":startup_options",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
"//src/main/cpp/util:logging",
],
)
cc_library(
name = "startup_options",
srcs = ["startup_options.cc"],
hdrs = ["startup_options.h"],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
"//src/main/cpp/util:errors",
],
)
cc_library(
name = "workspace_layout",
srcs = ["workspace_layout.cc"],
hdrs = ["workspace_layout.h"],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
"//src/main/cpp/util",
],
)
filegroup(
name = "srcs",
srcs = glob(["**"]) + ["//src/main/cpp/util:srcs"],
visibility = ["//src:__pkg__"],
)
package(default_visibility = ["//src:__subpackages__"])
cc_library(
name = "logging",
srcs = ["logging.cc"],
hdrs = ["logging.h"],
)
cc_library(
name = "process-tools",
srcs = ["process-tools.cc"],
hdrs = ["process-tools.h"],
deps = [":logging"],
)
cc_binary(
name = "process-wrapper",
srcs = select({
"//src:windows": ["process-wrapper-windows.cc"],
"//src:windows_msvc": ["process-wrapper-windows.cc"],
"//conditions:default": [
"process-wrapper.cc",
"process-wrapper.h",
"process-wrapper-legacy.cc",
"process-wrapper-legacy.h",
"process-wrapper-options.cc",
"process-wrapper-options.h",
],
}),
linkopts = ["-lm", "{{ linkopts }}"],
deps = select({
"//src:windows": [],
"//src:windows_msvc": [],
"//conditions:default": [
":process-tools",
":logging",
],
}),
)
cc_binary(
name = "build-runfiles",
srcs = select({
"//src:windows": ["build-runfiles-windows.cc"],
"//src:windows_msvc": ["build-runfiles-windows.cc"],
"//conditions:default": ["build-runfiles.cc"],
}),
linkopts = ["{{ linkopts }}"],
)
cc_binary(
name = "linux-sandbox",
srcs = select({
"//src:darwin": ["dummy-sandbox.c"],
"//src:darwin_x86_64": ["dummy-sandbox.c"],
"//src:freebsd": ["dummy-sandbox.c"],
"//src:windows": ["dummy-sandbox.c"],
"//src:windows_msys": ["dummy-sandbox.c"],
"//src:windows_msvc": ["dummy-sandbox.c"],
"//conditions:default": [
"linux-sandbox.cc",
"linux-sandbox.h",
"linux-sandbox-options.cc",
"linux-sandbox-options.h",
"linux-sandbox-pid1.cc",
"linux-sandbox-pid1.h",
],
}),
linkopts = ["-lm", "{{ linkopts }}"],
deps = select({
"//src:darwin": [],
"//src:darwin_x86_64": [],
"//src:freebsd": [],
"//src:windows": [],
"//src:windows_msys": [],
"//src:windows_msvc": [],
"//conditions:default": [
":logging",
":process-tools",
],
}),
)
filegroup(
name = "jdk-support",
srcs = [
"jdk.BUILD",
],
)
exports_files([
"build_interface_so",
])
filegroup(
name = "srcs",
srcs = glob(["**"]),
visibility = ["//src:__pkg__"],
)
# Description:
# singlejar C++ implementation.
package(default_visibility = ["//src:__subpackages__"])
JAR_TOOL_PATH_COPT_TPL = "-DJAR_TOOL_PATH=\\\"external/local_jdk/bin/jar%s\\\""
JAR_TOOL_PATH_COPTS = select({
"//src:windows": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//src:windows_msvc": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//src:windows_msys": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//conditions:default": [JAR_TOOL_PATH_COPT_TPL % ""],
})
filegroup(
name = "srcs",
srcs = glob(["**"]),
visibility = ["//src:__pkg__"],
)
filegroup(
name = "embedded_tools",
srcs = [
"BUILD",
"combiners.cc",
"combiners.h",
"diag.h",
"input_jar.cc",
"input_jar.h",
"mapped_file.h",
"options.cc",
"options.h",
"output_jar.cc",
"output_jar.h",
"singlejar_main.cc",
"token_stream.h",
"transient_bytes.h",
"zip_headers.h",
"zlib_interface.h",
],
visibility = ["//visibility:public"],
)
cc_binary(
name = "singlejar",
srcs = [
"singlejar_main.cc",
],
linkopts = select({
"//src:freebsd": ["-lm"],
"//conditions:default": ["{{ linkopts }}"],
}),
linkstatic = 1,
visibility = ["//visibility:public"],
deps = [
"options",
"output_jar",
"//third_party/zlib",
],
)
cc_test(
name = "combiners_test",
size = "large",
srcs = [
"combiners_test.cc",
":zip_headers",
":zlib_interface",
],
deps = [
":combiners",
":input_jar",
"//third_party:gtest",
"//third_party/zlib",
],
)
cc_test(
name = "input_jar_empty_jar_test",
srcs = [
"input_jar_empty_jar_test.cc",
],
data = [
"data/empty.zip",
],
deps = [
":input_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_preambled_test",
srcs = [
"input_jar_preambled_test.cc",
],
data = [
":test1",
],
deps = [
":input_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_scan_jartool_test",
size = "large",
srcs = [
"input_jar_scan_entries_test.h",
"input_jar_scan_jartool_test.cc",
],
copts = JAR_TOOL_PATH_COPTS,
data = [
"@local_jdk//:jar",
"@local_jdk//:jdk",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_scan_ziptool_test",
size = "large",
srcs = [
"input_jar_scan_entries_test.h",
"input_jar_scan_ziptool_test.cc",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_bad_jar_test",
srcs = [
"input_jar_bad_jar_test.cc",
],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "options_test",
srcs = [
"options.h",
"options_test.cc",
],
deps = [
":options",
":test_util",
":token_stream",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
sh_test(
name = "output_jar_bash_test",
srcs = ["output_jar_shell_test.sh"],
args = [
"src/test/shell",
"$(location :singlejar)",
],
data = [
":singlejar",
],
deps = ["//src/test/shell:bashunit"],
)
cc_test(
name = "output_jar_simple_test",
srcs = [
"output_jar_simple_test.cc",
],
copts = JAR_TOOL_PATH_COPTS,
data = [
":data1",
":data2",
":stored_jar",
":test1",
":test2",
"@local_jdk//:jar",
"@local_jdk//:jdk-default",
],
deps = [
":input_jar",
":options",
":output_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "token_stream_test",
srcs = [
"token_stream_test.cc",
],
deps = [
":test_util",
":token_stream",
"//third_party:gtest",
],
)
cc_test(
name = "transient_bytes_test",
size = "large",
srcs = [
"transient_bytes_test.cc",
":transient_bytes",
":zlib_interface",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
"//third_party/zlib",
],
)
cc_test(
name = "zip_headers_test",
size = "small",
srcs = [
"zip_headers_test.cc",
":zip_headers",
],
deps = ["//third_party:gtest"],
)
cc_test(
name = "zlib_interface_test",
srcs = [
"zlib_interface_test.cc",
":zlib_interface",
],
deps = [
":test_util",
"//third_party:gtest",
"//third_party/zlib",
],
)
sh_test(
name = "zip64_test",
srcs = ["zip64_test.sh"],
args = [
"src/test/shell",
"$(location :singlejar)",
"$(location @local_jdk//:jar)",
],
data = [
":singlejar",
"//tools/defaults:jdk",
"@local_jdk//:jar",
],
deps = ["//src/test/shell:bashunit"],
)
cc_library(
name = "combiners",
srcs = [
"combiners.cc",
":transient_bytes",
":zip_headers",
],
hdrs = ["combiners.h"],
deps = ["//third_party/zlib"],
)
cc_library(
name = "input_jar",
srcs = [
"diag.h",
"input_jar.cc",
"mapped_file.h",
],
hdrs = [
"input_jar.h",
"zip_headers.h",
],
)
cc_library(
name = "options",
srcs = [
"diag.h",
"options.cc",
"options.h",
],
hdrs = ["options.h"],
deps = [
":token_stream",
],
)
cc_library(
name = "output_jar",
srcs = [
"diag.h",
"mapped_file.h",
"output_jar.cc",
"output_jar.h",
":zip_headers",
],
hdrs = ["output_jar.h"],
deps = [
":combiners",
":input_jar",
":options",
"//src/main/cpp/util",
"//third_party/zlib",
],
)
cc_library(
name = "test_util",
srcs = ["test_util.cc"],
hdrs = ["test_util.h"],
deps = [
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_library(
name = "token_stream",
srcs = ["diag.h"],
hdrs = ["token_stream.h"],
)
filegroup(
name = "transient_bytes",
srcs = [
"diag.h",
"transient_bytes.h",
"zlib_interface.h",
":zip_headers",
],
)
filegroup(
name = "zip_headers",
srcs = ["zip_headers.h"],
)
filegroup(
name = "zlib_interface",
srcs = [
"diag.h",
"zlib_interface.h",
],
)
java_library(
name = "test1",
resources = [
"options.cc",
"zip_headers.h",
"zlib_interface.h",
],
)
java_library(
name = "test2",
resources = [
"token_stream.h",
"transient_bytes.h",
],
)
java_library(
name = "data1",
resources = [
"data/extra_file1",
"data/extra_file2",
],
)
java_library(
name = "data2",
resources = [
"data/extra_file1",
"data/extra_file3",
],
)
genrule(
name = "stored_jar",
srcs = [
"output_jar.cc",
"//tools/defaults:jdk",
],
outs = ["stored.jar"],
cmd = "$(location @local_jdk//:jar) -0cf \"$@\" $(location :output_jar.cc)",
tools = ["@local_jdk//:jar"],
)
@@ -39,6 +39,21 @@ default_toolchain {
   toolchain_identifier: "local_linux"
 }
+default_toolchain {
+  cpu: "ppc"
+  toolchain_identifier: "local_linux"
+}
+default_toolchain {
+  cpu: "ppc64"
+  toolchain_identifier: "local_linux"
+}
+default_toolchain {
+  cpu: "ios_x86_64"
+  toolchain_identifier: "ios_x86_64"
+}
 toolchain {
   abi_version: "armeabi-v7a"
   abi_libc_version: "armeabi-v7a"
@@ -90,8 +105,8 @@ toolchain {
   target_system_name: "local"
   toolchain_identifier: "local_linux"
-  tool_path { name: "ar" path: "/usr/bin/ar" }
-  tool_path { name: "compat-ld" path: "/usr/bin/ld" }
+  tool_path { name: "ar" path: "{{ binutils_path }}/ar" }
+  tool_path { name: "compat-ld" path: "{{ binutils_path }}/ld" }
   tool_path { name: "cpp" path: "{{ cpp_path }}" }
   tool_path { name: "dwp" path: "/usr/bin/dwp" }
   tool_path { name: "gcc" path: "{{ gcc_path }}" }
@@ -112,14 +127,14 @@
   # C(++) compiles invoke the compiler (as that is the one knowing where
   # to find libraries), but we provide LD so other rules can invoke the linker.
-  tool_path { name: "ld" path: "/usr/bin/ld" }
-  tool_path { name: "nm" path: "/usr/bin/nm" }
-  tool_path { name: "objcopy" path: "/usr/bin/objcopy" }
+  tool_path { name: "ld" path: "{{ binutils_path }}/ld" }
+  tool_path { name: "nm" path: "{{ binutils_path }}/nm" }
+  tool_path { name: "objcopy" path: "{{ binutils_path }}/objcopy" }
   objcopy_embed_flag: "-I"
   objcopy_embed_flag: "binary"
-  tool_path { name: "objdump" path: "/usr/bin/objdump" }
-  tool_path { name: "strip" path: "/usr/bin/strip" }
+  tool_path { name: "objdump" path: "{{ binutils_path }}/objdump" }
+  tool_path { name: "strip" path: "{{ binutils_path }}/strip" }
   # Anticipated future default.
   unfiltered_cxx_flag: "-no-canonical-prefixes"
@@ -160,9 +175,6 @@
   linker_flag: "-no-canonical-prefixes"
   # Have gcc return the exit code from ld.
   linker_flag: "-pass-exit-codes"
-  # Stamp the binary with a unique identifier.
-  linker_flag: "-Wl,--build-id=md5"
-  linker_flag: "-Wl,--hash-style=gnu"
   # Gold linker only? Can we enable this by default?
   # linker_flag: "-Wl,--warn-execstack"
   # linker_flag: "-Wl,--detect-odr-violations"
@@ -215,9 +227,6 @@
   tool_path { name: "dwp" path: "/usr/bin/dwp" }
   tool_path { name: "gcc" path: "osx_cc_wrapper.sh" }
   cxx_flag: "-std=c++0x"
-  ar_flag: "-static"
-  ar_flag: "-s"
-  ar_flag: "-o"
   linker_flag: "-lstdc++"
   linker_flag: "-undefined"
   linker_flag: "dynamic_lookup"
@@ -378,8 +387,6 @@
   # Have gcc return the exit code from ld.
   #linker_flag: "-pass-exit-codes"
   # Stamp the binary with a unique identifier.
-  #linker_flag: "-Wl,--build-id=md5"
-  linker_flag: "-Wl,--hash-style=gnu"
   # Gold linker only? Can we enable this by default?
   # linker_flag: "-Wl,--warn-execstack"
   # linker_flag: "-Wl,--detect-odr-violations"
@@ -520,40 +527,40 @@
 }
 toolchain {
   abi_version: "local"
   abi_libc_version: "local"
   builtin_sysroot: ""
   compiler: "windows_msys64"
   host_system_name: "local"
   needsPic: false
   target_libc: "local"
   target_cpu: "x64_windows"
   target_system_name: "local"
   toolchain_identifier: "local_windows_msys64"
   tool_path { name: "ar" path: "C:/tools/msys64/usr/bin/ar" }
   tool_path { name: "compat-ld" path: "C:/tools/msys64/usr/bin/ld" }
   tool_path { name: "cpp" path: "C:/tools/msys64/usr/bin/cpp" }
   tool_path { name: "dwp" path: "C:/tools/msys64/usr/bin/dwp" }
   # Use gcc instead of g++ so that C will compile correctly.
   tool_path { name: "gcc" path: "C:/tools/msys64/usr/bin/gcc" }
   cxx_flag: "-std=gnu++0x"
   linker_flag: "-lstdc++"
   # TODO(bazel-team): In theory, the path here ought to exactly match the path
   # used by gcc. That works because bazel currently doesn't track files at
   # absolute locations and has no remote execution, yet. However, this will need
   # to be fixed, maybe with auto-detection?
   cxx_builtin_include_directory: "C:/tools/msys64/"
   cxx_builtin_include_directory: "/usr/"
   tool_path { name: "gcov" path: "C:/tools/msys64/usr/bin/gcov" }
   tool_path { name: "ld" path: "C:/tools/msys64/usr/bin/ld" }
   tool_path { name: "nm" path: "C:/tools/msys64/usr/bin/nm" }
   tool_path { name: "objcopy" path: "C:/tools/msys64/usr/bin/objcopy" }
   objcopy_embed_flag: "-I"
   objcopy_embed_flag: "binary"
   tool_path { name: "objdump" path: "C:/tools/msys64/usr/bin/objdump" }
   tool_path { name: "strip" path: "C:/tools/msys64/usr/bin/strip" }
   linking_mode_flags { mode: DYNAMIC }
 }
 toolchain {
@@ -618,7 +625,6 @@
   compiler_flag: "-m64"
   compiler_flag: "/D__inline__=__inline"
   # TODO(pcloudy): Review those flags below, they should be defined by cl.exe
-  compiler_flag: "/DOS_WINDOWS=OS_WINDOWS"
   compiler_flag: "/DCOMPILER_MSVC"
   # Don't pollute with GDI macros in windows.h.
@@ -664,46 +670,6 @@
   compiler_flag: "/wd4996"
   linker_flag: "-m64"
-  feature {
-    name: 'include_paths'
-    flag_set {
-      action: 'preprocess-assemble'
-      action: 'c-compile'
-      action: 'c++-compile'
-      action: 'c++-header-parsing'
-      action: 'c++-header-preprocessing'
-      action: 'c++-module-compile'
-      flag_group {
-        flag: '/I%{quote_include_paths}'
-      }
-      flag_group {
-        flag: '/I%{include_paths}'
-      }
-      flag_group {
-        flag: '/I%{system_include_paths}'
-      }
-    }
-  }
-  feature {
-    name: 'dependency_file'
-    flag_set {
-      action: 'assemble'
-      action: 'preprocess-assemble'
-      action: 'c-compile'
-      action: 'c++-compile'
-      action: 'c++-module-compile'
-      action: 'c++-header-preprocessing'
-      action: 'c++-header-parsing'
-      expand_if_all_available: 'dependency_file'
-      flag_group {
-        flag: '/DEPENDENCY_FILE'
-        flag: '%{dependency_file}'
-      }
-    }
-  }
   # Stop passing -frandom-seed option
   feature {
     name: 'random_seed'
@@ -745,6 +711,10 @@
         flag: '/Fi%{output_preprocess_file}'
       }
     }
+    implies: 'legacy_compile_flags'
+    implies: 'user_compile_flags'
+    implies: 'sysroot'
+    implies: 'unfiltered_compile_flags'
   }
   action_config {
@@ -778,6 +748,132 @@
         flag: '/Fi%{output_preprocess_file}'
       }
     }
+    implies: 'legacy_compile_flags'
+    implies: 'user_compile_flags'
+    implies: 'sysroot'
+    implies: 'unfiltered_compile_flags'
}
# TODO(b/65151735): Remove legacy_compile_flags feature when legacy fields are
# not used in this crosstool
feature {
name: 'legacy_compile_flags'
flag_set {
expand_if_all_available: 'legacy_compile_flags'
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
action: 'c++-module-codegen'
flag_group {
iterate_over: 'legacy_compile_flags'
flag: '%{legacy_compile_flags}'
}
}
}
feature {
name: 'include_paths'
flag_set {
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
flag_group {
iterate_over: 'quote_include_paths'
flag: '/I%{quote_include_paths}'
}
flag_group {
iterate_over: 'include_paths'
flag: '/I%{include_paths}'
}
flag_group {
iterate_over: 'system_include_paths'
flag: '/I%{system_include_paths}'
}
}
}
feature {
name: 'dependency_file'
flag_set {
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-module-compile'
action: 'c++-header-preprocessing'
action: 'c++-header-parsing'
expand_if_all_available: 'dependency_file'
flag_group {
flag: '/DEPENDENCY_FILE'
flag: '%{dependency_file}'
}
}
}
feature {
name: 'user_compile_flags'
flag_set {
expand_if_all_available: 'user_compile_flags'
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
action: 'c++-module-codegen'
flag_group {
iterate_over: 'user_compile_flags'
flag: '%{user_compile_flags}'
}
}
}
feature {
name: 'sysroot'
flag_set {
expand_if_all_available: 'sysroot'
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
action: 'c++-module-codegen'
action: 'c++-link-executable'
action: 'c++-link-dynamic-library'
flag_group {
iterate_over: 'sysroot'
flag: '--sysroot=%{sysroot}'
}
}
}
feature {
name: 'unfiltered_compile_flags'
flag_set {
expand_if_all_available: 'unfiltered_compile_flags'
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
action: 'c++-module-codegen'
flag_group {
iterate_over: 'unfiltered_compile_flags'
flag: '%{unfiltered_compile_flags}'
}
}
   }
   compilation_mode_flags {
@@ -805,3 +901,37 @@
     compiler_flag: "-Xcompilation-mode=opt"
   }
 }
# Stub crosstool to allow construction of apple crosstool configuration in a
# static configuration set.
# TODO(b/33804121): Remove this once dynamic configurations are in place.
toolchain {
toolchain_identifier: "ios_x86_64"
host_system_name: "x86_64-apple-macosx"
target_system_name: "x86_64-apple-ios"
target_cpu: "ios_x86_64"
target_libc: "ios"
compiler: "compiler"
abi_version: "local"
abi_libc_version: "local"
supports_gold_linker: false
supports_incremental_linker: false
supports_fission: false
supports_interface_shared_objects: false
supports_normalizing_ar: false
supports_start_end_lib: false
tool_path { name: "ar" path: "/bin/false" }
tool_path { name: "compat-ld" path: "/bin/false" }
tool_path { name: "cpp" path: "/bin/false" }
tool_path { name: "dwp" path: "/bin/false" }
tool_path { name: "gcc" path: "/bin/false" }
tool_path { name: "gcov" path: "/bin/false" }
tool_path { name: "ld" path: "/bin/false" }
tool_path { name: "nm" path: "/bin/false" }
tool_path { name: "objcopy" path: "/bin/false" }
tool_path { name: "objdump" path: "/bin/false" }
tool_path { name: "strip" path: "/bin/false" }
linking_mode_flags { mode: DYNAMIC }
}
build --verbose_failures
@@ -24,36 +24,74 @@ template = ${:_profile_base_location_}/${:filename}.in
 rendered = ${:location}/${:filename}
 cpp_path = ${gcc:location}/bin/cpp
 gcc_path = ${gcc:location}/bin/gcc
+binutils_path = ${binutils:location}/bin
 gconv_path = ${gcc:location}/bin/gconv
 include_path = ${gcc:location}/include
 gcc_lib64_path = ${gcc:location}/lib64
 context =
   key cpp_path template-bazel-crosstool:cpp_path
   key gcc_path template-bazel-crosstool:gcc_path
+  key binutils_path template-bazel-crosstool:binutils_path
   key gconv_path template-bazel-crosstool:gconv_path
   key include_path template-bazel-crosstool:include_path
   key gcc_lib64_path template-bazel-crosstool:gcc_lib64_path
[template-bazel-src-main-cpp-build]
recipe = slapos.recipe.template:jinja2
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 640
filename = bazel_src_main_cpp_BUILD
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
linkopts = -Wl,-rpath,${gcc:location}/lib64
context =
key linkopts template-bazel-src-main-cpp-build:linkopts
[template-bazel-src-main-tools-build]
<= template-bazel-src-main-cpp-build
filename = bazel_src_main_tools_BUILD
[template-bazel-src-tools-singlejar-build]
<= template-bazel-src-main-cpp-build
filename = bazel_src_tools_singlejar_BUILD
 [bazel]
 recipe = slapos.recipe.build
-url = https://github.com/bazelbuild/bazel/releases/download/0.4.3/bazel-0.4.3-dist.zip
-md5sum = cbd53f6f59915506da8998dab2098921
+url = https://github.com/bazelbuild/bazel/releases/download/0.6.1/bazel-0.6.1-dist.zip
+md5sum = 8c5c827e33d3ff74c263c1299810b485
 patch-binary = ${patch:location}/bin/patch
-patch-file-path = ${:_profile_base_location_}/bazel-0.4.3.patch
+patch-file-path = ${:_profile_base_location_}/bazel-0.6.1.patch
 bazel-crosstool-modified-file-path = ${template-bazel-crosstool:rendered}
+bazel-src-main-cpp-build-path = ${template-bazel-src-main-cpp-build:rendered}
+bazel-src-main-tools-build-path = ${template-bazel-src-main-tools-build:rendered}
+bazel-src-tools-singlejar-build-path = ${template-bazel-src-tools-singlejar-build:rendered}
 unzip-bin = ${unzip:location}/bin
 zip-bin = ${zip:location}/bin
 gcc-bin = ${gcc:location}/bin
 gcc-lib = ${gcc:location}/lib
 gcc-lib64 = ${gcc:location}/lib64
 java_home = ${zulu:location}
+bazelrc = ${:_profile_base_location_}/bazelrc
 script =
   extract_dir = self.extract(self.download(self.options['url'], self.options['md5sum']))
   crosstool_path = os.path.join(extract_dir, 'tools', 'cpp', 'CROSSTOOL')
   os.chmod(crosstool_path, 0644)
   shutil.copy(self.options['bazel-crosstool-modified-file-path'],
               crosstool_path)
-  target_path = extract_dir+'/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java'
+  src_main_cpp_build_path = os.path.join(extract_dir, 'src', 'main', 'cpp', 'BUILD')
+  os.chmod(src_main_cpp_build_path, 0644)
+  shutil.copy(self.options['bazel-src-main-cpp-build-path'],
+              src_main_cpp_build_path)
+  src_main_tools_build_path = os.path.join(extract_dir, 'src', 'main', 'tools', 'BUILD')
+  os.chmod(src_main_tools_build_path, 0644)
+  shutil.copy(self.options['bazel-src-main-tools-build-path'],
+              src_main_tools_build_path)
+  src_tools_singlejar_build_path = os.path.join(extract_dir, 'src', 'tools', 'singlejar', 'BUILD')
+  os.chmod(src_tools_singlejar_build_path, 0644)
+  shutil.copy(self.options['bazel-src-tools-singlejar-build-path'],
+              src_tools_singlejar_build_path)
+  target_path = extract_dir+'/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java'
   os.chmod(target_path, 0644)
   call([self.buildout['bazel']['patch-binary'], '-p1', '-d', extract_dir, '-i', self.buildout['bazel']['patch-file-path']])
   path = ':'.join((
@@ -65,13 +103,15 @@ script =
   env = {'JAVA_HOME':self.options['java_home'],
          'PATH':path,
          'LD_LIBRARY_PATH':':'.join((
             self.options['gcc-lib'],
             self.options['gcc-lib64'],
             os.environ.get('LD_LIBRARY_PATH', '')
             )),
          'LDFLAGS':'-Wl,-rpath='+self.options['gcc-lib64'],
          'CC':self.options['gcc-bin']+'/gcc',
          'CXX':self.options['gcc-bin']+'/g++',
+         'BAZELRC':self.options['bazelrc'],
+         'VERBOSE':'yes',
         }
   bin_dir = os.path.join(self.options['location'], 'bin')
   os.makedirs(bin_dir)
...
@@ -11,9 +11,9 @@ parts =
 [ca-certificates]
 recipe = slapos.recipe.cmmi
-url = http://http.debian.net/debian/pool/main/c/ca-certificates/ca-certificates_20161130+nmu1.tar.xz
+url = http://http.debian.net/debian/pool/main/c/ca-certificates/ca-certificates_20170717.tar.xz
 patch-binary = ${patch:location}/bin/patch
-md5sum = a09e8b63126188fd0ed77f6fbaf5d35f
+md5sum = 55a6bb6b98afb16b3cde8e3ad1e262eb
 patches =
   ${:_profile_base_location_}/ca-certificates-any-python.patch#087b5e860c7a4b8ff6656c95c5835ee2
   ${:_profile_base_location_}/ca-certificates-sbin-dir.patch#0b4e7d82ce768823c01954ee41ef177b
...
@@ -40,7 +40,7 @@ interpreter = chainer-python
 scripts = chainer-python

 [versions]
-chainer = 1.22.0
-filelock = 2.0.7
+chainer = 3.0.0
+filelock = 2.0.12
 nose = 1.3.7
-six = 1.10.0
+six = 1.11.0
@@ -6,14 +6,15 @@ extends =
   ../openssl/buildout.cfg
   ../perl/buildout.cfg
   ../pkgconfig/buildout.cfg
+  ../xz-utils/buildout.cfg
   ../zlib/buildout.cfg
 parts =
   curl
 [curl]
 recipe = slapos.recipe.cmmi
-url = http://curl.haxx.se/download/curl-7.54.1.tar.bz2
-md5sum = 6b6eb722f512e7a24855ff084f54fe55
+url = http://curl.haxx.se/download/curl-7.56.0.tar.xz
+md5sum = 18ebc36c5dc9317d4a0b5db94a4e12ad
 configure-options =
   --disable-static
   --disable-ldap
@@ -44,6 +45,6 @@ configure-options =
   --without-nghttp2
 environment =
-  PATH=${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
+  PATH=${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
   PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
   LDFLAGS=-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib
# ethtool - query or control network driver and hardware settings
# https://www.kernel.org/pub/software/network/ethtool
[buildout]
parts = ethtool
[ethtool]
recipe = slapos.recipe.cmmi
url = https://www.kernel.org/pub/software/network/ethtool/ethtool-4.11.tar.xz
md5sum = 16d38f4ebe23e44f96f7d8b38ed3652c
@@ -19,7 +19,6 @@ configure-options =
   --without-libidn
   --without-x
   --with-drivers=FILES
-  --with-fontpath=${ghostscript-fonts:location}
 # it seems that parallel build sometimes fails for ghostscript.
 make-options = -j1
 environment =
@@ -35,9 +34,3 @@ environment =
 <= ghostscript-common
 url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs920/ghostscript-9.20.tar.xz
 md5sum = 8f3d383d48da22345937b66b01ab2960
-[ghostscript-fonts]
-recipe = slapos.recipe.build:download-unpacked
-strip-top-level-dir = false
-url = http://downloads.ghostscript.com/public/fonts/urw-base35-v1.10.zip
-md5sum = 66e8bbd8228519d5dba82b9433a61bb0
@@ -6,6 +6,7 @@ extends =
   ../perl/buildout.cfg
   ../xz-utils/buildout.cfg
   ../zlib/buildout.cfg
+  ../python-2.7/buildout.cfg

 parts =
   glib
...
[buildout]
extends =
../bzip2/buildout.cfg
../file/buildout.cfg
../gnutls/buildout.cfg
../sqlite3/buildout.cfg
parts = gnupg
[npth]
<= gpg-common
version = 1.5
md5sum = 9ba2dc4302d2f32c66737c43ed191b1b
configure-options-extra =
--disable-tests
[libassuan]
<= with-gpg-error
version = 2.4.3
md5sum = 8e01a7c72d3e5d154481230668e6eb5a
[libksba]
<= with-gpg-error
version = 1.3.5
md5sum = 8302a3e263a7c630aa7dea7d341f07a2
[gnupg]
<= libgcrypt
version = 2.2.1
md5sum = f781efae8756f6cf5d500aad8e4b33e2
configure-options +=
--disable-ldap
--disable-card-support
--disable-photo-viewers
--with-bzip2=${bzip2:location}
--with-ksba-prefix=${libksba:location}
--with-libassuan-prefix=${libassuan:location}
--with-libgcrypt-prefix=${libgcrypt:location}
--with-npth-prefix=${npth:location}
--with-readline=${readline:location}
environment =
PATH=${pkgconfig:location}/bin:${gettext:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${gnutls:pkg-config-path}:${gnutls:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig
CPPFLAGS=-I${file:location}/include -I${libgcrypt:location}/include
LDFLAGS=-L${file:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${libgpg-error:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -Wl,-rpath=${libksba:location}/lib -Wl,-rpath=${libassuan:location}/lib -Wl,-rpath=${libgcrypt:location}/lib -Wl,-rpath=${npth:location}/lib
@@ -14,7 +14,6 @@ url = http://www.gnupg.org/ftp/gcrypt/${:_buildout_section_name_}/${:_buildout_s
 configure-options = ${:configure-options-extra}
   --enable-option-checking=fatal
   --disable-dependency-tracking
-  --disable-doc
 environment = ${:environment-extra}
   PATH=${gettext:location}/bin:%(PATH)s
 environment-extra =
@@ -24,22 +23,30 @@ environment-extra =
 version = 1.27
 md5sum = 5217ef3e76a7275a2a3b569a12ddc989
 configure-options-extra =
+  --disable-doc
   --disable-tests

-[libgcrypt]
+[with-gpg-error]
 <= gpg-common
-version = 1.8.1
-md5sum = b21817f9d850064d2177285f1073ec55
-configure-options-extra =
+configure-options-extra = ${:configure-options-extra2}
   --with-gpg-error-prefix=${libgpg-error:location}
+configure-options-extra2 =
 environment-extra =
   LDFLAGS=-Wl,-rpath=${libgpg-error:location}/lib

+[libgcrypt]
+<= with-gpg-error
+version = 1.8.1
+md5sum = b21817f9d850064d2177285f1073ec55
+configure-options-extra2 =
+  --disable-doc

 [gnutls]
 <= gpg-common
 url = http://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.15.tar.xz
 md5sum = bcdcbc65c50a7499617ad9f4d0058de9
 configure-options-extra =
+  --disable-doc
   --disable-static
   --disable-libdane
   --disable-guile
...
 # Go language - https://golang.org/
 [buildout]
-parts = golang
+extends =
+  ../findutils/buildout.cfg
+  ../git/buildout.cfg
+
+parts = gowork

-[golang]
-<= golang18

+# ---- Go builds itself ----
 [golang-common]
 recipe = slapos.recipe.cmmi
@@ -12,42 +14,108 @@ configure-command = :
 location = ${buildout:parts-directory}/${:_buildout_section_name_}
 make-binary =
 make-targets= cd src && ./all.bash && cp -alf .. ${:location}
+# some testdata files have an issue with slapos.extension.strip.
+post-install = ${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true
 environment =
   GOROOT_FINAL=${:location}
   ${:environment-extra}

+[golang14-repository]
+<= git-repository
+repository = https://github.com/golang/go.git
+revision = go1.4.3-16-g4d5426a
+
 [golang14]
 <= golang-common
-url = https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz
-md5sum = 76e42c8152e8560ded880a6d1d1f53cb
+path = ${golang14-repository:location}
 environment-extra =
+make-targets= cd src && ./make.bash && cp -alf .. ${:location}

-[golang15]
+[golang18]
 <= golang-common
-url = https://storage.googleapis.com/golang/go1.5.4.src.tar.gz
-md5sum = a04d570515c46e4935c63605cbd3a04e
-# go1.5 needs go1.4 to bootstrap
+url = https://storage.googleapis.com/golang/go1.8.3.src.tar.gz
+md5sum = 64e9380e07bba907e26a00cf5fcbe77e
+# go1.8 needs go1.4 to bootstrap
 environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

-[golang16]
+[golang19]
 <= golang-common
-url = https://storage.googleapis.com/golang/go1.6.3.src.tar.gz
-md5sum = bf3fce6ccaadd310159c9e874220e2a2
-# go1.6 needs go1.4 to bootstrap
+url = https://storage.googleapis.com/golang/go1.9.2.src.tar.gz
+md5sum = 44105c865a1a810464df79233a05a568
+# go1.9 needs go1.4 to bootstrap
 environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

-[golang18]
-<= golang-common
-url = https://storage.googleapis.com/golang/go1.8.src.tar.gz
-md5sum = 7743960c968760437b6e39093cfe6f67
-# go1.8 needs go1.4 to bootstrap
-environment-extra =
-  GOROOT_BOOTSTRAP=${golang14:location}

+# ---- infrastructure to build Go workspaces / projects ----
+
+# gowork is a top-level section representing workspace
#
# users should add `install` field to [gowork] to describe packages they want to
# be installed (+ automatically their dependencies are installed too). e.g.
#
# [gowork]
# install =
# lab.nexedi.com/kirr/neo/go/... \
# github.com/pkg/profile \
# golang.org/x/perf/cmd/benchstat
[gowork]
directory = ${buildout:directory}/go.work
src = ${:directory}/src
bin = ${:directory}/bin
depends = ${gowork.goinstall:recipe}
# go version used for the workspace (possible to override in applications)
golang = ${golang19:location}
# everything is done by dependent parts
recipe = plone.recipe.command
command = :
# env.sh for compiling and running go programs
[gowork]
env.sh = ${gowork-env.sh:output}
[gowork-env.sh]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/goenv.sh.in
output = ${gowork:directory}/env.sh
depends = ${gowork.mkdir:recipe}
md5sum = a9a265135931b3da53f4392870748264
[gowork.mkdir]
# NOTE do not use slapos.cookbook:mkdirectory here - if anything in software (not instance)
# uses slapos.cookbook:* in recipe - slapos.cookbook will get compiled against system
# libxml/libxslt or fail to bootstrap at all if those are not present.
recipe = plone.recipe.command
command = mkdir -p ${gowork:directory}
update-command = ${:command}
stop-on-error = true
# install go packages
# clients should put package list to install to gowork:install ("..." requests installing everything)
[gowork.goinstall]
recipe = plone.recipe.command
command = bash -c ". ${gowork:env.sh} && go install -v ${gowork:install}"
update-command = ${:command}
stop-on-error = true
[git-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
# a go package should:
# 1) <= go-git-package
# 2) provide go.importpath
# 3) provide repository (which is not the same as importpath in general case)
#
# the list of go packages for a go workspace state can be automatically
# generated with the help of go-pkg-snapshot tool.
[go-git-package]
<= git-repository
location = ${gowork:src}/${:go.importpath}
# env.sh for a Go workspace
# Usage: env.sh [/path/to/env.sh]
# ---- 8< ---- (buildout substitution here)
# PATH so that go & friends work out of the box
export PATH=${gowork:golang}/bin:${git:location}/bin:${buildout:bin-directory}:$PATH
X=${gowork:directory}
# ---- 8< ----
export GOPATH=$X:$GOPATH
export PATH=$X/bin:$PATH
export PS1="(`basename $X`) $PS1"
# strip trailing : from $GOPATH
GOPATH=$${GOPATH%:}
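For illustration, this is roughly how the generated env.sh is consumed once the workspace has been built; the install target is just the example path already used in the [gowork] comment above, and go.work is the default ${gowork:directory} location:

$ . go.work/env.sh
(go.work) $ go version                                    # the Go release pinned by gowork:golang
(go.work) $ go install -v lab.nexedi.com/kirr/neo/go/...  # binaries land under go.work/bin
(go.work) $ ls go.work/bin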
#!/bin/bash
# gowork-snapshot - find out installed go packages and produce buildout code to install them pinned
# FIXME currently works only with cwd=gowork/src
echo "# Code generated by gowork-snapshot; DO NOT EDIT."
# list installed go git repositories.
#
# this gives something like:
# github.com/cznic/strutil https://github.com/cznic/strutil 529a34b1c1
# golang.org/x/net https://go.googlesource.com/net 1087133bc4
# ...
gogit_list() {
find . -name .git | sort | \
while read repo; do
importpath=${repo#./}
importpath=${importpath%/.git}
echo -ne "${importpath}\t"
echo -ne "`git_upstream_url $repo`\t"
git -C $repo describe --long --always --abbrev=10
done
}
# git_upstream_url <repo> - show current branch upstream URL
git_upstream_url() {
repo=$1
head="`git -C $repo symbolic-ref --short HEAD`" # current branch - e.g. "t"
remote="`git -C $repo config --get branch.$head.remote`" # upstream name, e.g. "kirr"
url="`git -C $repo config --get remote.$remote.url`" # upstream URL
echo "$url"
}
# buildout_safe <name> - canonicalize name to be allowed to be used in buildout section name
# e.g. aaa/bbb -> aaa_bbb
#
# XXX can't use e.g. "go!lab.nexedi.com/kirr/neo" because buildout disallows
# "!" or "/" in section name references.
buildout_safe() {
# see _simple regex in slapos.buildout
echo -n "$1" | tr --complement -- "-a-zA-Z0-9 ._" _
}
echo
echo "# list of go git repositories to fetch"
echo "[gowork.goinstall]"
echo "depends_gitfetch ="
gogit_list | \
while read pkg _ _; do
echo " \${go_`buildout_safe $pkg`:recipe}"
done
echo
gogit_list | \
while read pkg url rev; do
echo
echo "[go_`buildout_safe $pkg`]"
echo "<= go-git-package"
echo "go.importpath = $pkg"
echo "repository = $url"
echo "revision = $rev"
done
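To make the generated profile concrete, this is roughly what the script above prints for the first sample repository mentioned in its comment (values are illustrative; buildout_safe turns "/" into "_" in section names):

# Code generated by gowork-snapshot; DO NOT EDIT.

# list of go git repositories to fetch
[gowork.goinstall]
depends_gitfetch =
  ${go_github.com_cznic_strutil:recipe}

[go_github.com_cznic_strutil]
<= go-git-package
go.importpath = github.com/cznic/strutil
repository = https://github.com/cznic/strutil
revision = 529a34b1c1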
@@ -12,8 +12,8 @@ parts = haproxy
 [haproxy]
 recipe = slapos.recipe.cmmi
-url = http://www.haproxy.org/download/1.7/src/haproxy-1.7.5.tar.gz
-md5sum = ed84c80cb97852d2aa3161ed16c48a1c
+url = http://www.haproxy.org/download/1.7/src/haproxy-1.7.9.tar.gz
+md5sum = a2bbbdd45ffe18d99cdcf26aa992f92d
 configure-command = true
 # If the system is running on Linux 2.6, we use "linux26" as the TARGET,
 # otherwise use "generic".
...
@@ -89,7 +89,7 @@ input = inline:
 recipe = slapos.recipe.cmmi
 path = ${helloweb-repository:location}/go/
-go = ${golang18:location}/bin/go
+go = ${golang19:location}/bin/go
 configure-command = :
 make-binary =
 make-targets= cd ${:path} &&
...
@@ -54,7 +54,6 @@ configure-options =
   --without-wmf
   --with-bzlib=${bzip2:location}
   --with-zlib=${zlib:location}
-  --with-gs-font-dir=${ghostscript-fonts:location}
   --with-frozenpaths
 patch-options = -p1
 patches =
...
# ioping - simple disk I/O latency measuring tool
# https://github.com/koct9i/ioping
[buildout]
extends =
../git/buildout.cfg
parts =
ioping
[ioping-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
# NOTE we use a bit patched ioping version which shows not only avg latency but also its distribution
# repository = https://github.com/koct9i/ioping.git
repository = https://lab.nexedi.com/kirr/ioping.git
revision = v1.0-9-g34c97f7636
[ioping]
recipe = slapos.recipe.cmmi
path = ${ioping-repository:location}
configure-command = :
make-binary =
# XXX without vvv PREFIX=${:location} does not work
location= ${buildout:parts-directory}/${:_buildout_section_name_}
make-targets= make install PREFIX=${:location}
@@ -25,7 +25,7 @@ interpreter = keras-python
 scripts = keras-python

 [versions]
-Keras = 2.0.1
-tensorflow = 1.0.1
+Keras = 2.1.0
+tensorflow = 1.4.0
 h5py = 2.7.0rc2
 Cython = 0.25.2
@@ -18,15 +18,15 @@ environment =
 [libpng12]
 <= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.2.57.tar.xz
-md5sum = 307052e5e8af97b82b17b64fb1b3677a
+url = http://download.sourceforge.net/libpng/libpng-1.2.58.tar.xz
+md5sum = 1fe68fa3cdab99dbcfd2a6b4de95645f

 [libpng15]
 <= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.5.28.tar.xz
-md5sum = 847aa2a1b231c07466d7f4167537424a
+url = http://download.sourceforge.net/libpng/libpng-1.5.29.tar.xz
+md5sum = b9e5452ee9681c313638efedb16c12a6

 [libpng]
 <= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.6.28.tar.xz
-md5sum = 425354f86c392318d31aedca71019372
+url = http://download.sourceforge.net/libpng/libpng-1.6.32.tar.xz
+md5sum = e01be057a9369183c959b793a685ad15
# LMbench - Tools for Performance Analysis
# http://www.bitmover.com/lmbench/
[buildout]
extends =
../git/buildout.cfg
../golang/buildout.cfg
parts =
lmbench
[lmbench-repository]
<= git-repository
# NOTE we use a bit patched lmbench version with fixes to lat_tcp for errors not to go unnoticed and other addons
# repository = https://svn.code.sf.net/p/lmbench/code
repository = https://lab.nexedi.com/kirr/lmbench.git
revision = 9b108b6ff3
[lmbench]
recipe = slapos.recipe.cmmi
path = ${lmbench-repository:location}
configure-command = :
make-binary =
# XXX without vvv BASE=${:location} does not work
location= ${buildout:parts-directory}/${:_buildout_section_name_}
make-targets= cd ${lmbench-repository:location}/lmbench3/src
&& make install BASE=${:location}
&& bash -c ". ${gowork:env.sh} && go build -o ${:location}/bin/lat_tcp_go lat_tcp.go"
@@ -20,14 +20,15 @@ extends =
 # The following lines are only for mariarocks.cfg
 # to be extended last without touching 'parts'.
   ../gcc/buildout.cfg
+  ../zstd/buildout.cfg

 parts =
   mariadb

 [mariadb]
 recipe = slapos.recipe.cmmi
-url = https://downloads.mariadb.org/f/mariadb-10.1.26/source/mariadb-10.1.26.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
-md5sum = bb88afb72434c6d567c742896dd50d41
+url = https://downloads.mariadb.org/f/mariadb-10.1.28/source/mariadb-10.1.28.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
+md5sum = 38acd5b44c56791701d80fddf088ef38
 patch-options = -p0
 patches =
   ${:_profile_base_location_}/mariadb_10.1.21_create_system_tables__no_test.patch#3c76aa9564a162f13aced7c0a3f783b3
@@ -61,12 +62,14 @@ configure-options =
   -DCMAKE_C_FLAGS="${:CMAKE_CFLAGS}"
   -DCMAKE_CXX_FLAGS="${:CMAKE_CFLAGS}"
   -DCMAKE_INSTALL_RPATH=${:CMAKE_LIBRARY_PATH}
-CMAKE_CFLAGS = -I${bzip2:location}/include -I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include
+CMAKE_CFLAGS = -I${bzip2:location}/include -I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include ${:extra_cflags}
 CMAKE_LIBRARY_PATH = ${bzip2:location}/lib:${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${xz-utils:location}/lib:${zlib:location}/lib${:extra_library_path}
+extra_cflags =
+extra_include_path =
 extra_library_path =
 environment =
   CMAKE_PROGRAM_PATH=${cmake:location}/bin
-  CMAKE_INCLUDE_PATH=${bzip2:location}/include:${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${xz-utils:location}/include:${zlib:location}/include
+  CMAKE_INCLUDE_PATH=${bzip2:location}/include:${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${xz-utils:location}/include:${zlib:location}/include${:extra_include_path}
   CMAKE_LIBRARY_PATH=${:CMAKE_LIBRARY_PATH}
   LDFLAGS=-L${bzip2:location}/lib -L${jemalloc:location}/lib -L${libaio:location}/lib -L${pcre:location}/lib -L${xz-utils:location}/lib -L${zlib:location}/lib
   PATH=${patch:location}/bin:%(PATH)s
...
# Do not extend any file that touches buildout:parts. # Do not extend any file that touches buildout:parts.
[mariadb] [mariadb]
url = https://downloads.mariadb.org/f/mariadb-10.2.8/source/mariadb-10.2.8.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve url = https://downloads.mariadb.org/f/mariadb-10.2.9/source/mariadb-10.2.9.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
md5sum = f93cbd5bfde3c0d082994764ff7db580 md5sum = c59999bd182ddeb3db3d55250aecd8f8
stable-patches = stable-patches =
configure-options += configure-options +=
-DPLUGIN_DAEMON_EXAMPLE=NO -DPLUGIN_DAEMON_EXAMPLE=NO
...@@ -10,4 +10,6 @@ configure-options += ...@@ -10,4 +10,6 @@ configure-options +=
-DPLUGIN_MROONGA=NO -DPLUGIN_MROONGA=NO
-DCMAKE_C_COMPILER=${gcc:location}/bin/gcc -DCMAKE_C_COMPILER=${gcc:location}/bin/gcc
-DCMAKE_CXX_COMPILER=${gcc:location}/bin/g++ -DCMAKE_CXX_COMPILER=${gcc:location}/bin/g++
extra_library_path = :${gcc:location}/lib:${gcc:location}/lib64 extra_cflags = -I${zstd:location}/include
extra_include_path = :${zstd:location}/include
extra_library_path = :${zstd:location}/lib:${gcc:location}/lib:${gcc:location}/lib64
[buildout]
extends =
../pkgconfig/buildout.cfg
../protobuf/buildout.cfg
../ncurses/buildout.cfg
../perl/buildout.cfg
../zlib/buildout.cfg
../openssl/buildout.cfg
[mosh]
recipe = slapos.recipe.cmmi
url = https://mosh.org/mosh-1.3.0.tar.gz
md5sum = d961276995936953bf2d5a794068b076
configure-options =
--with-curses=${ncurses:location}
environment =
PATH=${perl:location}/bin:${pkgconfig:location}/bin:${protobuf-cpp:location}/bin:%(PATH)s
CXXFLAGS =-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${protobuf-cpp:location}/lib -Wl,-rpath=${protobuf-cpp:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
PKG_CONFIG_PATH=${protobuf-cpp:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${ncurses:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
...@@ -27,4 +27,13 @@ environment = ...@@ -27,4 +27,13 @@ environment =
configure-options = configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_} --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--exec-prefix=${buildout:parts-directory}/${:_buildout_section_name_} --exec-prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-privsep-path=${buildout:parts-directory}/${:_buildout_section_name_}/var/empty --with-privsep-path=${buildout:parts-directory}/${:_buildout_section_name_}/var/empty
\ No newline at end of file
[openssh-output]
# Shared binary location to ease migration
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = ${coreutils-output:test} -x ${:ssh} -a -x ${:keygen}
ssh = ${openssh:location}/bin/ssh
keygen = ${openssh:location}/bin/ssh-keygen
\ No newline at end of file
...@@ -16,8 +16,8 @@ parts = ...@@ -16,8 +16,8 @@ parts =
[openssl] [openssl]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.2k.tar.gz url = https://www.openssl.org/source/openssl-1.0.2m.tar.gz
md5sum = f965fc0bf01bf882b31314b61391ae65 md5sum = 10e9e37f492094b9ef296f68f24a7666
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
# 'prefix' option to override --openssldir/--prefix (which is useful # 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with INSTALL_PREFIX). Used by slapos.package.git/obs # when combined with INSTALL_PREFIX). Used by slapos.package.git/obs
......
...@@ -13,8 +13,8 @@ recipe = slapos.recipe.cmmi ...@@ -13,8 +13,8 @@ recipe = slapos.recipe.cmmi
depends = depends =
${perl-DBI:location} ${perl-DBI:location}
${perl-Devel-CheckLib:location} ${perl-Devel-CheckLib:location}
url = http://www.cpan.org/modules/by-module/DBD/DBD-mysql-4.042.tar.gz url = http://www.cpan.org/modules/by-module/DBD/DBD-mysql-4.043.tar.gz
md5sum = a144bd950b55af68835d44bc4ea6e5aa md5sum = 4a00dd7f1c057931147c65dfc4901c36
patches = patches =
${:_profile_base_location_}/DBD-mysql-4.027.rpathsupport.patch#a932982b7725e6621cfce3a3d7917e03 ${:_profile_base_location_}/DBD-mysql-4.027.rpathsupport.patch#a932982b7725e6621cfce3a3d7917e03
${:_profile_base_location_}/DBD-mysql-4.042.mariadb.patch#5864d36d19c4a05034b3a4873f7c659a ${:_profile_base_location_}/DBD-mysql-4.042.mariadb.patch#5864d36d19c4a05034b3a4873f7c659a
......
...@@ -7,9 +7,9 @@ parts = ...@@ -7,9 +7,9 @@ parts =
[perl] [perl]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
version = 5.26.0 version = 5.26.1
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2 url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 8d34cb5a4eccd66e7a6a80e62b7b4aec md5sum = 467cd0c43514b9b5e397c8b385581f53
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_} siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1 patch-options = -p1
patches = patches =
......
...@@ -35,5 +35,5 @@ environment = ...@@ -35,5 +35,5 @@ environment =
[postgresql92] [postgresql92]
<= postgresql-common <= postgresql-common
url = http://ftp.postgresql.org/pub/source/v9.2.22/postgresql-9.2.22.tar.bz2 url = http://ftp.postgresql.org/pub/source/v9.2.23/postgresql-9.2.23.tar.bz2
md5sum = c5d3fb5229baf9e94ee2287980c55321 md5sum = c972e32b7f17dbc652d2462b7690d116
...@@ -3,6 +3,15 @@ parts = protobuf ...@@ -3,6 +3,15 @@ parts = protobuf
[protobuf] [protobuf]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-python-3.1.0.tar.gz url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-python-3.4.0.tar.gz
md5sum = 7a227a21379a2ea08cc5d7ba1fb1ba5b md5sum = 0820cc2e56d71aef8e99794fcbd184cd
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
[protobuf-cpp]
recipe = slapos.recipe.cmmi
url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-cpp-3.4.0.tar.gz
md5sum = 6d59dad503bea5ad420fd09ddad84481
configure-command =
./autogen.sh
./configure --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
...@@ -28,9 +28,9 @@ python = python2.7 ...@@ -28,9 +28,9 @@ python = python2.7
[python2.7] [python2.7]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
package_version = 2.7.13 package_version = 2.7.14
package_version_suffix = package_version_suffix =
md5sum = 53b43534153bb2a0363f08bae8b9d990 md5sum = 1f6db41ad91d9eb0a6f0c769b8613c5b
# This is actually the default setting for prefix, but we can't use it in # This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here. # other settings in this part if we don't set it explicitly here.
......
[buildout]
extends =
../../stack/slapos.cfg
../gcc/buildout.cfg
../cython/buildout.cfg
../scipy/buildout.cfg
parts =
python-cocoapi-build-install-egg
[python-cocoapi-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/cocodataset/cocoapi.git
tag = master
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --quiet -b ${:tag} ${:repository} ${:location}; cd ${:location}; ${:patch-binary} -p1 -d . < ${:_profile_base_location_}/setup.py.patch ) || (rm -fr ${:location}; exit 1)
[python-cocoapi-build-interpreter]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
setuptools
${cython:egg}
${scipy:egg}
${numpy:egg}
interpreter = python-cocoapi-build-interpreter
scripts = python-cocoapi-build-interpreter
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[python-cocoapi-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${python-cocoapi-repository:location}/PythonAPI
python-bin = ${buildout:bin-directory}/${python-cocoapi-build-interpreter:interpreter}
gcc-location = ${gcc:location}
script =
os.makedirs(location)
workdir = self.options['workdir']
python_bin = self.options['python-bin']
gcc_location = self.options['gcc-location']
env = {'PATH':':'.join([gcc_location+'/bin',
os.environ['PATH']]),
'CC':gcc_location+'/bin/gcc',
'CXX':gcc_location+'/bin/g++',
}
call([python_bin, 'setup.py', 'build_ext'], cwd=workdir, env=env)
call([python_bin, 'setup.py', 'bdist_egg'], cwd=workdir, env=env)
[python-cocoapi-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
python-cocoapi-repository-path = ${python-cocoapi-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-python-cocoapi-build = ${python-cocoapi-build:location}
egg = pycocotools
script =
os.makedirs(location)
workdir = self.options['python-cocoapi-repository-path']+'/PythonAPI'
egg_name = 'pycocotools-2.0-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
diff --git a/PythonAPI/setup.py b/PythonAPI/setup.py
index eb3d508..d619b3c 100644
--- a/PythonAPI/setup.py
+++ b/PythonAPI/setup.py
@@ -1,4 +1,5 @@
-from distutils.core import setup
+#from distutils.core import setup
+from setuptools import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
@@ -21,4 +22,4 @@ setup(name='pycocotools',
version='2.0',
ext_modules=
cythonize(ext_modules)
- )
\ No newline at end of file
+ )
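Note on the setup.py hunk above: the python-cocoapi-build part runs "setup.py bdist_egg", and bdist_egg is a setuptools command that plain distutils does not provide, which is why the import is swapped. A minimal illustrative sketch (not part of the repository):

# bdist_egg only becomes available once setuptools' setup() is used.
from setuptools import setup         # provides the bdist_egg command
# from distutils.core import setup   # would fail on "setup.py bdist_egg"

setup(name='example', version='0.0')  # hypothetical minimal call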
...@@ -2,7 +2,6 @@ ...@@ -2,7 +2,6 @@
extends = extends =
../mariadb/buildout.cfg ../mariadb/buildout.cfg
../openssl/buildout.cfg ../openssl/buildout.cfg
../patch/buildout.cfg
../zlib/buildout.cfg ../zlib/buildout.cfg
parts = parts =
...@@ -14,11 +13,6 @@ PATH =${mariadb:location}/bin:%(PATH)s ...@@ -14,11 +13,6 @@ PATH =${mariadb:location}/bin:%(PATH)s
[python-mysqlclient] [python-mysqlclient]
recipe = zc.recipe.egg:custom recipe = zc.recipe.egg:custom
egg = mysqlclient egg = mysqlclient
patches =
https://github.com/PyMySQL/mysqlclient-python/commit/1693848c9f6ca863868d94d63499830f7f4f3a1f.diff#a493a91f9263243eb331fcab9901b8b0
${:_profile_base_location_}/mysqlclient-1.3.10-mariadb-10.2.8.patch#807b694fcd7ade4da4bcca321b1a29d2
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
environment = python-mysqlclient-env environment = python-mysqlclient-env
library-dirs = library-dirs =
${zlib:location}/lib/ ${zlib:location}/lib/
......
diff -ur mysqlclient-1.3.10.orig/_mysql.c mysqlclient-1.3.10/_mysql.c
--- mysqlclient-1.3.10.orig/_mysql.c 2017-01-04 13:47:08.000000000 +0100
+++ mysqlclient-1.3.10/_mysql.c 2017-08-21 10:53:21.014929937 +0200
@@ -1060,7 +1060,7 @@
if (self && PyModule_Check((PyObject*)self))
self = NULL;
if (self && self->open) {
-#if MYSQL_VERSION_ID >= 50707 && !defined(MARIADB_BASE_VERSION)
+#if MYSQL_VERSION_ID >= 50707 && (!defined(MARIADB_BASE_VERSION) && !defined(MARIADB_VERSION_ID))
len = mysql_real_escape_string_quote(&(self->connection), out, in, size, '\'');
#else
len = mysql_real_escape_string(&(self->connection), out, in, size);
@@ -1118,7 +1118,7 @@
out = PyBytes_AS_STRING(str);
check_server_init(NULL);
if (self && self->open) {
-#if MYSQL_VERSION_ID >= 50707 && !defined(MARIADB_BASE_VERSION)
+#if MYSQL_VERSION_ID >= 50707 && (!defined(MARIADB_BASE_VERSION) && !defined(MARIADB_VERSION_ID))
len = mysql_real_escape_string_quote(&(self->connection), out+1, in, size, '\'');
#else
len = mysql_real_escape_string(&(self->connection), out+1, in, size);
diff -ur mysqlclient-1.3.10.orig/setup_posix.py mysqlclient-1.3.10/setup_posix.py
--- mysqlclient-1.3.10.orig/setup_posix.py 2016-07-26 10:12:24.000000000 +0200
+++ mysqlclient-1.3.10/setup_posix.py 2017-08-21 10:54:59.643071374 +0200
@@ -63,7 +63,7 @@
if extra_compile_args[i] == '-arch':
extra_link_args += ['-arch', extra_compile_args[i + 1]]
- include_dirs = [dequote(i[2:])
+ include_dirs = [dequote(os.path.isdir(i[2:]+'/server') and i[2:]+'/server' or i[2:])
for i in mysql_config('include') if i.startswith('-I')]
if static:
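The setup_posix.py hunk above rewrites include_dirs with the old "and/or" conditional idiom; the intent is to prefer the "<dir>/server" sub-directory when it exists (apparently where MariaDB 10.2 places additional headers, judging from the patch name). A minimal sketch of the same logic, with a hypothetical path, not code from the repository:

import os

def pick_include_dir(flag):            # e.g. flag == "-I/opt/mariadb/include/mysql"
    path = flag[2:]                    # strip the leading "-I"
    server = os.path.join(path, 'server')
    return server if os.path.isdir(server) else path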
[buildout]
extends =
../../stack/slapos.cfg
../gcc/buildout.cfg
../openblas/buildout.cfg
../cmake/buildout.cfg
../python-cffi/buildout.cfg
../python-PyYAML/buildout.cfg
../python-cocoapi/buildout.cfg
../pillow/buildout.cfg
../scipy/buildout.cfg
../matplotlib/buildout.cfg
../unzip/buildout.cfg
parts =
pytorch-egg
[pytorch-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/pytorch/pytorch
tag = master
commit = 4af66c43045a317b477918c503d105f565b4a66b
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recursive --quiet -b ${:tag} ${:repository} ${:location}; cd ${:location}; ${:git-binary} checkout ${:commit}; ${:patch-binary} -p1 -d . < ${:_profile_base_location_}/pytorch.4af66c4.patch ) || (rm -fr ${:location}; exit 1)
[pytorch-build-interpreter]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
import sys
sys.path.append('.')
sys.path.append('${pytorch-repository:location}/torch/lib/ATen')
eggs =
setuptools
${scipy:egg}
${numpy:egg}
${python-cffi:egg}
${python-PyYAML:egg}
interpreter = pytorch-build-interpreter
scripts = pytorch-build-interpreter
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[pytorch-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${pytorch-repository:location}
python-bin = ${buildout:bin-directory}/${pytorch-build-interpreter:interpreter}
no-cuda = 0
cmake-bin = ${cmake:location}/bin
git-bin = ${git:location}/bin
binutils-location = ${binutils:location}
gcc-location = ${gcc:location}
openblas-location = ${openblas:location}
script =
os.makedirs(location)
workdir = self.options['workdir']
python_bin = self.options['python-bin']
binutils_location = self.options['binutils-location']
gcc_location = self.options['gcc-location']
openblas_location = self.options['openblas-location']
env = {'PYTHONPATH':workdir,
'PATH':':'.join([self.options['cmake-bin'],
self.options['git-bin'],
binutils_location+'/bin',
gcc_location+'/bin',
os.environ['PATH']]),
'CMAKE_INCLUDE_PATH':':'.join([gcc_location+'/include',
binutils_location+'/include',
openblas_location+'/include',
]),
'CMAKE_LIBRARY_PATH':':'.join([gcc_location+'/lib',
gcc_location+'/lib64',
binutils_location+'/lib',
openblas_location+'/lib',
]),
'CC':gcc_location+'/bin/gcc',
'CXX':gcc_location+'/bin/g++',
'PYTORCH_PYTHON':python_bin,
'SLAPOS_COMPILE_ARGS':' '.join(['-Wl,-rpath,'+gcc_location+'/lib',
'-Wl,-rpath,'+gcc_location+'/lib64',
'-Wl,-rpath,'+binutils_location+'/lib',
'-Wl,-rpath,'+openblas_location+'/lib',
])
}
if self.options.get('no-cuda') == '1':
env['NO_CUDA'] = '1'
import os.path
call([python_bin, 'setup.py', 'build'], cwd=workdir, env=env)
call([python_bin, 'setup.py', 'bdist_egg'], cwd=workdir, env=env)
[pytorch-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
pytorch-repository-path = ${pytorch-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-pytorch-build = ${pytorch-build:location}
egg = torch
script =
os.makedirs(location)
workdir = self.options['pytorch-repository-path']
egg_name = 'torch-0.2.0+4af66c4-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[pytorch-egg]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
${scipy:egg}
${numpy:egg}
${python-PyYAML:egg}
${pytorch-build-install-egg:egg}
${pillow-python:egg}
${python-cocoapi-build-install-egg:egg}
${matplotlib:egg}
six
torchvision
interpreter = pytorch-python
scripts = pytorch-python
[versions]
torchvision = 0.1.6
diff --git a/setup.py b/setup.py
index 1d9a765..a50e9cb 100644
--- a/setup.py
+++ b/setup.py
@@ -90,7 +90,7 @@ def build_libs(libs):
assert lib in dep_libs, 'invalid lib: {}'.format(lib)
build_libs_cmd = ['bash', 'torch/lib/build_libs.sh']
my_env = os.environ.copy()
- my_env["PYTORCH_PYTHON"] = sys.executable
+ #my_env["PYTORCH_PYTHON"] = sys.executable
if WITH_SYSTEM_NCCL:
my_env["NCCL_ROOT_DIR"] = NCCL_ROOT_DIR
if WITH_CUDA:
diff --git a/torch/lib/build_libs.sh b/torch/lib/build_libs.sh
index af7020e..a91bd05 100755
--- a/torch/lib/build_libs.sh
+++ b/torch/lib/build_libs.sh
@@ -24,7 +24,7 @@ C_FLAGS=" -DTH_INDEX_BASE=0 -I$INSTALL_DIR/include \
-I$INSTALL_DIR/include/THS -I$INSTALL_DIR/include/THCS \
-I$INSTALL_DIR/include/THPP -I$INSTALL_DIR/include/THNN \
-I$INSTALL_DIR/include/THCUNN"
-LDFLAGS="-L$INSTALL_DIR/lib "
+LDFLAGS="-L$INSTALL_DIR/lib $SLAPOS_COMPILE_ARGS"
LD_POSTFIX=".so.1"
LD_POSTFIX_UNVERSIONED=".so"
if [[ $(uname) == 'Darwin' ]]; then
...@@ -22,6 +22,13 @@ rpath = ...@@ -22,6 +22,13 @@ rpath =
# and pin the egg in the [versions] section of the stack or SR. # and pin the egg in the [versions] section of the stack or SR.
find-links = http://pkgs.fedoraproject.org/repo/pkgs/rdiff-backup/rdiff-backup-1.0.5.tar.gz/fa2a165fa07a94be52c52e3545bc7758/rdiff-backup-1.0.5.tar.gz find-links = http://pkgs.fedoraproject.org/repo/pkgs/rdiff-backup/rdiff-backup-1.0.5.tar.gz/fa2a165fa07a94be52c52e3545bc7758/rdiff-backup-1.0.5.tar.gz
[rdiff-backup-build-1.3.4]
<= rdiff-backup-build
# use our own version
find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz
patches =
${:_profile_base_location_}/rdiff-backup-1.3.4-librsync-1.0.0.patch#31fafc8bc4a00f002f52008a9f3b671f
[rdiff-backup] [rdiff-backup]
# Scripts only generation part for rdiff-backup # Scripts only generation part for rdiff-backup
recipe = zc.recipe.egg recipe = zc.recipe.egg
......
Patch by Roman Tereshonkov and Kari Hautio for rdiff-backup <= 1.2.8 to avoid a build failure with
librsync >= 1.0.0 (which is a security bugfix release). The discussion and solution finding can be
found at https://bugs.launchpad.net/duplicity/+bug/1416344 (for duplicity).
--- rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c 2009-03-16 15:36:21.000000000 +0100
+++ rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c.librsync-1.0.0 2015-03-02 00:54:24.000000000 +0100
@@ -59,8 +59,13 @@
if (sm == NULL) return NULL;
sm->x_attr = NULL;
+#ifdef RS_DEFAULT_STRONG_LEN
sm->sig_job = rs_sig_begin((size_t)blocklen,
(size_t)RS_DEFAULT_STRONG_LEN);
+#else
+ sm->sig_job = rs_sig_begin((size_t)blocklen,
+ (size_t)8, RS_MD4_SIG_MAGIC);
+#endif
return (PyObject*)sm;
}
...@@ -4,6 +4,12 @@ ...@@ -4,6 +4,12 @@
# /opt/slapos folder, adapt this script as you please. # /opt/slapos folder, adapt this script as you please.
# #
# Be careful not to run this script where it is already installed. # Be careful not to run this script where it is already installed.
#
# Before running this script, ensure dependencies are installed; on Debian, you can
# use the command below:
#
# apt-get install python gcc g++ make uml-utilities bridge-utils patch wget
#
# Use sudo or superuser and create slapos directory (you can pick a different directory) # Use sudo or superuser and create slapos directory (you can pick a different directory)
mkdir -p /opt/slapos/log/ mkdir -p /opt/slapos/log/
......
diff --git a/tensorboard/pip_package/build_pip_package.sh b/tensorboard/pip_package/build_pip_package.sh
index b386d59..f03b056 100755
--- a/tensorboard/pip_package/build_pip_package.sh
+++ b/tensorboard/pip_package/build_pip_package.sh
@@ -26,6 +26,7 @@ function main() {
DEST=$1
TMPDIR=$(mktemp -d -t tmp.XXXXXXXXXX)
RUNFILES="bazel-bin/tensorboard/pip_package/build_pip_package.runfiles/org_tensorflow_tensorboard"
+ WORKDIR=$(pwd)
echo $(date) : "=== Using tmpdir: ${TMPDIR}"
@@ -45,8 +46,8 @@ function main() {
rm -f MANIFEST
echo $(date) : "=== Building wheel"
echo $(pwd)
- python setup.py bdist_wheel >/dev/null
- python3 setup.py bdist_wheel >/dev/null
+ PYTHONPATH=${WORKDIR}/${RUNFILES} $PYTHON_BIN_PATH setup.py bdist_egg >/dev/null
+ #python3 setup.py bdist_wheel >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
[buildout]
extends =
../zip/buildout.cfg
../bazel/buildout.cfg
parts =
slapos-cookbook-develop
slapos-cookbook
tensorboard-build-install-egg
[tensorboard-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/tensorflow/tensorboard
tag = 0.4
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/0.4.patch ) || (rm -fr ${:location}; exit 1)
[tensorboard-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${tensorboard-repository:location}
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
numpy-python-command = ${buildout:bin-directory}/${numpy-egg:interpreter}
python27-lib = ${python2.7:location}/lib
java_home_bin = ${bazel:java_home}/bin
bazel-bin = ${bazel:location}/bin
script =
os.makedirs(location)
workdir = self.options['workdir']
env = {'PATH':':'.join([self.options['gcc-bin'],
self.options['java_home_bin'],
self.options['bazel-bin'],
os.environ['PATH']]),
'COMPILER_PATH':':'.join([self.options['gcc-bin'],
os.environ.get('COMPILER_PATH') or '']),
'LIBRARY_PATH':':'.join([self.options['gcc-lib'],
self.options['gcc-lib64'],
os.environ.get('LIBRARY_PATH') or '']),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
'PYTHON_LIB_PATH':self.options['python27-lib'],
}
import os.path
env['LD_LIBRARY_PATH'] = env['LIBRARY_PATH']
bazel_command = ['bazel', 'build', '--spawn_strategy=standalone', '--verbose_failures', '--sandbox_debug', '//tensorboard/pip_package:build_pip_package']
call(bazel_command, cwd=workdir, env=env)
[tensorboard-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
tensorboard-repository-path = ${tensorboard-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-tensorboard-build = ${tensorboard-build:location}
egg = tensorflow-tensorboard
bazel-bin = ${bazel:location}/bin
java_home_bin = ${bazel:java_home}/bin
numpy-python-command = ${buildout:bin-directory}/${numpy-egg:interpreter}
script =
os.makedirs(location)
workdir = self.options['tensorboard-repository-path']
egg_name = 'tensorflow_tensorboard-0.4.0rc3-py2.7.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
env = {'PATH':':'.join([self.options['bazel-bin'],
self.options['java_home_bin'],
os.environ['PATH']]),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
}
call(['tensorboard/pip_package/build_pip_package.sh', dist_dir], cwd=workdir, env=env)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[versions]
...@@ -5,6 +5,7 @@ extends = ...@@ -5,6 +5,7 @@ extends =
../zip/buildout.cfg ../zip/buildout.cfg
../bazel/buildout.cfg ../bazel/buildout.cfg
../protobuf-python/buildout.cfg ../protobuf-python/buildout.cfg
../tensorboard/buildout.cfg
parts = parts =
slapos-cookbook-develop slapos-cookbook-develop
slapos-cookbook slapos-cookbook
...@@ -26,11 +27,11 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_} ...@@ -26,11 +27,11 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = true stop-on-error = true
repository = https://github.com/tensorflow/tensorflow repository = https://github.com/tensorflow/tensorflow
tag = v1.0.1 tag = v1.4.0
git-binary = ${git:location}/bin/git git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-v1.0.1.patch ) || (rm -fr ${:location}; exit 1) command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-r1.4.patch ) || (rm -fr ${:location}; exit 1)
[cuda] [cuda]
tf_need_cuda = 1 tf_need_cuda = 1
...@@ -72,6 +73,10 @@ script = ...@@ -72,6 +73,10 @@ script =
'TF_NEED_GCP':'0', 'TF_NEED_GCP':'0',
'TF_NEED_HDFS':'0', 'TF_NEED_HDFS':'0',
'TF_NEED_OPENCL':'0', 'TF_NEED_OPENCL':'0',
'TF_NEED_MKL':'0',
'TF_NEED_VERBS':'0',
'TF_CUDA_CLANG':'0',
'TF_NEED_MPI':'0',
'TF_NEED_CUDA':self.buildout['cuda']['tf_need_cuda'], 'TF_NEED_CUDA':self.buildout['cuda']['tf_need_cuda'],
##### FOR CUDA ##### ##### FOR CUDA #####
'GCC_HOST_COMPILER_PATH':os.path.join(self.options['gcc-bin'], 'gcc'), 'GCC_HOST_COMPILER_PATH':os.path.join(self.options['gcc-bin'], 'gcc'),
...@@ -98,18 +103,17 @@ recipe = slapos.recipe.build ...@@ -98,18 +103,17 @@ recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip unzip-binary = ${unzip:location}/bin/unzip
tensorflow-repository-path = ${tensorflow-repository:location} tensorflow-repository-path = ${tensorflow-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-tensorboard-build = ${tensorboard-build:location}
need-tensorflow-build = ${tensorflow-build:location} need-tensorflow-build = ${tensorflow-build:location}
need-protobuf-python = ${protobuf-python:egg} need-protobuf-python = ${protobuf-python:egg}
egg = tensorflow egg = tensorflow
script = script =
os.makedirs(location) os.makedirs(location)
workdir = self.options['tensorflow-repository-path'] workdir = self.options['tensorflow-repository-path']
egg_name = 'tensorflow-1.0.1-py2.7-linux-x86_64.egg' egg_name = 'tensorflow-1.4.0-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist') dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name) dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call(['bazel-bin/tensorflow/tools/pip_package/build_pip_package', dist_dir], cwd=workdir) call(['bazel-bin/tensorflow/tools/pip_package/build_pip_package', dist_dir], cwd=workdir)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir]) call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[versions] [versions]
protobuf = 3.1.0.post1
wheel = 0.30.0a0
diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
index e2bee21..1df90c1 100755 index cbf06a9..226a423 100755
--- a/tensorflow/tools/pip_package/build_pip_package.sh --- a/tensorflow/tools/pip_package/build_pip_package.sh
+++ b/tensorflow/tools/pip_package/build_pip_package.sh +++ b/tensorflow/tools/pip_package/build_pip_package.sh
@@ -136,7 +136,7 @@ function main() { @@ -162,7 +162,7 @@ function main() {
pushd ${TMPDIR} pushd ${TMPDIR}
rm -f MANIFEST rm -f MANIFEST
echo $(date) : "=== Building wheel" echo $(date) : "=== Building wheel"
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${GPU_FLAG} >/dev/null - "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${PKG_NAME_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${GPU_FLAG} >/dev/null + "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${PKG_NAME_FLAG} >/dev/null
mkdir -p ${DEST} mkdir -p ${DEST}
cp dist/* ${DEST} cp dist/* ${DEST}
popd popd
[buildout]
extends =
../libevent/buildout.cfg
../ncurses/buildout.cfg
parts +=
tmux
[tmux]
recipe = slapos.recipe.cmmi
url = https://github.com/tmux/tmux/releases/download/2.0/tmux-2.0.tar.gz
md5sum = 9fb6b443392c3978da5d599f1e814eaa
environment =
CFLAGS=-I${ncurses:location}/include -I${libevent:location}/include/
LDFLAGS=-L${ncurses:location}/lib/ -L${libevent:location}/lib/ -Wl,-rpath=${ncurses:location}/lib/ -Wl,-rpath=${libevent:location}/lib/
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
...@@ -4,12 +4,13 @@ parts = ...@@ -4,12 +4,13 @@ parts =
extends = extends =
../ncurses/buildout.cfg ../ncurses/buildout.cfg
../gettext/buildout.cfg
[vim] [vim]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
version = 7.4 url = ftp://ftp.vim.org/pub/vim/unix/vim-8.0.586.tar.bz2
url = http://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2 md5sum = b35e794140c196ff59b492b56c1e73db
md5sum = 607e135c559be642f210094ad023dc65
environment= environment=
CFLAGS=-I${ncurses:location}/include CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib/ -Wl,-rpath=${ncurses:location}/lib/ LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
\ No newline at end of file LD_LIBRARY_PATH=${ncurses:location}/lib:${gettext:location}/lib
...@@ -4,5 +4,5 @@ parts = ...@@ -4,5 +4,5 @@ parts =
[zlib] [zlib]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://downloads.sourceforge.net/project/libpng/zlib/1.2.8/zlib-1.2.8.tar.gz url = http://downloads.sourceforge.net/project/libpng/zlib/1.2.11/zlib-1.2.11.tar.gz
md5sum = 44d667c142d7cda120332623eab69f40 md5sum = 1c9f62f0778697a09d36121ead88e08e
[buildout]
parts =
zstd
[zstd]
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = https://github.com/facebook/zstd/archive/v1.3.1.tar.gz
md5sum = e849ceef2f090240f690c13fba6ca70b
configure-command = :
make-options = PREFIX=${:location}
...@@ -55,11 +55,12 @@ setup(name=name, ...@@ -55,11 +55,12 @@ setup(name=name,
packages=find_packages(), packages=find_packages(),
include_package_data=True, include_package_data=True,
install_requires=[ install_requires=[
'enum34', # for inotify-simple
'jsonschema', 'jsonschema',
'hexagonit.recipe.download', 'hexagonit.recipe.download',
'netaddr', # to manipulate on IP addresses 'netaddr', # to manipulate on IP addresses
'setuptools', # namespaces 'setuptools', # namespaces
'inotifyx', # to watch filesystem changes (used in lockfile) 'inotify_simple',
'lock_file', #another lockfile implementation for multiprocess 'lock_file', #another lockfile implementation for multiprocess
'slapos.core', # uses internally 'slapos.core', # uses internally
'zc.buildout', # plays with buildout 'zc.buildout', # plays with buildout
...@@ -133,7 +134,6 @@ setup(name=name, ...@@ -133,7 +134,6 @@ setup(name=name,
'lamp.static = slapos.recipe.lamp:Static', 'lamp.static = slapos.recipe.lamp:Static',
'libcloud = slapos.recipe.libcloud:Recipe', 'libcloud = slapos.recipe.libcloud:Recipe',
'libcloudrequest = slapos.recipe.libcloudrequest:Recipe', 'libcloudrequest = slapos.recipe.libcloudrequest:Recipe',
'lockfile = slapos.recipe.lockfile:Recipe',
'logrotate = slapos.recipe.logrotate:Recipe', 'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part', 'logrotate.d = slapos.recipe.logrotate:Part',
'memcached = slapos.recipe.memcached:Recipe', 'memcached = slapos.recipe.memcached:Recipe',
......
...@@ -80,7 +80,7 @@ class Recipe(GenericBaseRecipe): ...@@ -80,7 +80,7 @@ class Recipe(GenericBaseRecipe):
mysql_script_list = [] mysql_script_list = []
# user defined functions # user defined functions
udf_registration = "" udf_registration = "DROP FUNCTION IF EXISTS last_insert_grn_id;\nDROP FUNCTION IF EXISTS mroonga_snippet;\nDROP FUNCTION IF EXISTS mroonga_command;\n"
mroonga = self.options.get('mroonga', 'ha_mroonga.so') mroonga = self.options.get('mroonga', 'ha_mroonga.so')
if mroonga: if mroonga:
udf_registration += "CREATE FUNCTION last_insert_grn_id RETURNS " \ udf_registration += "CREATE FUNCTION last_insert_grn_id RETURNS " \
......
...@@ -3,47 +3,31 @@ import os ...@@ -3,47 +3,31 @@ import os
import signal import signal
import subprocess import subprocess
import time import time
from collections import defaultdict
import inotifyx from inotify_simple import INotify, flags
def _wait_files_creation(file_list): def _wait_files_creation(file_list):
# Etablish a list of directory and subfiles # Establish a list of directory and subfiles.
directories = dict() # and test existence before watching, so that we don't miss an event.
for dirname, filename in [os.path.split(f) for f in file_list]: directories = defaultdict(dict)
directories.setdefault(dirname, dict()) for f in file_list:
directories[dirname][filename] = False dirname, filename = os.path.split(f)
directories[dirname][filename] = os.path.lexists(f)
def all_files_exists(): def all_files_exists():
return all([all(files.values()) for files in directories.values()]) return all(all(files.itervalues()) for files in directories.itervalues())
fd = inotifyx.init() with INotify() as inotify:
try: watchdescriptors = {inotify.add_watch(dirname,
# Watch every directories where the file are flags.CREATE | flags.DELETE | flags.MOVED_TO | flags.MOVED_FROM
watchdescriptors = dict() ): dirname
for dirname in directories.keys(): for dirname in directories}
wd = inotifyx.add_watch(fd,
dirname,
inotifyx.IN_CREATE | inotifyx.IN_DELETE | inotifyx.IN_MOVE)
watchdescriptors[wd] = dirname
# Set to True the file wich exists
for dirname, filename in [os.path.split(f) for f in file_list]:
directories[dirname][filename] = os.path.exists(os.path.join(dirname,
filename))
# Let's wait for every file creation
while not all_files_exists():
events_list = inotifyx.get_events(fd)
for event in events_list:
dirname = watchdescriptors[event.wd]
if event.name in directories[dirname]:
# One of watched file was created or deleted
if event.mask & inotifyx.IN_DELETE:
directories[dirname][event.name] = False
else:
directories[dirname][event.name] = True
finally: while not all_files_exists():
os.close(fd) for event in inotify.read():
directory = directories[watchdescriptors[event.wd]]
if event.name in directory:
directory[event.name] = event.mask & (flags.CREATE | flags.MOVED_TO)
def execute(args): def execute(args):
"""Portable execution with process replacement""" """Portable execution with process replacement"""
...@@ -83,8 +67,8 @@ def generic_exec(args): ...@@ -83,8 +67,8 @@ def generic_exec(args):
os.execve(exec_list[0], exec_list + sys.argv[1:], exec_env) os.execve(exec_list[0], exec_list + sys.argv[1:], exec_env)
def sig_handler(signal, frame): def sig_handler(sig, frame):
print 'Received signal %r, killing children and exiting' % signal print 'Received signal %r, killing children and exiting' % sig
if child_pg is not None: if child_pg is not None:
os.killpg(child_pg, signal.SIGHUP) os.killpg(child_pg, signal.SIGHUP)
os.killpg(child_pg, signal.SIGTERM) os.killpg(child_pg, signal.SIGTERM)
...@@ -97,6 +81,7 @@ signal.signal(signal.SIGTERM, sig_handler) ...@@ -97,6 +81,7 @@ signal.signal(signal.SIGTERM, sig_handler)
def execute_with_signal_translation(args): def execute_with_signal_translation(args):
"""Run process as children and translate from SIGTERM to another signal""" """Run process as children and translate from SIGTERM to another signal"""
global child_pg
child = subprocess.Popen(args, close_fds=True, preexec_fn=os.setsid) child = subprocess.Popen(args, close_fds=True, preexec_fn=os.setsid)
child_pg = child.pid child_pg = child.pid
try: try:
......
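The execute.py hunk above replaces inotifyx with inotify_simple in _wait_files_creation. As a rough illustration of the inotify_simple API used there (a sketch assuming inotify_simple 1.x, not code from the repository), waiting for a single file to appear looks like:

import os
from inotify_simple import INotify, flags

def wait_for_file(path):
    dirname, filename = os.path.split(path)
    with INotify() as inotify:                      # INotify is usable as a context manager
        inotify.add_watch(dirname, flags.CREATE | flags.MOVED_TO)
        if os.path.lexists(path):                   # test after watching so no event is missed
            return
        while True:
            for event in inotify.read():            # blocks until events arrive
                if event.name == filename:
                    return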
...@@ -25,8 +25,7 @@ ...@@ -25,8 +25,7 @@
# #
############################################################################## ##############################################################################
import os import os
from inotify_simple import INotify, flags
import inotifyx
def subfiles(directory): def subfiles(directory):
"""Return the list of subfiles of a directory, and wait for the newly created """Return the list of subfiles of a directory, and wait for the newly created
...@@ -34,18 +33,12 @@ def subfiles(directory): ...@@ -34,18 +33,12 @@ def subfiles(directory):
CAUTION : *DONT TRY TO CONVERT THE RESULT OF THIS FUNCTION INTO A LIST ! CAUTION : *DONT TRY TO CONVERT THE RESULT OF THIS FUNCTION INTO A LIST !
ALWAYS ITERATE OVER IT !!!*""" ALWAYS ITERATE OVER IT !!!*"""
watchfd = inotifyx.init()
inotifyx.add_watch(watchfd, directory, inotifyx.IN_CREATE)
try:
subfiles = set(os.listdir(directory)) with INotify() as inotify:
subfiles |= set([file_.name for file_ in inotifyx.get_events(watchfd, 0)]) inotify.add_watch(directory, flags.CLOSE_WRITE | flags.MOVED_TO)
names = os.listdir(directory)
while True: while True:
for file_ in subfiles: for name in names:
yield os.path.join(directory, file_) yield os.path.join(directory, name)
names = (event.name for event in inotify.read())
subfiles = [file_.name for file_ in inotifyx.get_events(watchfd)]
finally:
os.close(watchfd)
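The inotify.py rewrite above keeps subfiles() as a generator: it first yields the files already present, then blocks on inotify.read() for each newly closed or moved-in file. Per the CAUTION in the docstring it must be iterated lazily, never materialized. A hedged usage sketch (the directory name is hypothetical):

from slapos.recipe.librecipe import inotify

for path in inotify.subfiles('/srv/slapgrid/spool'):   # hypothetical directory
    print(path)   # never wrap subfiles() in list(): it blocks waiting for new files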
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
import subprocess
import inotifyx
from slapos.recipe.librecipe import GenericBaseRecipe
class LockFile(object):
class LockException(Exception):
pass
def __init__(self, filename, wait=True, exit=False):
self.filename = filename
if wait:
self.callback = lambda: self.waitDeletion()
elif not exit:
self.callback = lambda: self.raiseException()
else:
self.callback = lambda: sys.exit(1)
def raiseException(self):
raise LockFile.LockException("Not able to lock the file")
def waitDeletion(self):
inotify_fd = inotifyx.init()
try:
inotifyx.add_watch(inotify_fd, self.filename, inotifyx.IN_DELETE)
inotifyx.get_events(inotify_fd)
except IOError: # add_watch failed
pass
finally:
os.close(inotify_fd)
self.__enter__()
def __enter__(self):
try:
# Atomic file acquisition
self._fd = os.open(self.filename, os.O_CREAT | os.O_EXCL)
except OSError:
self.callback()
def __exit__(self, exc_type, exc_value, traceback):
os.close(self._fd)
os.unlink(self.filename)
def locked_run(args):
with LockFile(args['filename'], wait=args['wait'], exit=True):
subprocess.check_call([args['binary']])
class Recipe(GenericBaseRecipe):
def install(self):
wrapper = self.createPythonScript(self.options['wrapper'],
__name__ + '.locked_run',
dict(
filename=self.options['lock-file'],
wait=self.optionIsTrue('wait', False),
binary=self.options['binary'],
)
)
return [wrapper]
...@@ -92,6 +92,8 @@ class Storage(NeoBaseRecipe): ...@@ -92,6 +92,8 @@ class Storage(NeoBaseRecipe):
engine = self.options.get('engine') engine = self.options.get('engine')
if engine: # old versions of NEO don't support -e if engine: # old versions of NEO don't support -e
r += '-e', engine r += '-e', engine
if self.options.get('dedup'):
r.append('--dedup')
if self.options.get('disable-drop-partitions'): if self.options.get('disable-drop-partitions'):
r.append('--disable-drop-partitions') r.append('--disable-drop-partitions')
return r return r
......
import os, shutil, tempfile, threading, unittest
from slapos.recipe.librecipe import execute, inotify
class TestInotify(unittest.TestCase):
def setUp(self):
self.tmp = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmp)
def test_subfiles(self):
p = lambda x: os.path.join(self.tmp, x)
def create(name, text):
a = open(p(name), 'w')
a.write(text)
a.flush()
return a
def check(name, text):
path = next(notified)
self.assertEqual(path, p(name))
with open(path) as f:
self.assertEqual(f.read(), text)
a = create('first', 'blah')
a.write('...')
notified = inotify.subfiles(self.tmp)
check('first', 'blah')
os.link(p(a.name), p('a hard link')) # ignored
b = create('other', 'hello')
b.close()
check('other', 'hello')
c = create('last', '!!!')
a.close()
check('first', 'blah...')
os.rename(p(a.name), p(b.name))
check('other', 'blah...')
c.close()
check('last', '!!!')
def test_wait_files_creation(self):
file_list = (
'foo',
'bar',
'hello/world',
'hello/world!',
'a/b/c',
)
create = lambda x: open(x, 'w').close()
p = lambda x: os.path.join(self.tmp, x)
P = lambda x: p(file_list[x])
create(P(1))
os.mkdir(p('hello'))
os.makedirs(p('a/b'))
t = threading.Thread(target=execute._wait_files_creation,
args=(map(p, file_list),))
t.daemon = True
t.start()
def check():
t.join(.2)
self.assertTrue(t.is_alive())
check()
for x in P(3), p('a/b/d'), P(0):
create(x)
check()
os.rename(P(3), P(2))
os.rename(p('a/b/d'), P(4))
check()
os.remove(P(1))
for x in P(3), P(1):
create(x)
t.join(10)
self.assertFalse(t.is_alive())
...@@ -3,6 +3,7 @@ import json ...@@ -3,6 +3,7 @@ import json
import mock import mock
import os import os
import unittest import unittest
import tempfile
from collections import defaultdict from collections import defaultdict
from slapos.recipe import slapconfiguration from slapos.recipe import slapconfiguration
from slapos import format as slapformat from slapos import format as slapformat
...@@ -12,8 +13,7 @@ class SlapConfigurationTest(unittest.TestCase): ...@@ -12,8 +13,7 @@ class SlapConfigurationTest(unittest.TestCase):
def setUp(self): def setUp(self):
"""Prepare files on filesystem.""" """Prepare files on filesystem."""
self.instance_root = "/tmp/instance_test_resourcefile" self.instance_root = tempfile.mkdtemp()
os.mkdir(self.instance_root)
# create testing resource file # create testing resource file
self.resource_file = os.path.join(self.instance_root, slapformat.Partition.resource_file) self.resource_file = os.path.join(self.instance_root, slapformat.Partition.resource_file)
self.resource = { self.resource = {
......
...@@ -50,8 +50,8 @@ gitdb = 0.6.4 ...@@ -50,8 +50,8 @@ gitdb = 0.6.4
pycrypto = 2.6.1 pycrypto = 2.6.1
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.download = 1.0 slapos.recipe.download = 1.0
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap = 0.9.0 smmap = 0.9.0
# Required by: # Required by:
......
...@@ -39,7 +39,7 @@ md5sum = 665e83d660c9b779249b2179d7ce4b4e ...@@ -39,7 +39,7 @@ md5sum = 665e83d660c9b779249b2179d7ce4b4e
[template-apache-frontend-configuration] [template-apache-frontend-configuration]
filename = templates/apache.conf.in filename = templates/apache.conf.in
md5sum = 82cdb4ab02fec36285b9c1ce502f82f0 md5sum = a56045e7b53ff00ab34d2a8f911fc1a1
[template-custom-slave-list] [template-custom-slave-list]
filename = templates/apache-custom-slave-list.cfg.in filename = templates/apache-custom-slave-list.cfg.in
......
...@@ -10,8 +10,8 @@ gitdb = 0.6.4 ...@@ -10,8 +10,8 @@ gitdb = 0.6.4
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
rdiff-backup = 1.0.5+SlapOSPatched001 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap = 0.9.0 smmap = 0.9.0
numpy = 1.11.2 numpy = 1.11.2
pyasn1 = 0.2.3 pyasn1 = 0.2.3
......
...@@ -166,8 +166,8 @@ SSLProxyCheckPeerCN off ...@@ -166,8 +166,8 @@ SSLProxyCheckPeerCN off
SSLProxyCheckPeerExpire off SSLProxyCheckPeerExpire off
include {{frontend_configuration.get('log-access-configuration')}} include {{frontend_configuration.get('log-access-configuration')}}
include {{ slave_configuration_directory }}/*.conf includeoptional {{ slave_configuration_directory }}/*.conf
include {{ slave_with_cache_configuration_directory }}/*.conf includeoptional {{ slave_with_cache_configuration_directory }}/*.conf
ErrorDocument 404 /notfound.html ErrorDocument 404 /notfound.html
RewriteRule (.*) /notfound.html [R=404,L] RewriteRule (.*) /notfound.html [R=404,L]
...@@ -27,6 +27,7 @@ statistic = $${:srv}/statistic ...@@ -27,6 +27,7 @@ statistic = $${:srv}/statistic
backupscript = $${:etc}/backup backupscript = $${:etc}/backup
www = $${:srv}/www www = $${:srv}/www
home = $${:etc}/home home = $${:etc}/home
promises = $${:etc}/promise
ssl = $${:etc}/ssl ssl = $${:etc}/ssl
ssh = $${:home}/.ssh ssh = $${:home}/.ssh
...@@ -49,12 +50,13 @@ logfile = $${directory:log}/crond.log ...@@ -49,12 +50,13 @@ logfile = $${directory:log}/crond.log
{% set frequency = slave_instance.get('frequency', '') -%} {% set frequency = slave_instance.get('frequency', '') -%}
{% set hostname = slave_instance.get('hostname', '') -%} {% set hostname = slave_instance.get('hostname', '') -%}
{% set connection = slave_instance.get('connection', '') -%} {% set connection = slave_instance.get('connection', '') -%}
{% set connection_port = slave_instance.get('connection_port', '22') -%}
{% set include = slave_instance.get('include', '') -%} {% set include = slave_instance.get('include', '') -%}
{% set include_string = "' --include='".join(include.split(' ')) -%} {% set include_string = "' --include='".join(include.split(' ')) -%}
{% set exclude = slave_instance.get('exclude', '') -%} {% set exclude = slave_instance.get('exclude', '') -%}
{% set exclude_string = '' -%} {% set exclude_string = '' -%}
{% set sudo = slave_instance.get('sudo', 'False') -%} {% set sudo = slave_instance.get('sudo', 'False') -%}
{% set remote_schema = 'rdiff-backup --server --restrict-read-only / -- "$@"' -%} {% set remote_schema = slave_instance.get('remote_rdiff_path', 'rdiff-backup') + ' --server --restrict-read-only / -- "$@"' -%}
{% if (exclude != '') -%} {% if (exclude != '') -%}
{% set exclude_string = "' --exclude='".join(exclude.split(' ')) -%} {% set exclude_string = "' --exclude='".join(exclude.split(' ')) -%}
...@@ -73,23 +75,19 @@ directory = $${directory:backup}/$${:_buildout_section_name_} ...@@ -73,23 +75,19 @@ directory = $${directory:backup}/$${:_buildout_section_name_}
[{{ slave_reference }}-backup-private_key] [{{ slave_reference }}-backup-private_key]
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = false stop-on-error = true
command = ${dropbear-output:keygen} -t $${:type} -s 2048 -f $${:key} command = ${coreutils-output:rm} -f $${:key} $${:public_key} && ${openssh-output:keygen} -t $${:type} -b 2048 -f $${:key} -q -N ""
key = $${directory:ssh}/$${:_buildout_section_name_} key = $${directory:ssh}/$${:_buildout_section_name_}
public_key = $${:key}.pub
location = $${:public_key}
type = rsa type = rsa
[{{ slave_reference }}-backup-public_key]
recipe = plone.recipe.command
stop-on-error = true
command = ${coreutils-output:rm} -f $${:key} && ${dropbear-output:keygen} -y -f {{ '$${' ~ slave_reference }}-backup-private_key:key} | ${grep-output:grep} {{ '$${' ~ slave_reference }}-backup-private_key:type} > $${:key}
key = {{ '$${' ~ slave_reference }}-backup-private_key:key}.pub
location = $${:key}
# Insert as a beginning part, to ensure that all public keys are generated before trying to publish. This will reduce the number of slapgrid-cp run. # Insert as a beginning part, to ensure that all public keys are generated before trying to publish. This will reduce the number of slapgrid-cp run.
{% do part_list.insert(0, "%s-backup-public_key" % slave_reference) -%} {% do part_list.insert(0, "%s-backup-private_key" % slave_reference) -%}
[{{ slave_reference }}-backup-read-public_key] [{{ slave_reference }}-backup-read-public_key]
recipe = slapos.cookbook:readline recipe = slapos.cookbook:readline
storage-path = {{ '$${' ~ slave_reference }}-backup-public_key:key} storage-path = {{ '$${' ~ slave_reference }}-backup-private_key:public_key}
# Publish slave {{ slave_reference }} information # Publish slave {{ slave_reference }} information
[{{ slave_reference }}-backup-publish] [{{ slave_reference }}-backup-publish]
...@@ -107,6 +105,7 @@ mode = 0700 ...@@ -107,6 +105,7 @@ mode = 0700
datadirectory = {{ '$${' ~ slave_reference }}-backup-directory:directory} datadirectory = {{ '$${' ~ slave_reference }}-backup-directory:directory}
sshkey = {{ '$${' ~ slave_reference }}-backup-private_key:key} sshkey = {{ '$${' ~ slave_reference }}-backup-private_key:key}
connection = {{ connection }} connection = {{ connection }}
connection_port = {{ connection_port }}
hostname = {{ hostname }} hostname = {{ hostname }}
include = {{ include_string }} include = {{ include_string }}
exclude_string = {{ exclude_string }} exclude_string = {{ exclude_string }}
...@@ -169,6 +168,12 @@ mode = 0700 ...@@ -169,6 +168,12 @@ mode = 0700
virtual-depends = virtual-depends =
$${nginx-configuration:ip} $${nginx-configuration:ip}
[nginx-listen-promise]
recipe = slapos.cookbook:check_port_listening
hostname = $${nginx-configuration:ip}
port = $${nginx-configuration:port}
path = $${directory:promises}/nginx_listen
[nginx-configuration] [nginx-configuration]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${template-nginx-configuration:output} url = ${template-nginx-configuration:output}
...@@ -187,6 +192,7 @@ ssl_crt = $${directory:ssl}/nginx.crt ...@@ -187,6 +192,7 @@ ssl_crt = $${directory:ssl}/nginx.crt
parts = parts =
dcron-service dcron-service
nginx-service nginx-service
nginx-listen-promise
activate-crontab-file activate-crontab-file
publish-global-rss publish-global-rss
{% for part in part_list -%} {% for part in part_list -%}
......
...@@ -10,7 +10,7 @@ extends = ...@@ -10,7 +10,7 @@ extends =
# ../../component/git/buildout.cfg # ../../component/git/buildout.cfg
# ../../component/subversion/buildout.cfg # ../../component/subversion/buildout.cfg
../../component/rsync/buildout.cfg ../../component/rsync/buildout.cfg
../../component/dropbear/buildout.cfg ../../component/openssh/buildout.cfg
../../component/grep/buildout.cfg ../../component/grep/buildout.cfg
../../component/findutils/buildout.cfg ../../component/findutils/buildout.cfg
# ../../stack/flask.cfg # ../../stack/flask.cfg
...@@ -67,7 +67,7 @@ mode = 0644 ...@@ -67,7 +67,7 @@ mode = 0644
[template-backup-script] [template-backup-script]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-backup-script.sh.in url = ${:_profile_base_location_}/template-backup-script.sh.in
md5sum = 47b20031db3b575651d8515d5add23e6 md5sum = fa79e0307e12e2f5b1f2adbd261995fc
output = ${buildout:directory}/template-backup-script.sh.in output = ${buildout:directory}/template-backup-script.sh.in
mode = 0644 mode = 0644
...@@ -105,7 +105,7 @@ mode = 0644 ...@@ -105,7 +105,7 @@ mode = 0644
[template-pullrdiffbackup] [template-pullrdiffbackup]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pullrdiffbackup.cfg.in url = ${:_profile_base_location_}/instance-pullrdiffbackup.cfg.in
md5sum = 061b98d001b501c9e1beb424e8802d3d md5sum = a2fb7b0cdd944be99da4122eb6f07749
output = ${buildout:directory}/template-pullrdiffbackup.cfg output = ${buildout:directory}/template-pullrdiffbackup.cfg
mode = 0644 mode = 0644
...@@ -116,8 +116,14 @@ md5sum = 42021b325159dff29e4bd4e33b8ff2f3 ...@@ -116,8 +116,14 @@ md5sum = 42021b325159dff29e4bd4e33b8ff2f3
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
[rdiff-backup]
eggs =
${rdiff-backup-build-1.3.4:egg}
[versions] [versions]
rdiff-backup = 1.0.5+SlapOSPatched001 # 1.3.4nxd2 is an invalid version string, thus the patched version string is not '1.3.4nxd2+SlapOSPatched001'
# but '1.3.4nxd2-SlapOSPatched001'.
rdiff-backup = 1.3.4nxd2-SlapOSPatched001
gunicorn = 19.1.1 gunicorn = 19.1.1
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
......
...@@ -18,7 +18,7 @@ ${rdiff-backup-output:rdiff-backup} \ ...@@ -18,7 +18,7 @@ ${rdiff-backup-output:rdiff-backup} \
$${:exclude_string} \ $${:exclude_string} \
--include='$${:include}' \ --include='$${:include}' \
--exclude='**' \ --exclude='**' \
--remote-schema '${dropbear-output:ssh} -T -y -i $${:sshkey} %s $${:remote_schema}' \ --remote-schema '${openssh-output:ssh} -6 -q -T -y -o "StrictHostKeyChecking no" -i $${:sshkey} -p $${:connection_port} %s $${:remote_schema}' \
$${:connection}::/ ./ $${:connection}::/ ./
RESULT=$? RESULT=$?
......
#!{{runTestSuite_py}}
from __future__ import print_function
import argparse, os, re, subprocess, sys
from time import gmtime, strftime, time
from erp5.util import taskdistribution
from erp5.util.testsuite import SubprocessError, TestSuite
from zc.buildout.buildout import Buildout
slapos_buildout = {{repr(slapos_buildout)}}
test_dict = {
'zc.buildout': slapos_buildout,
'zc.recipe.egg': os.path.join(slapos_buildout, 'zc.recipe.egg_'),
}
class DummyTestResult:
class DummyTestResultLine:
def stop(self, **kw):
pass
done = 0
def __init__(self, test_name_list):
self.test_name_list = test_name_list
def start(self):
test_result_line = self.DummyTestResultLine()
try:
test_result_line.name = self.test_name_list[self.done]
except IndexError:
return
self.done += 1
return test_result_line
class BuildoutTestSuite(TestSuite):
RUN_RE = re.compile(
r'Ran (?P<all_tests>\d+) tests with'
' (?P<failures>\d+) failures,'
' (?P<errors>\d+) errors and'
' (?P<skips>\d+) skipped in')
def run(self, test):
start = time()
try:
status_dict = self.spawn(os.path.join('bin', 'zope-testrunner'),
'--test-path', os.path.join(test_dict[test], 'src'))
except SubprocessError, e:
status_dict = e.status_dict
end = time()
status_dict.update(
date = strftime("%Y/%m/%d %H:%M:%S", gmtime(end)),
duration = end - start)
search = self.RUN_RE.search(status_dict['stdout'])
if search:
groupdict = search.groupdict()
status_dict.update(
test_count = int(groupdict['all_tests']),
error_count = int(groupdict['errors']),
failure_count = int(groupdict['failures']),
skip_count = int(groupdict['skips']))
return status_dict
def main():
os.environ['TEMP'] = {{repr(temp_directory)}}
parser = argparse.ArgumentParser(description='Run a test suite.')
parser.add_argument('--test_suite', help='The test suite name')
parser.add_argument('--test_suite_title', help='The test suite title')
parser.add_argument('--test_node_title', help='The test node title')
parser.add_argument('--project_title', help='The project title')
parser.add_argument('--revision', help='The revision to test',
default='dummy_revision')
parser.add_argument('--master_url',
                      help='The URL of the master controlling many suites')
args = parser.parse_args()
test_title = args.test_suite_title or args.test_suite
if args.master_url:
tool = taskdistribution.TaskDistributionTool(args.master_url)
test_result = tool.createTestResult(args.revision,
list(test_dict),
args.test_node_title,
test_title=test_title,
project_title=args.project_title)
if test_result is None:
return
else:
test_result = DummyTestResult(list(test_dict))
fd = os.open('buildout.cfg', os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0666)
try:
os.write(fd, """\
[buildout]
extends = %s
develop =%s
parts = test
newest = false
[versions]
%s
[bootstrap]
recipe = zc.recipe.egg
eggs = zc.buildout
[test]
recipe = zc.recipe.egg
eggs +=
zope.testrunner
scripts =
zope-testrunner
""" % (os.path.join(slapos_buildout, 'buildout.cfg'),
''.join('\n ' + x for x in test_dict.itervalues()),
'\n'.join(x + ' =' for x in test_dict)))
finally:
os.close(fd)
Buildout('buildout.cfg', {}).install(['bootstrap'])
subprocess.check_call((os.path.join('bin', 'buildout'),))
test_suite = BuildoutTestSuite(1)
while 1:
test_result_line = test_result.start()
if not test_result_line:
break
test_result_line.stop(**test_suite.run(test_result_line.name))
if __name__ == "__main__":
main()
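For reference, the RUN_RE pattern in the script above is meant to match the summary line that zope-testrunner prints at the end of a run; a minimal, self-contained sanity check (the sample line is made up for illustration, not captured from a real run):

import re

RUN_RE = re.compile(
    r'Ran (?P<all_tests>\d+) tests with'
    r' (?P<failures>\d+) failures,'
    r' (?P<errors>\d+) errors and'
    r' (?P<skips>\d+) skipped in')

sample = 'Ran 42 tests with 1 failures, 0 errors and 2 skipped in 3.456 seconds.'
groups = RUN_RE.search(sample).groupdict()
assert groups == {'all_tests': '42', 'failures': '1', 'errors': '0', 'skips': '2'}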
[buildout]
extends = software.cfg
[versions]
setuptools = 36.6.0
[buildout]
extends =
../../stack/slapos.cfg
parts =
slapos-cookbook
template
[slapos.buildout-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.buildout.git
git-executable = ${git:location}/bin/git
[runTestSuite_py]
recipe = zc.recipe.egg
eggs = erp5.util
zc.buildout
scripts = ${:interpreter}
interpreter = ${:_buildout_section_name_}
[template]
recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe
rendered = ${buildout:directory}/template.cfg
template =
inline:
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
parts = runTestSuite
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[directory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
[slapos.buildout-repository]
recipe = slapos.recipe.build:gitclone
repository = ${slapos.buildout-repository:location}
git-executable = ${git:location}/bin/git
shared = true
[runTestSuite]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/$${:_buildout_section_name_}
template = ${:_profile_base_location_}/$${:_buildout_section_name_}.in
mode = 0755
context =
key slapparameter_dict slap-configuration:configuration
key slapos_buildout slapos.buildout-repository:location
key temp_directory directory:tmp
raw runTestSuite_py ${buildout:bin-directory}/${runTestSuite_py:interpreter}
[buildout]
extends =
../../stack/caddy/buildout.cfg
\ No newline at end of file
...@@ -50,13 +50,13 @@ ...@@ -50,13 +50,13 @@
"crl-life-period": { "crl-life-period": {
"title": "CRL life time period", "title": "CRL life time period",
"description": "Number of individual certificate validity periods during which the CRL is valid. Default: 1/50.0", "description": "Number of individual certificate validity periods during which the CRL is valid. Default: 1/50.0",
"type": "float", "type": "number",
"default": 0.2 "default": 0.2
}, },
"ca-life-period": { "ca-life-period": {
"title": "CA Certificate life period", "title": "CA Certificate life period",
"description": "Number of individual certificate validity periods during which the CA certificate is valid. Default: 10", "description": "Number of individual certificate validity periods during which the CA certificate is valid. Default: 10",
"type": "float", "type": "number",
"default": 10 "default": 10
}, },
"crt-keep-time": { "crt-keep-time": {
......
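The type change above is needed because JSON Schema has no "float" primitive type; numeric values (integer or floating point) are described by "number". A minimal check using the jsonschema Python library, with illustrative values:

import jsonschema

schema = {
    'type': 'object',
    'properties': {
        'crl-life-period': {'type': 'number', 'default': 0.2},
        'ca-life-period': {'type': 'number', 'default': 10},
    },
}

# "number" accepts both floats and integers, so 0.2 and 10 both validate;
# "float" is not among the JSON Schema primitive types.
jsonschema.validate({'crl-life-period': 0.2, 'ca-life-period': 10}, schema)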
...@@ -40,4 +40,4 @@ cns.recipe.symlink = 0.2.3 ...@@ -40,4 +40,4 @@ cns.recipe.symlink = 0.2.3
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
erp5.util = 0.4.49 erp5.util = 0.4.49
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -48,12 +48,12 @@ scipy = 0.13.3 ...@@ -48,12 +48,12 @@ scipy = 0.13.3
simpy = 3.0.5 simpy = 3.0.5
zope.dottedname = 4.1.0 zope.dottedname = 4.1.0
tablib = 0.10.0 tablib = 0.10.0
mysqlclient = 1.3.10+SlapOSPatched002 mysqlclient = 1.3.12
# indirect dependencies # indirect dependencies
cp.recipe.cmd = 0.5 cp.recipe.cmd = 0.5
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
zope.exceptions = 4.0.7 zope.exceptions = 4.0.7
zope.testing = 4.1.3 zope.testing = 4.1.3
zc.recipe.testrunner = 2.0.0 zc.recipe.testrunner = 2.0.0
......
...@@ -17,6 +17,14 @@ ...@@ -17,6 +17,14 @@
"default": "erp5", "default": "erp5",
"type": "string" "type": "string"
}, },
"bt5": {
"description": "Business Template to install at automatic site creation. By default, all configurators are installed.",
"type": "string"
},
"id-store-interval": {
"description": "Set Store Interval of default SQL Non Continuous Increasing Id Generator at automatic site creation. If unset, the value from the erp5_core Business Template is not touched.",
"type": "integer"
},
"timezone": { "timezone": {
"description": "Zope's timezone. Possible values are determined by host's libc, and typically come from a separate package (tzdata, ...)", "description": "Zope's timezone. Possible values are determined by host's libc, and typically come from a separate package (tzdata, ...)",
"default": "UTC", "default": "UTC",
......
...@@ -27,10 +27,9 @@ eggs = ...@@ -27,10 +27,9 @@ eggs =
zc.buildout zc.buildout
slapos.libnetworkcache slapos.libnetworkcache
slapos.core slapos.core
supervisor
jsonschema jsonschema
hexagonit.recipe.download hexagonit.recipe.download
netaddr
inotifyx
lock_file lock_file
pytz pytz
erp5.util erp5.util
...@@ -60,7 +59,7 @@ mode = 0644 ...@@ -60,7 +59,7 @@ mode = 0644
[versions] [versions]
PyXML = 0.8.5 PyXML = 0.8.5
erp5.util = 0.4.49 erp5.util = 0.4.49
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
ipython = 5.3.0 ipython = 5.3.0
apache-libcloud = 2.1.0 apache-libcloud = 2.1.0
gitdb2 = 2.0.2 gitdb2 = 2.0.2
......
...@@ -55,10 +55,20 @@ if [[ ! -s "$DEPLOYMENT_SCRIPT" ]] ; then ...@@ -55,10 +55,20 @@ if [[ ! -s "$DEPLOYMENT_SCRIPT" ]] ; then
echo "exit 1" > $DEPLOYMENT_SCRIPT echo "exit 1" > $DEPLOYMENT_SCRIPT
fi fi
function add_log ()
{
LOG_FILE=$1
for f in /opt/slapos/log/slapos-node-{software,instance}.log ; do
echo "Tail of '$f':" >> $LOG_FILE
tail -n 500 $f >> $LOG_FILE
done
}
function upload () function upload ()
{ {
try=$1 try=$1
LOG_FILE=$2 LOG_FILE=$2
add_log $LOG_FILE
t=`date '+%Y%m%d%H%S'` t=`date '+%Y%m%d%H%S'`
mv $LOG_FILE ${LOG_FILE}.$t mv $LOG_FILE ${LOG_FILE}.$t
# just to be sure flush all disk operations before uploading # just to be sure flush all disk operations before uploading
......
...@@ -55,7 +55,7 @@ output = ${buildout:directory}/template-original.kvm.cfg ...@@ -55,7 +55,7 @@ output = ${buildout:directory}/template-original.kvm.cfg
[deploy-script-controller-script] [deploy-script-controller-script]
filename = deploy-script-controller filename = deploy-script-controller
location = ${:_profile_base_location_}/${:filename} location = ${:_profile_base_location_}/${:filename}
md5sum = d2b92f45257a52e5a7ff5c311d46d4ae md5sum = 31aadc895acf9fc2fc6e1cbe815339c6
# configuration # configuration
waittime = 360 waittime = 360
tries = 80 tries = 80
......
...@@ -14,4 +14,4 @@ md5sum = efd3b712a2294207f265a9c45648d5cf ...@@ -14,4 +14,4 @@ md5sum = efd3b712a2294207f265a9c45648d5cf
mode = 0644 mode = 0644
[versions] [versions]
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -19,5 +19,5 @@ context = ...@@ -19,5 +19,5 @@ context =
[fluentd] [fluentd]
gems += gems +=
fluent-plugin-wendelin==0.1 fluent-plugin-wendelin==0.3
fluent-plugin-bin fluent-plugin-bin
...@@ -14,8 +14,19 @@ passwd = root1234 ...@@ -14,8 +14,19 @@ passwd = root1234
environment = environment =
GITLAB_ROOT_PASSWORD=${root-password:passwd} GITLAB_ROOT_PASSWORD=${root-password:passwd}
[backend-info] [service-postgresql]
# host = ${instance-parameter:ipv4-random} pgdata-directory = ${directory:srv}/pg
[gitlab-workhorse-dir]
recipe = slapos.cookbook:mkdirectory
srv = ${directory:srv}/glab-wh
[gitlab-workhorse]
srv = ${gitlab-workhorse-dir:srv}
socket = ${gitlab-workhorse:srv}/wh.socket
[unicorn]
socket = ${:srv}/unc.socket
[publish-instance-info] [publish-instance-info]
password = ${root-password:passwd} password = ${root-password:passwd}
......
...@@ -27,7 +27,7 @@ extends = ...@@ -27,7 +27,7 @@ extends =
parts = parts =
ruby2.1 ruby2.1
golang16 golang19
git git
postgresql92 postgresql92
redis28 redis28
...@@ -211,7 +211,7 @@ make-targets= cd ${git2go-repository:location} ...@@ -211,7 +211,7 @@ make-targets= cd ${git2go-repository:location}
&& cp -a ${git-backup-repository:location}/contrib/gitlab-backup ${gopath:bin} && cp -a ${git-backup-repository:location}/contrib/gitlab-backup ${gopath:bin}
environment = environment =
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang16:location}/bin:${buildout:bin-directory}:%(PATH)s PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang19:location}/bin:${buildout:bin-directory}:%(PATH)s
GOPATH=${gopath:directory} GOPATH=${gopath:directory}
[xnice-repository] [xnice-repository]
...@@ -245,7 +245,7 @@ configure-command = : ...@@ -245,7 +245,7 @@ configure-command = :
make-targets= ${:_buildout_section_name_} make-targets= ${:_buildout_section_name_}
environment = environment =
PATH=${golang16:location}/bin:%(PATH)s PATH=${golang19:location}/bin:%(PATH)s
############################### ###############################
...@@ -336,7 +336,7 @@ md5sum = 319d7dbe3ad9b260c1e292cfc0d13b11 ...@@ -336,7 +336,7 @@ md5sum = 319d7dbe3ad9b260c1e292cfc0d13b11
[instance-gitlab-test.cfg.in] [instance-gitlab-test.cfg.in]
<= download-file <= download-file
md5sum = cc8065104458af311c2ffa9ae20235a6 md5sum = a4ad76856db98e508af7e773d9ff78f9
[macrolib.cfg.in] [macrolib.cfg.in]
<= download-file <= download-file
...@@ -381,5 +381,5 @@ cns.recipe.symlink = 0.2.3 ...@@ -381,5 +381,5 @@ cns.recipe.symlink = 0.2.3
docutils = 0.12 docutils = 0.12
plone.recipe.command = 1.1 plone.recipe.command = 1.1
rubygemsrecipe = 0.2.2+slapos001 rubygemsrecipe = 0.2.2+slapos001
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
z3c.recipe.scripts = 1.0.1 z3c.recipe.scripts = 1.0.1
...@@ -72,7 +72,7 @@ async = 0.6.1 ...@@ -72,7 +72,7 @@ async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
pycrypto = 2.6 pycrypto = 2.6
rdiff-backup = 1.0.5+SlapOSPatched001 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2 slapos.recipe.template = 4.1
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
smmap = 0.8.2 smmap = 0.8.2
plone.recipe.command = 1.1 plone.recipe.command = 1.1
......
...@@ -48,4 +48,4 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8 ...@@ -48,4 +48,4 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -113,5 +113,5 @@ mode = 0644 ...@@ -113,5 +113,5 @@ mode = 0644
[versions] [versions]
erp5.util = 0.4.49 erp5.util = 0.4.49
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
selenium = 2.53.1 selenium = 2.53.1
...@@ -85,7 +85,7 @@ pyzmq = 16.0.2 ...@@ -85,7 +85,7 @@ pyzmq = 16.0.2
scikit-learn = 0.18.1 scikit-learn = 0.18.1
seaborn = 0.7.1 seaborn = 0.7.1
simplegeneric = 0.8.1 simplegeneric = 0.8.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
statsmodels = 0.8.0 statsmodels = 0.8.0
terminado = 0.6 terminado = 0.6
tornado = 4.4.2 tornado = 4.4.2
......
...@@ -89,7 +89,7 @@ command = ...@@ -89,7 +89,7 @@ command =
[template] [template]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in url = ${:_profile_base_location_}/instance.cfg.in
md5sum = f40a938400e789361c95d5a9246bf0ef md5sum = bf5ef731c0d8da0267a4939882b4eeee
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
...@@ -98,7 +98,7 @@ recipe = hexagonit.recipe.download ...@@ -98,7 +98,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2 url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2
mode = 644 mode = 644
md5sum = a849d4a6060fdb4e9e86917fb77ef153 md5sum = e2b8f86bdc12c86e7d959b55c6d54f6d
download-only = true download-only = true
on-update = true on-update = true
...@@ -107,7 +107,7 @@ recipe = hexagonit.recipe.download ...@@ -107,7 +107,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-cluster.cfg.jinja2.in url = ${:_profile_base_location_}/instance-kvm-cluster.cfg.jinja2.in
mode = 644 mode = 644
md5sum = d9745bc9bd1d22a640a628c005f88ffb md5sum = 05b6004e8c7a94de14f247affcef4971
download-only = true download-only = true
on-update = true on-update = true
...@@ -143,7 +143,7 @@ recipe = hexagonit.recipe.download ...@@ -143,7 +143,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-export.cfg.jinja2 url = ${:_profile_base_location_}/instance-kvm-export.cfg.jinja2
mode = 644 mode = 644
md5sum = 13387d37bbf430f1d2b827c8f1acc804 md5sum = fbad91193be6ebde5fc4c05a38a55e7b
download-only = true download-only = true
on-update = true on-update = true
......
...@@ -157,12 +157,6 @@ ...@@ -157,12 +157,6 @@
"description": "Text content which will be written in a file data of cluster http server. All VM will be able to download that file via the static URL of cluster HTTP server: https://10.0.2.101/FOLDER_HASH/data.", "description": "Text content which will be written in a file data of cluster http server. All VM will be able to download that file via the static URL of cluster HTTP server: https://10.0.2.101/FOLDER_HASH/data.",
"type": "string" "type": "string"
}, },
"enable-monitor": {
"title": "Enable Monitoring on this cluster",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"type": "boolean",
"default": true
},
"monitor-interface-url": { "monitor-interface-url": {
"title": "Monitor Web Interface URL", "title": "Monitor Web Interface URL",
"description": "Give Url of HTML web interface that will be used to render this monitor instance.", "description": "Give Url of HTML web interface that will be used to render this monitor instance.",
...@@ -341,6 +335,20 @@ ...@@ -341,6 +335,20 @@
"default": "qcow2", "default": "qcow2",
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"] "enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"]
}, },
"wipe-disk-ondestroy": {
"title": "Wipe disks when destroy the VM",
"description": "Say if disks should be wiped by writing new data over every single bit before delete them. This option is used to securely delete VM disks",
"type": "boolean",
"default": false
},
"wipe-disk-iterations": {
"title": "Wipe disk iterations",
"description": "Number of disk overwrite iterations with random data. Default is 1. WARNING: Increase this value will slow down partition destruction and increase IO.",
"type": "integer",
"default": 1,
"minimum": 1,
"maximum": 5
},
"use-tap": { "use-tap": {
"title": "Enable QEMU TAP network interface", "title": "Enable QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.", "description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
......
...@@ -9,7 +9,13 @@ ...@@ -9,7 +9,13 @@
{% set kvm_instance_dict = {} -%} {% set kvm_instance_dict = {} -%}
{% set kvm_hostname_list = [] -%} {% set kvm_hostname_list = [] -%}
{% set monitor_url_list = [] -%} {% set monitor_url_list = [] -%}
{% set enable_monitoring = slapparameter_dict.get('enable-monitor', True) -%}
{% macro setconfig(name, value) -%}
{# will set a config-name = value if value is not empty -#}
{% if value and value != '' -%}
config-{{ name }} = {{ dumps(value) }}
{% endif -%}
{% endmacro -%}
[request-common] [request-common]
recipe = slapos.cookbook:request recipe = slapos.cookbook:request
...@@ -37,36 +43,38 @@ state = stopped ...@@ -37,36 +43,38 @@ state = stopped
config-frontend-instance-name = {{ instance_name ~ ' VNC Frontend' }} config-frontend-instance-name = {{ instance_name ~ ' VNC Frontend' }}
config-frontend-software-type = {{ dumps(frontend_dict.get('software-type', 'frontend')) }} config-frontend-software-type = {{ dumps(frontend_dict.get('software-type', 'frontend')) }}
config-frontend-software-url = {{ dumps(frontend_dict.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg')) }} config-frontend-software-url = {{ dumps(frontend_dict.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg')) }}
config-frontend-instance-guid = {{ dumps(frontend_dict.get('instance-guid', '')) }} {{ setconfig('frontend-instance-guid', kvm_parameter_dict.get('instance-guid', '')) }}
config-name = {{ instance_name }} config-name = {{ instance_name }}
{% if slapparameter_dict.get('authorized-keys', []) -%} {% if slapparameter_dict.get('authorized-keys', []) -%}
config-authorized-key = {{ dumps(slapparameter_dict.get('authorized-keys') | join('\n')) }} config-authorized-key = {{ dumps(slapparameter_dict.get('authorized-keys') | join('\n')) }}
{% endif -%} {% endif -%}
config-nbd-port = {{ dumps(kvm_parameter_dict.get('nbd-port', 1024)) }} config-nbd-port = {{ dumps(kvm_parameter_dict.get('nbd-port', 1024)) }}
config-nbd-host = {{ dumps(kvm_parameter_dict.get('nbd-host', '')) }}
config-nbd2-port = {{ dumps(kvm_parameter_dict.get('nbd-port2', 1024)) }} config-nbd2-port = {{ dumps(kvm_parameter_dict.get('nbd-port2', 1024)) }}
config-nbd2-host = {{ dumps(kvm_parameter_dict.get('nbd-host2', '')) }}
config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 1024)) }} config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 1024)) }}
config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 10)) }} config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 10)) }}
config-disk-type = {{ dumps(kvm_parameter_dict.get('disk-type', 'virtio')) }} config-disk-type = {{ dumps(kvm_parameter_dict.get('disk-type', 'virtio')) }}
config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 1)) }} config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 1)) }}
config-cpu-options = {{ dumps(kvm_parameter_dict.get('cpu-options', '')) }} {{ setconfig('numa', kvm_parameter_dict.get('numa', '')) }}
config-numa = {{ dumps(kvm_parameter_dict.get('numa', '')) }} {{ setconfig('machine-options', kvm_parameter_dict.get('machine-options', '')) }}
config-disk-cache = {{ dumps(kvm_parameter_dict.get('disk-cache', '')) }} {{ setconfig('cpu-options', kvm_parameter_dict.get('cpu-options', '')) }}
config-disk-aio = {{ dumps(kvm_parameter_dict.get('disk-aio', '')) }} {{ setconfig('nbd-host', kvm_parameter_dict.get('nbd-host', '')) }}
{{ setconfig('host2', kvm_parameter_dict.get('host2', '')) }}
config-auto-ballooning = {{ dumps(kvm_parameter_dict.get('auto-ballooning', True)) }} config-auto-ballooning = {{ dumps(kvm_parameter_dict.get('auto-ballooning', True)) }}
config-machine-options = {{ dumps(kvm_parameter_dict.get('machine-options', '')) }} {{ setconfig('disk-cache', kvm_parameter_dict.get('disk-cache', '')) }}
config-cpu-model = {{ dumps(kvm_parameter_dict.get('cpu-model', '')) }} {{ setconfig('disk-aio', kvm_parameter_dict.get('disk-aio', '')) }}
{{ setconfig('cpu-model', kvm_parameter_dict.get('cpu-model', '')) }}
{{ setconfig('disk-cache', kvm_parameter_dict.get('disk-cache', '')) }}
{% set nat_rules_list = kvm_parameter_dict.get('nat-rules', []) -%} {% set nat_rules_list = kvm_parameter_dict.get('nat-rules', []) -%}
config-nat-rules = {{ nat_rules_list | join(' ') }} {{ setconfig('nat-rules', ' '.join(nat_rules_list)) }}
config-publish-nat-url = True config-publish-nat-url = True
config-use-nat = {{ use_nat }} config-use-nat = {{ use_nat }}
config-use-tap = {{ dumps(kvm_parameter_dict.get('use-tap', True)) }} config-use-tap = {{ dumps(kvm_parameter_dict.get('use-tap', True)) }}
config-nat-restrict-mode = {{ dumps(kvm_parameter_dict.get('nat-restrict-mode', False)) }} config-nat-restrict-mode = {{ dumps(kvm_parameter_dict.get('nat-restrict-mode', False)) }}
config-enable-vhost = {{ dumps(kvm_parameter_dict.get('enable-vhost', False)) }} config-enable-vhost = {{ dumps(kvm_parameter_dict.get('enable-vhost', False)) }}
config-virtual-hard-drive-url = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-url', '')) }} {{ setconfig('virtual-hard-drive-url', kvm_parameter_dict.get('virtual-hard-drive-url', '')) }}
config-virtual-hard-drive-md5sum = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-md5sum', '')) }} {{ setconfig('virtual-hard-drive-md5sum', kvm_parameter_dict.get('virtual-hard-drive-md5sum', '')) }}
config-virtual-hard-drive-gzipped = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-gzipped', False)) }} config-virtual-hard-drive-gzipped = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-gzipped', False)) }}
config-hard-drive-url-check-certificate = {{ dumps(kvm_parameter_dict.get('hard-drive-url-check-certificate', True)) }} config-hard-drive-url-check-certificate = {{ dumps(kvm_parameter_dict.get('hard-drive-url-check-certificate', True)) }}
config-external-disk-number = {{ dumps(kvm_parameter_dict.get('external-disk-number', 0)) }} config-external-disk-number = {{ dumps(kvm_parameter_dict.get('external-disk-number', 0)) }}
...@@ -74,15 +82,18 @@ config-external-disk-size = {{ dumps(kvm_parameter_dict.get('external-disk-size' ...@@ -74,15 +82,18 @@ config-external-disk-size = {{ dumps(kvm_parameter_dict.get('external-disk-size'
config-external-disk-format = {{ dumps(kvm_parameter_dict.get('external-disk-format', 'qcow2')) }} config-external-disk-format = {{ dumps(kvm_parameter_dict.get('external-disk-format', 'qcow2')) }}
config-enable-http-server = {{ dumps(kvm_parameter_dict.get('enable-http-server', True)) }} config-enable-http-server = {{ dumps(kvm_parameter_dict.get('enable-http-server', True)) }}
config-httpd-port = {{ dumps(kvm_parameter_dict.get('httpd-port', 8081)) }} config-httpd-port = {{ dumps(kvm_parameter_dict.get('httpd-port', 8081)) }}
{% if kvm_parameter_dict.get('data-to-vm', '') -%}
config-data-to-vm = {{ dumps(kvm_parameter_dict.get('data-to-vm', '')) }} {{ setconfig('data-to-vm', kvm_parameter_dict.get('data-to-vm', '')) }}
{% endif -%}
config-disable-ansible-promise = {{ dumps(kvm_parameter_dict.get('disable-ansible-promise', False)) }} config-disable-ansible-promise = {{ dumps(kvm_parameter_dict.get('disable-ansible-promise', False)) }}
config-enable-monitor = {{ enable_monitoring }}
config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }} config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
config-monitor-username = ${monitor-instance-parameter:username} config-monitor-username = ${monitor-instance-parameter:username}
config-monitor-password = ${monitor-htpasswd:passwd} config-monitor-password = ${monitor-htpasswd:passwd}
# Enable disk wipe options
{% if kvm_parameter_dict.get('wipe-disk-ondestroy', False) -%}
config-wipe-disk-ondestroy = True
config-wipe-disk-iterations = {{ dumps(kvm_parameter_dict.get('wipe-disk-iterations', 1)) }}
{% endif -%}
# Enable simple http server on ipv6 so all VMs will access it # Enable simple http server on ipv6 so all VMs will access it
config-document-host = ${apache-conf:ip} config-document-host = ${apache-conf:ip}
config-document-port = ${apache-conf:port} config-document-port = ${apache-conf:port}
...@@ -109,13 +120,11 @@ return = ...@@ -109,13 +120,11 @@ return =
{{ ' ' }}nat-rule-url-{{ port }} {{ ' ' }}nat-rule-url-{{ port }}
{% endfor -%} {% endfor -%}
{% endif -%} {% endif -%}
{% if enable_monitoring -%}
{{ ' ' }}monitor-base-url {{ ' ' }}monitor-base-url
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% endif -%}
{% if str(kvm_parameter_dict.get('use-tap', 'True')).lower() == 'true' -%} {% if str(kvm_parameter_dict.get('use-tap', 'True')).lower() == 'true' -%}
{{ ' ' }}tap-ipv4 {{ ' ' }}tap-ipv4
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% do publish_dict.__setitem__('lan-' ~ instance_name, '${' ~ section ~ ':connection-tap-ipv4}') -%} {% do publish_dict.__setitem__('lan-' ~ instance_name, '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% do kvm_hostname_list.append(instance_name ~ ' ' ~ '${' ~ section ~ ':connection-tap-ipv4}') -%} {% do kvm_hostname_list.append(instance_name ~ ' ' ~ '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% endif -%} {% endif -%}
...@@ -151,7 +160,7 @@ name = Frontend {{ name }} ...@@ -151,7 +160,7 @@ name = Frontend {{ name }}
software-type = {{ slave_frontend_stype }} software-type = {{ slave_frontend_stype }}
slave = true slave = true
config-url = {{ url }} config-url = {{ url }}
config-custom_domain = {{ dumps(frontend_parameter_dict.get('domain', '')) }} {{ setconfig('custom_domain', kvm_parameter_dict.get('domain', '')) }}
config-enable_cache = {{ dumps(frontend_parameter_dict.get('enable-cache', False)) }} config-enable_cache = {{ dumps(frontend_parameter_dict.get('enable-cache', False)) }}
config-https-only = {{ dumps(frontend_parameter_dict.get('https-only', False)) }} config-https-only = {{ dumps(frontend_parameter_dict.get('https-only', False)) }}
{% if frontend_parameter_dict.get('type', '') -%} {% if frontend_parameter_dict.get('type', '') -%}
...@@ -245,18 +254,14 @@ recipe = slapos.cookbook:publish ...@@ -245,18 +254,14 @@ recipe = slapos.cookbook:publish
{% for name, value in publish_dict.items() -%} {% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }} {{ name }} = {{ value }}
{% endfor %} {% endfor %}
{% if enable_monitoring -%} {% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% do part_list.append('monitor-base') -%}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password} monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
{% endif -%} {% do part_list.append('monitor-base') -%}
[buildout] [buildout]
extends = extends =
{{ template_httpd_cfg }} {{ template_httpd_cfg }}
{% if enable_monitoring -%}
{{ ' ' ~ template_monitor }} {{ ' ' ~ template_monitor }}
{% endif -%}
parts = parts =
httpd httpd
......
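The setconfig macro introduced earlier in this file's diff only emits a config-<name> line when the value is non-empty, which is what lets the template stop forcing empty defaults onto the requested instances. A rough stand-alone rendering check; json.dumps stands in here for the dumps() helper that the SlapOS jinja2 recipe normally puts in the template context:

import json
import jinja2

template = jinja2.Template('''
{%- macro setconfig(name, value) -%}
{% if value and value != '' -%}
config-{{ name }} = {{ dumps(value) }}
{% endif -%}
{%- endmacro -%}
{{ setconfig('nbd-host', '') }}
{{ setconfig('cpu-options', 'cores=2') }}
''')

# Only the second call produces a config- line, because the first value is empty.
print(template.render(dumps=json.dumps))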
...@@ -34,17 +34,17 @@ context = ...@@ -34,17 +34,17 @@ context =
# Extends publish section with resilient parameters # Extends publish section with resilient parameters
[publish-connection-information] [publish-connection-information]
<= resilient-publish-connection-parameter <= resilient-publish-connection-parameter
monitor-base-url = ${monitor-publish:monitor-base-url}
monitor-setup-url = ${monitor-publish:monitor-setup-url}
{% if str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
[monitor-instance-parameter] [monitor-instance-parameter]
monitor-httpd-port = 8026 monitor-httpd-port = 8026
monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }} monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }}
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }} cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
{% if slapparameter_dict.get('monitor-username', '') -%} {% if slapparameter_dict.get('monitor-username', '') -%}
username = {{ slapparameter_dict['monitor-username'] }} username = {{ slapparameter_dict['monitor-username'] }}
{% endif -%} {% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%} {% if slapparameter_dict.get('monitor-password', '') -%}
password = {{ slapparameter_dict['monitor-password'] }} password = {{ slapparameter_dict['monitor-password'] }}
{% endif -%}
{% endif -%} {% endif -%}
...@@ -159,6 +159,21 @@ ...@@ -159,6 +159,21 @@
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"] "enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"]
}, },
"wipe-disk-ondestroy": {
"title": "Wipe disks when destroy the VM",
"description": "Say if disks should be wiped by writing new data over every single bit before delete them. This option is used to securely delete VM disks",
"type": "boolean",
"default": false
},
"wipe-disk-iterations": {
"title": "Wipe disk iterations",
"description": "Number of disk overwrite iterations with random data. Default is 1. WARNING: Increase this value will slow down partition destruction and increase IO.",
"type": "integer",
"default": 1,
"minimum": 1,
"maximum": 5
},
"use-tap": { "use-tap": {
"title": "Use QEMU TAP network interface", "title": "Use QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.", "description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
...@@ -188,12 +203,6 @@ ...@@ -188,12 +203,6 @@
"type": "boolean", "type": "boolean",
"default": false "default": false
}, },
"enable-monitor": {
"title": "Deploy monitoring tools",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"type": "boolean",
"default": true
},
"monitor-interface-url": { "monitor-interface-url": {
"title": "Monitor Web Interface URL", "title": "Monitor Web Interface URL",
"description": "Give Url of HTML web interface that will be used to render this monitor instance.", "description": "Give Url of HTML web interface that will be used to render this monitor instance.",
......
{% set enable_http = slapparameter_dict.get('enable-http-server', 'False').lower() -%} {% set enable_http = slapparameter_dict.get('enable-http-server', 'False').lower() -%}
{% set use_tap = slapparameter_dict.get('use-tap', 'False').lower() -%} {% set use_tap = slapparameter_dict.get('use-tap', 'False').lower() -%}
{% set use_nat = slapparameter_dict.get('use-nat', 'True').lower() -%} {% set use_nat = slapparameter_dict.get('use-nat', 'True').lower() -%}
{% set wipe_disk = slapparameter_dict.get('wipe-disk-ondestroy', 'False').lower() -%}
{% set nat_restrict = slapparameter_dict.get('nat-restrict-mode', 'False').lower() -%} {% set nat_restrict = slapparameter_dict.get('nat-restrict-mode', 'False').lower() -%}
{% set name = slapparameter_dict.get('name', 'localhost') -%} {% set name = slapparameter_dict.get('name', 'localhost') -%}
{% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', 'True').lower() -%} {% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', 'True').lower() -%}
...@@ -9,7 +10,6 @@ ...@@ -9,7 +10,6 @@
{% set frontend_software_type = 'default' -%} {% set frontend_software_type = 'default' -%}
{% set extends_list = [] -%} {% set extends_list = [] -%}
{% set part_list = [] -%} {% set part_list = [] -%}
{% set monitor = str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
{% set bootstrap_url = '' -%} {% set bootstrap_url = '' -%}
{% set bootstrap_url_md5sum = '' -%} {% set bootstrap_url_md5sum = '' -%}
...@@ -26,10 +26,9 @@ ...@@ -26,10 +26,9 @@
{% set nat_rule_list = '' %} {% set nat_rule_list = '' %}
{% endif -%} {% endif -%}
{% if monitor -%}
{% do extends_list.append(template_monitor) -%} {% do extends_list.append(template_monitor) -%}
{% endif -%}
{% do extends_list.append(logrotate_cfg) -%} {% do extends_list.append(logrotate_cfg) -%}
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc etc = ${buildout:directory}/etc
...@@ -42,6 +41,7 @@ services = ${:etc}/service ...@@ -42,6 +41,7 @@ services = ${:etc}/service
promises = ${:etc}/promise promises = ${:etc}/promise
novnc-conf = ${:etc}/novnc novnc-conf = ${:etc}/novnc
run = ${:var}/run run = ${:var}/run
prerm = ${:etc}/prerm
ca-dir = ${:srv}/ssl ca-dir = ${:srv}/ssl
public = ${:srv}/public/ public = ${:srv}/public/
cron-entries = ${:etc}/cron.d cron-entries = ${:etc}/cron.d
...@@ -219,6 +219,19 @@ input = inline:#!/bin/sh ...@@ -219,6 +219,19 @@ input = inline:#!/bin/sh
output = ${directory:promises}/kvm-disk-image-corruption output = ${directory:promises}/kvm-disk-image-corruption
mode = 700 mode = 700
{% if wipe_disk == 'true' -%}
{% do part_list.append('wipe-disk-wrapper') -%}
{% set wipe_file_list = '${kvm-parameter-dict:disk-path}' -%}
{% if storage_dict -%}
{% set wipe_file_list = '${kvm-parameter-dict:disk-path}' ~ ' ' ~ '/* '.join(storage_dict.values()) ~ '/*' -%}
{% endif -%}
[wipe-disk-wrapper]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:prerm}/slapos_wipe_qemu_disk
command-line =
{{ wipe_disk_wrapper }} -n {{ slapparameter_dict.get('wipe-disk-iterations', 1) }} -suz --check-pid-file ${kvm-parameter-dict:pid-file-path} --file {{ wipe_file_list }}
{% endif -%}
[kvm-started-promise] [kvm-started-promise]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
template = {{ qemu_start_promise_tpl }} template = {{ qemu_start_promise_tpl }}
...@@ -343,22 +356,20 @@ hostname = ${httpd:host} ...@@ -343,22 +356,20 @@ hostname = ${httpd:host}
port = ${httpd:port} port = ${httpd:port}
{% endif %} {% endif %}
{% if monitor -%}
[monitor-instance-parameter] [monitor-instance-parameter]
monitor-httpd-port = 8026 monitor-httpd-port = 8026
monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }} monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }}
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }} cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
{% if slapparameter_dict.get('monitor-username', '') -%} {% if slapparameter_dict.get('monitor-username', '') -%}
username = {{ slapparameter_dict['monitor-username'] }} username = {{ slapparameter_dict['monitor-username'] }}
{% endif -%} {% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%} {% if slapparameter_dict.get('monitor-password', '') -%}
password = {{ slapparameter_dict['monitor-password'] }} password = {{ slapparameter_dict['monitor-password'] }}
{% endif -%}
{% endif -%} {% endif -%}
interface-url = {{ slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') }}
[publish-connection-information] [publish-connection-information]
<= monitor-publish
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
ipv6 = ${slap-network-information:global-ipv6} ipv6 = ${slap-network-information:global-ipv6}
backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc_auto.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd} backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc_auto.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
...@@ -392,13 +403,6 @@ tap-ipv4 = ${slap-network-information:tap-ipv4} ...@@ -392,13 +403,6 @@ tap-ipv4 = ${slap-network-information:tap-ipv4}
7_info = Get the public key file in your VM with the command: wget {{ kvm_http }}/authorized_keys 7_info = Get the public key file in your VM with the command: wget {{ kvm_http }}/authorized_keys
{% endif %} {% endif %}
{% endif %} {% endif %}
{% if monitor -%}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% if monitor_interface_url -%}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
{% endif -%}
{% endif -%}
{% if use_tap == 'true' and tap_network_dict.has_key('ipv4') -%} {% if use_tap == 'true' and tap_network_dict.has_key('ipv4') -%}
1_info = Use these configurations below to configure interface {{ iface }} in your VM. 1_info = Use these configurations below to configure interface {{ iface }} in your VM.
...@@ -631,10 +635,8 @@ parts = ...@@ -631,10 +635,8 @@ parts =
cron cron
cron-entry-logrotate cron-entry-logrotate
frontend-promise frontend-promise
{% if monitor -%}
# monitor parts # monitor parts
monitor-base monitor-base
{% endif -%}
# Complete parts with sections # Complete parts with sections
{{ part_list | join('\n ') }} {{ part_list | join('\n ') }}
......
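The wipe-disk-wrapper part above delegates the actual wiping to the securedelete wrapper exposed in the software's bin directory; conceptually the request is "overwrite each disk image with random data N times, then remove it". The sketch below only illustrates that idea in Python and is not the securedelete implementation or its command-line interface:

import os

def wipe_file(path, iterations=1, chunk_size=1 << 20):
    # Overwrite the file in place with random data `iterations` times,
    # syncing to disk after each pass, then unlink it.
    size = os.path.getsize(path)
    with open(path, 'r+b') as f:
        for _ in range(iterations):
            f.seek(0)
            remaining = size
            while remaining:
                n = min(chunk_size, remaining)
                f.write(os.urandom(n))
                remaining -= n
            f.flush()
            os.fsync(f.fileno())
    os.unlink(path)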
...@@ -103,6 +103,7 @@ context = ...@@ -103,6 +103,7 @@ context =
raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename} raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename}
raw template_monitor ${monitor2-template:rendered} raw template_monitor ${monitor2-template:rendered}
raw websockify_executable_location ${buildout:directory}/bin/websockify raw websockify_executable_location ${buildout:directory}/bin/websockify
raw wipe_disk_wrapper ${buildout:directory}/bin/securedelete
template-parts-destination = ${template-parts:target} template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target} template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination import-list = file parts :template-parts-destination
......
...@@ -5,13 +5,13 @@ extends = common.cfg ...@@ -5,13 +5,13 @@ extends = common.cfg
# XXX - use websockify = 0.5.1 for compatibility with kvm frontend # XXX - use websockify = 0.5.1 for compatibility with kvm frontend
websockify = 0.5.1 websockify = 0.5.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
erp5.util = 0.4.49 erp5.util = 0.4.49
apache-libcloud = 1.1.0 apache-libcloud = 1.1.0
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
gitdb = 0.6.4 gitdb = 0.6.4
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
smmap = 0.9.0 smmap = 0.9.0
# websockify = 0.8.0 # websockify = 0.8.0
......
...@@ -17,7 +17,7 @@ parts += ...@@ -17,7 +17,7 @@ parts +=
versions = versions versions = versions
[versions] [versions]
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
[template-instance] [template-instance]
......
...@@ -70,6 +70,11 @@ ...@@ -70,6 +70,11 @@
"default": 1, "default": 1,
"type": "integer" "type": "integer"
}, },
"data-deduplication": {
"description": "Set the --dedup option for storage nodes.",
"default": false,
"type": "boolean"
},
"disable-drop-partitions": { "disable-drop-partitions": {
"description": "Set the --disable-drop-partitions option for storage nodes.", "description": "Set the --disable-drop-partitions option for storage nodes.",
"default": false, "default": false,
......
...@@ -87,6 +87,7 @@ masters = ${publish:masters} ...@@ -87,6 +87,7 @@ masters = ${publish:masters}
database-adapter = MySQL database-adapter = MySQL
wait-database = -1 wait-database = -1
engine = {{ slapparameter_dict.get('engine', '') }} engine = {{ slapparameter_dict.get('engine', '') }}
dedup = {{ dumps(bool(slapparameter_dict.get('data-deduplication'))) }}
disable-drop-partitions = {{ dumps(bool(slapparameter_dict.get('disable-drop-partitions'))) }} disable-drop-partitions = {{ dumps(bool(slapparameter_dict.get('disable-drop-partitions'))) }}
{% for i in range(slapparameter_dict.get('storage-count', 1)) -%} {% for i in range(slapparameter_dict.get('storage-count', 1)) -%}
......
...@@ -98,7 +98,7 @@ md5sum = 1fee10f02c2fa2a581e21878ca0fd704 ...@@ -98,7 +98,7 @@ md5sum = 1fee10f02c2fa2a581e21878ca0fd704
[instance-neo-storage-mysql] [instance-neo-storage-mysql]
<= download-base-neo <= download-base-neo
md5sum = 67d623d631c2f99e33bcabc79fc9cccf md5sum = 366e51c0dbd85e511a31e403b8704735
[template-neo-my-cnf] [template-neo-my-cnf]
<= download-base-neo <= download-base-neo
...@@ -112,16 +112,16 @@ apache-libcloud = 1.5.0 ...@@ -112,16 +112,16 @@ apache-libcloud = 1.5.0
ecdsa = 0.13 ecdsa = 0.13
gitdb2 = 2.0.0 gitdb2 = 2.0.0
msgpack-python = 0.4.8 msgpack-python = 0.4.8
mysqlclient = 1.3.10+SlapOSPatched002 mysqlclient = 1.3.12
persistent = 4.2.3 persistent = 4.2.3
pycrypto = 2.6.1 pycrypto = 2.6.1
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap2 = 2.0.1 smmap2 = 2.0.1
transaction = 1.7.0 transaction = 1.7.0
zodbpickle = 0.6.0 zodbpickle = 0.6.0
zodbtools = 0.0.0.dev3 zodbtools = 0.0.0.dev4
# Required by: # Required by:
# slapos.toolbox==0.71 # slapos.toolbox==0.71
...@@ -153,3 +153,20 @@ paramiko = 2.1.2 ...@@ -153,3 +153,20 @@ paramiko = 2.1.2
# Required by: # Required by:
# slapos.toolbox==0.71 # slapos.toolbox==0.71
passlib = 1.7.1 passlib = 1.7.1
# Required by:
# zodbtools==0.0.0dev4
zodburi = 2.3.0
# Required by:
# zodburi==2.0
# ZEO 5 requires transaction >= 2
ZEO = 4.3.1
# Required by:
# zodburi==2.0
mock = 2.0.0
# Required by:
# ZEO==4.3.1
zdaemon = 4.2.0
...@@ -3,4 +3,14 @@ extends = software.cfg ...@@ -3,4 +3,14 @@ extends = software.cfg
[versions] [versions]
ZODB = 5.3.0 ZODB = 5.3.0
ZEO = 5.1.0
transaction = 2.1.2 transaction = 2.1.2
# Required by:
# ZEO==5.1.0
# trollius==2.1
futures = 3.1.1
# Required by:
# ZEO==5.1.0
trollius = 2.1
# Code generated by gowork-snapshot; DO NOT EDIT.
# list of go git repositories to fetch
[gowork.goinstall]
depends_gitfetch =
${go_github.com_cznic_strutil:recipe}
${go_github.com_golang_glog:recipe}
${go_github.com_kisielk_og-rek:recipe}
${go_github.com_kylelemons_godebug:recipe}
${go_github.com_pkg_errors:recipe}
${go_github.com_pkg_profile:recipe}
${go_github.com_someonegg_gocontainer:recipe}
${go_golang.org_x_net:recipe}
${go_golang.org_x_perf:recipe}
${go_golang.org_x_sync:recipe}
${go_golang.org_x_tools:recipe}
${go_lab.nexedi.com_kirr_go123:recipe}
${go_lab.nexedi.com_kirr_neo:recipe}
[go_github.com_cznic_strutil]
<= go-git-package
go.importpath = github.com/cznic/strutil
repository = https://github.com/cznic/strutil
revision = 529a34b1c1
[go_github.com_golang_glog]
<= go-git-package
go.importpath = github.com/golang/glog
repository = https://github.com/golang/glog
revision = 23def4e6c1
[go_github.com_kisielk_og-rek]
<= go-git-package
go.importpath = github.com/kisielk/og-rek
repository = https://github.com/kisielk/og-rek
revision = dd41cde712
[go_github.com_kylelemons_godebug]
<= go-git-package
go.importpath = github.com/kylelemons/godebug
repository = https://github.com/kylelemons/godebug
revision = d65d576e93
[go_github.com_pkg_errors]
<= go-git-package
go.importpath = github.com/pkg/errors
repository = https://github.com/pkg/errors
revision = v0.8.0-6-g2b3a18b5f0
[go_github.com_pkg_profile]
<= go-git-package
go.importpath = github.com/pkg/profile
repository = https://github.com/pkg/profile
revision = v1.2.1-0-g5b67d42886
[go_github.com_someonegg_gocontainer]
<= go-git-package
go.importpath = github.com/someonegg/gocontainer
repository = https://github.com/someonegg/gocontainer
revision = fc2c7e84b5
[go_golang.org_x_net]
<= go-git-package
go.importpath = golang.org/x/net
repository = https://go.googlesource.com/net
revision = 1087133bc4
[go_golang.org_x_perf]
<= go-git-package
go.importpath = golang.org/x/perf
repository = https://go.googlesource.com/perf
revision = 4469e6ce8c
[go_golang.org_x_sync]
<= go-git-package
go.importpath = golang.org/x/sync
repository = https://go.googlesource.com/sync
revision = 8e0aa688b6
[go_golang.org_x_tools]
<= go-git-package
go.importpath = golang.org/x/tools
repository = https://go.googlesource.com/tools
revision = 9bd2f44268
[go_lab.nexedi.com_kirr_go123]
<= go-git-package
go.importpath = lab.nexedi.com/kirr/go123
repository = https://lab.nexedi.com/kirr/go123.git
revision = 2578d58311
[go_lab.nexedi.com_kirr_neo]
<= go-git-package
go.importpath = lab.nexedi.com/kirr/neo
repository = https://lab.nexedi.com/kirr/neo.git
revision = v1.8-1326-g4d0cd89484
# TODO: an instance which runs this test automatically and periodically, and ingests the results into ERP5
# env.sh for putting everything needed to run neotest on path
. ${gowork:env.sh}
PATH="${ethtool:location}/sbin:$PATH"
PATH="${ioping:location}/bin:$PATH"
PATH="${lmbench:location}/bin:$PATH"
PATH="${mariadb:location}/bin:$PATH"
PATH="${mariadb:location}/scripts:$PATH"
PATH="${python2.7:location}/bin:$PATH"
# add all eggs and develop-eggs to py path
#
# XXX better leverage something like zc.recipe.egg to generate the eggs path the buildout way
# XXX see e.g. https://lab.nexedi.com/nexedi/slapos/merge_requests/242#note_49644 for why
# TODO(kirr) try doing so
PYTHONPATH=${buildout:develop-eggs-directory}:$PYTHONPATH # for .egg-link to be found by setuptools
for egglink in ${buildout:develop-eggs-directory}/*.egg-link; do
PYTHONPATH=`cat $egglink |head -1`:$PYTHONPATH
done
for egg in ${buildout:develop-eggs-directory}/*.egg ${buildout:eggs-directory}/*.egg; do
export PYTHONPATH=$egg:$PYTHONPATH
done
export PATH
export PYTHONPATH
export PS1="(neotest-`basename ${buildout:directory}`) $PS1"
#!/bin/bash -e
# neotest wrapper so it can be run without any environment preset
. ${buildout:directory}/neotest-env.sh
exec ${gowork:src}/lab.nexedi.com/kirr/neo/go/neo/t/neotest "$@"
# neotest SR with main components taken from latest git versions
[buildout]
extends = software.cfg
[go_lab.nexedi.com_kirr_go123]
branch = master
revision=
[go_lab.nexedi.com_kirr_neo]
branch = t
revision=
[lmbench-repository]
branch = x/kirr
revision=
[ioping-repository]
branch = x/hist
revision=
# NEO test software-release
#
# This software-release prepares the environment so that neotest NEO
# testing/benchmarking can be run in a SlapOS WebRunner.
[buildout]
extends =
../../stack/slapos.cfg
../neoppod/software-common.cfg
../../component/golang/buildout.cfg
gowork.cfg
../../component/wendelin.core/buildout.cfg
../../component/ethtool/buildout.cfg
../../component/ioping/buildout.cfg
../../component/lmbench/buildout.cfg
parts =
gowork
lmbench
ioping
ethtool
neoppod-develop
neoppod
wendelin.core-dev
eggs
neotest-env.sh
neotest
# for instance
slapos-deps-eggs
slapos-cookbook
# instance.cfg
# go packages to install (+ automatically their dependencies)
[gowork]
install =
lab.nexedi.com/kirr/neo/go/... \
github.com/pkg/profile \
golang.org/x/perf/cmd/benchstat
# dev-install neo from go checkout
[neoppod-develop]
setup = ${go_lab.nexedi.com_kirr_neo:location}
# neotest-env.sh + neotest wrapper
[buildout-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:_buildout_section_name_}.in
output = ${buildout:directory}/${:_buildout_section_name_}
[neotest-env.sh]
<= buildout-template
md5sum = 6e2203c7a86e8a3e8e56b8086115f5d4
[neotest]
<= buildout-template
output = ${buildout:bin-directory}/${:_buildout_section_name_}
mode = 0755
md5sum = fb3b4109128c1db1739ef5bb6abd1d94
# instance (TODO)
[instance.cfg]
<= buildout-template
md5sum = 410e1b2d72829824b28cc0299adb472e
# eggs:
[eggs]
recipe = zc.recipe.egg
eggs =
# wendelin.core still requires ZODB3 but having ZODB4 or 5 installed satisfies latest ZODB3
ZODB3
# also for wc
numpy
# to install not only wendelin.core modules but also scripts
wendelin.core
# for ZEO scripts (runzeo)
ZEO
# wendelin.core: latest not yet released
[wendelin.core-repository]
revision= v0.11-4-g38fbc83ceb
# ping eggs versions
[versions]
pyasn1 = 0.3.7
ZODB3 = 3.11.0
numpy = 1.13.3
zope.testing = 4.6.2
# Required by:
# ZEO==4.3.1
# ZODB==4.4.5
# zodburi==2.3.0
ZConfig = 3.2.0
# Required by:
# ZEO==4.3.1
# ZODB==4.4.5
zc.lockfile = 1.2.1
# Required by:
# slapos.toolbox==0.73
erp5.util = 0.4.49
...@@ -45,5 +45,5 @@ output = ${buildout:directory}/instance-nginx.cfg.in ...@@ -45,5 +45,5 @@ output = ${buildout:directory}/instance-nginx.cfg.in
mode = 0644 mode = 0644
[versions] [versions]
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
...@@ -65,4 +65,4 @@ mode = 0644 ...@@ -65,4 +65,4 @@ mode = 0644
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -110,8 +110,8 @@ gitdb = 0.6.4 ...@@ -110,8 +110,8 @@ gitdb = 0.6.4
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap = 0.9.0 smmap = 0.9.0
# Required by: # Required by:
......
...@@ -47,4 +47,4 @@ mode = 0644 ...@@ -47,4 +47,4 @@ mode = 0644
[versions] [versions]
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -71,21 +71,3 @@ mode = 0644 ...@@ -71,21 +71,3 @@ mode = 0644
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
collective.recipe.template collective.recipe.template
# Add slapos.libnetworkcache to path of slapos.core.
[slapos-cookbook]
eggs =
${lxml-python:egg}
slapos.cookbook
cliff
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
slapos.libnetworkcache
...@@ -11,4 +11,4 @@ extends = common.cfg ...@@ -11,4 +11,4 @@ extends = common.cfg
Pygments = 1.6 Pygments = 1.6
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
collective.recipe.template = 1.10 collective.recipe.template = 1.10
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -108,12 +108,16 @@ SSLProxyEngine On ...@@ -108,12 +108,16 @@ SSLProxyEngine On
# As the backend trusts the REMOTE_USER header, always unset it # As the backend trusts the REMOTE_USER header, always unset it
RequestHeader unset REMOTE_USER RequestHeader unset REMOTE_USER
RequestHeader unset SSL_CLIENT_SERIAL
{% if parameter_dict['ca-cert'] -%} {% if parameter_dict['ca-cert'] -%}
SSLVerifyClient require SSLVerifyClient optional
RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s
RequestHeader set SSL_CLIENT_SERIAL "%{SSL_CLIENT_M_SERIAL}s"
SSLCACertificateFile {{ parameter_dict['ca-cert'] }} SSLCACertificateFile {{ parameter_dict['ca-cert'] }}
{% if parameter_dict['crl'] -%}
SSLCARevocationCheck chain SSLCARevocationCheck chain
SSLCARevocationFile {{ parameter_dict['crl'] }} SSLCARevocationFile {{ parameter_dict['crl'] }}
{%- endif %}
{%- endif %} {%- endif %}
ErrorLog "{{ parameter_dict['error-log'] }}" ErrorLog "{{ parameter_dict['error-log'] }}"
...@@ -133,20 +137,20 @@ RewriteEngine On ...@@ -133,20 +137,20 @@ RewriteEngine On
Listen {{ ip }}:{{ port }} Listen {{ ip }}:{{ port }}
{% endfor -%} {% endfor -%}
<VirtualHost *:{{ port }}> <VirtualHost *:{{ port }}>
{% if enable_authentication -%} SSLEngine on
{% if enable_authentication and parameter_dict['shared-ca-cert'] and parameter_dict['shared-crl'] -%}
SSLVerifyClient require SSLVerifyClient require
# Custom block: we use different parameters for now.
RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s RequestHeader set REMOTE_USER %{SSL_CLIENT_S_DN_CN}s
SSLCACertificateFile {{ parameter_dict['shared-ca-cert'] }} SSLCACertificateFile {{ parameter_dict['shared-ca-cert'] }}
SSLCARevocationPath {{ parameter_dict['shared-crl'] }} SSLCARevocationPath {{ parameter_dict['shared-crl'] }}
LogFormat "%h %l %{REMOTE_USER}i %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined LogFormat "%h %l %{REMOTE_USER}i %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
# We would like to separate the authenticated logs. # We would like to separate the authenticated logs.
ErrorLog "{{ parameter_dict['log-dir'] }}/apache-service-error.log" ErrorLog "{{ parameter_dict['log-dir'] }}/apache-service-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/apache-service-access.log" combined CustomLog "{{ parameter_dict['log-dir'] }}/apache-service-access.log" combined
{% endif -%} {% endif -%}
SSLEngine on
RewriteRule ^/(.*) {{ backend }}/$1 [L,P] RewriteRule ^/(.*) {{ backend }}/$1 [L,P]
</VirtualHost> </VirtualHost>
{% endfor -%} {% endfor -%}
...@@ -15,16 +15,12 @@ ...@@ -15,16 +15,12 @@
# not need these here). # not need these here).
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = aa10af616493b97d2b630c46d869a9d7 md5sum = 4a77ee4a6367fee27552f8bfe9d87aab
[template-balancer] [template-balancer]
filename = instance-balancer.cfg.in filename = instance-balancer.cfg.in
md5sum = c5ce18fa4d4be9b9a2d789f3bbd37840 md5sum = c76c4b36c351b7383dad7bed04e3bc90
[template-apache-backend-conf] [template-apache-backend-conf]
filename = apache-backend.conf.in filename = apache-backend.conf.in
md5sum = ea77222f440bb72fee4939fe1b72976e md5sum = 991bfcc4b6abae2ec59b69d71013e8ad
[template-create-erp5-site-real]
filename = instance-create-erp5-site-real.cfg.in
md5sum = 86a2b244341218cd0c4b6d398c61ee20
{% set part_list = [] -%} {% set part_list = [] -%}
{% set ssl_parameter_dict = slapparameter_dict.get('ssl', {}) %} {% set ssl_parameter_dict = slapparameter_dict.get('ssl', {}) %}
{% set caucase_url = slapparameter_dict.get('caucase-url', '') -%}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%} {% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
{% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%} {% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%}
{% set shared_ca_path = slapparameter_dict['shared-certificate-authority-path'] -%} {% set shared_ca_path = slapparameter_dict['shared-certificate-authority-path'] -%}
...@@ -37,6 +38,56 @@ context = key content {{content_section_name}}:content ...@@ -37,6 +38,56 @@ context = key content {{content_section_name}}:content
mode = {{ mode }} mode = {{ mode }}
{%- endmacro %} {%- endmacro %}
[certificate-request-base]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/request-instance-certificate
parameters-extra = true
command-line = {{ parameter_dict['bin-directory'] }}/caucase-cliweb
--crt-file ${apache-conf-ssl:cert}
--key-file ${apache-conf-ssl:key}
--crl-file ${apache-conf-ssl:crl}
--ca-url {{ caucase_url }}
--ca-crt-file ${apache-conf-ssl:ca-cert}
{% macro request_cert(name, common_name) -%}
{% set get_crl_periodicity = slapparameter_dict.get('crl-update-periodicity', 'daily') -%}
[{{ section(name ~ '-certificate-request') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/request-{{ name }}-certificate
command-line =
${certificate-request-base:wrapper-path}
--cn {{ common_name }}
--request
[{{ section(name ~ '-renew-cron-entry') }}]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = {{ name }}-certificate-auto-renew
time = weekly
# 2592000 = 30*24*60*60 equivalent to one month in seconds
command = ${certificate-request-base:wrapper-path} --renew --threshold 2592000 --on-renew="${apache-graceful:output}"
[{{ section(name ~ '-download-crl') }}]
# download the crl for the first time
recipe = plone.recipe.command
command =
if [ ! -s "${apache-conf-ssl:crl}" ]; then
${certificate-request-base:wrapper-path} --update-crl
fi
update-command = ${:command}
stop-on-error = true
[{{ section(name ~ '-update-crl-cron-entry') }}]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = {{ name }}-update-crl
time = {{ get_crl_periodicity }}
# XXX - Updating the CRL triggers an Apache graceful restart; it's not recommended to check the CRL too often, as Apache
# has an issue with reload and can freeze and stop responding. Default periodicity: daily
command = ${certificate-request-base:wrapper-path} --update-crl --on-crl-update="${apache-graceful:output}"
{%- endmacro %}
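For clarity, here is a minimal Python sketch of the renewal rule that the --threshold option above encodes, assuming (as the cron comment suggests) that a certificate is renewed once its remaining validity drops below 30 days; the function name and sample dates are illustrative only:

# Minimal sketch, not part of the profile: the 30-day renewal threshold used
# by the *-certificate-auto-renew cron entry above.
from datetime import datetime, timedelta

RENEW_THRESHOLD = 30 * 24 * 60 * 60  # 2592000 seconds, i.e. 30 days

def should_renew(not_valid_after, now=None):
    # Renew when the certificate expires within the threshold.
    remaining = (not_valid_after - (now or datetime.utcnow())).total_seconds()
    return remaining < RENEW_THRESHOLD

print(should_renew(datetime.utcnow() + timedelta(days=10)))   # True
print(should_renew(datetime.utcnow() + timedelta(days=60)))   # False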
{% if use_ipv6 -%} {% if use_ipv6 -%}
[zope-tunnel-base] [zope-tunnel-base]
recipe = slapos.cookbook:ipv4toipv6 recipe = slapos.cookbook:ipv4toipv6
...@@ -82,6 +133,7 @@ ipv6 = {{ zope_address.split(']:')[0][1:] }} ...@@ -82,6 +133,7 @@ ipv6 = {{ zope_address.split(']:')[0][1:] }}
-#} -#}
{% do zope_family_address_list[0][0] -%} {% do zope_family_address_list[0][0] -%}
{% set haproxy_port = next_port() -%} {% set haproxy_port = next_port() -%}
{% set backend_path = slapparameter_dict['backend-path-dict'][family_name] -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, zope_family_address_list)) -%} {% do haproxy_dict.__setitem__(family_name, (haproxy_port, zope_family_address_list)) -%}
{% if has_webdav -%} {% if has_webdav -%}
{% set internal_scheme = 'http' -%}{# mod_rewrite does not recognise webdav scheme -#} {% set internal_scheme = 'http' -%}{# mod_rewrite does not recognise webdav scheme -#}
...@@ -90,8 +142,7 @@ ipv6 = {{ zope_address.split(']:')[0][1:] }} ...@@ -90,8 +142,7 @@ ipv6 = {{ zope_address.split(']:')[0][1:] }}
{% set internal_scheme = 'http' -%} {% set internal_scheme = 'http' -%}
{% set external_scheme = 'https' -%} {% set external_scheme = 'https' -%}
{% endif -%} {% endif -%}
{% set backend_path = slapparameter_dict['backend-path-dict'][family_name] -%} {% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'].get(family_name, False) -%}
{% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'][family_name] -%}
{% do apache_dict.__setitem__(family_name, (next_port(), external_scheme, internal_scheme ~ '://' ~ ipv4 ~ ':' ~ haproxy_port ~ backend_path, ssl_authentication)) -%} {% do apache_dict.__setitem__(family_name, (next_port(), external_scheme, internal_scheme ~ '://' ~ ipv4 ~ ':' ~ haproxy_port ~ backend_path, ssl_authentication)) -%}
{% endfor -%} {% endfor -%}
...@@ -125,9 +176,9 @@ crl = ${directory:apache-conf}/crl.pem ...@@ -125,9 +176,9 @@ crl = ${directory:apache-conf}/crl.pem
backend-list = {{ dumps(apache_dict.values()) }} backend-list = {{ dumps(apache_dict.values()) }}
ip-list = {{ dumps(apache_ip_list) }} ip-list = {{ dumps(apache_ip_list) }}
pid-file = ${directory:run}/apache.pid pid-file = ${directory:run}/apache.pid
log-dir = ${directory:log}
error-log = ${directory:log}/apache-error.log error-log = ${directory:log}/apache-error.log
access-log = ${directory:log}/apache-access.log access-log = ${directory:log}/apache-access.log
log-dir = ${directory:log}
# Apache 2.4's default value (60 seconds) can be a bit too short # Apache 2.4's default value (60 seconds) can be a bit too short
timeout = 300 timeout = 300
# Basic SSL server configuration # Basic SSL server configuration
...@@ -144,8 +195,6 @@ shared-ca-cert = {{ shared_ca_path }}/cacert.pem ...@@ -144,8 +195,6 @@ shared-ca-cert = {{ shared_ca_path }}/cacert.pem
shared-crl = {{ shared_ca_path }}/crl shared-crl = {{ shared_ca_path }}/crl
{%- endif %} {%- endif %}
[apache-conf] [apache-conf]
< = jinja2-template-base < = jinja2-template-base
template = {{ parameter_dict['template-apache-conf'] }} template = {{ parameter_dict['template-apache-conf'] }}
...@@ -156,6 +205,18 @@ context = section parameter_dict apache-conf-parameter-dict ...@@ -156,6 +205,18 @@ context = section parameter_dict apache-conf-parameter-dict
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/apache wrapper-path = ${directory:services}/apache
command-line = "{{ parameter_dict['apache'] }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND command-line = "{{ parameter_dict['apache'] }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND
wait-for-files =
${apache-conf-ssl:cert}
${apache-conf-ssl:key}
[apache-graceful]
recipe = collective.recipe.template
input = inline:
#!/bin/sh
kill -USR1 "$(cat '${apache-conf-parameter-dict:pid-file}')"
output = ${directory:bin}/apache-httpd-graceful
mode = 700
[{{ section('apache-promise') }}] [{{ section('apache-promise') }}]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently # Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
...@@ -164,7 +225,7 @@ path = ${directory:promise}/apache ...@@ -164,7 +225,7 @@ path = ${directory:promise}/apache
hostname = {{ ipv4 }} hostname = {{ ipv4 }}
port = {{ apache_dict.values()[0][0] }} port = {{ apache_dict.values()[0][0] }}
[publish] [{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
{% for family_name, (apache_port, scheme, _, _) in apache_dict.items() -%} {% for family_name, (apache_port, scheme, _, _) in apache_dict.items() -%}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ apache_port }}{% endif %} {{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ apache_port }}{% endif %}
...@@ -178,6 +239,11 @@ key = ${apache-ssl-key:rendered} ...@@ -178,6 +239,11 @@ key = ${apache-ssl-key:rendered}
cert = ${apache-ssl-cert:rendered} cert = ${apache-ssl-cert:rendered}
{{ simplefile('apache-ssl-key', '${apache-conf-ssl:key}', ssl_parameter_dict['key']) }} {{ simplefile('apache-ssl-key', '${apache-conf-ssl:key}', ssl_parameter_dict['key']) }}
{{ simplefile('apache-ssl-cert', '${apache-conf-ssl:cert}', ssl_parameter_dict['cert']) }} {{ simplefile('apache-ssl-cert', '${apache-conf-ssl:cert}', ssl_parameter_dict['cert']) }}
{% elif caucase_url -%}
key = ${apache-conf-ssl:key}
cert = ${apache-conf-ssl:cert}
{{ request_cert('erp5', 'instance.apache@erp5') }}
{% else %} {% else %}
recipe = plone.recipe.command recipe = plone.recipe.command
command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:key}" -out "${:cert}" command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:key}" -out "${:cert}"
...@@ -191,12 +257,15 @@ cert = ${apache-ssl-ca:rendered} ...@@ -191,12 +257,15 @@ cert = ${apache-ssl-ca:rendered}
crl = ${apache-ssl-crl:rendered} crl = ${apache-ssl-crl:rendered}
{{ simplefile('apache-ssl-ca', '${apache-conf-ssl:ca-cert}', ssl_parameter_dict['ca-cert']) }} {{ simplefile('apache-ssl-ca', '${apache-conf-ssl:ca-cert}', ssl_parameter_dict['ca-cert']) }}
{{ simplefile('apache-ssl-crl', '${apache-conf-ssl:crl}', ssl_parameter_dict['crl']) }} {{ simplefile('apache-ssl-crl', '${apache-conf-ssl:crl}', ssl_parameter_dict['crl']) }}
{% elif caucase_url -%}
cert = ${apache-conf-ssl:ca-cert}
crl = ${apache-conf-ssl:crl}
{% else %} {% else %}
cert = cert =
crl = crl =
{%- endif %} {%- endif %}
{% set apache_service_log_list = {} -%} {% set apache_service_log_list = {} -%}
{% for family_name, (_, _, _, authentication) in apache_dict.items() -%} {% for family_name, (_, _, _, authentication) in apache_dict.items() -%}
{% if authentication -%} {% if authentication -%}
...@@ -211,7 +280,7 @@ post = test ! -s ${apache-conf-parameter-dict:pid-file} || {{ parameter_dict['bi ...@@ -211,7 +280,7 @@ post = test ! -s ${apache-conf-parameter-dict:pid-file} || {{ parameter_dict['bi
{% endif -%} {% endif -%}
{% endfor -%} {% endfor -%}
[logrotate-apache] [{{ section('logrotate-apache') }}]
< = logrotate-entry-base < = logrotate-entry-base
name = apache name = apache
log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log} log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log}
...@@ -222,7 +291,7 @@ recipe = slapos.cookbook:mkdirectory ...@@ -222,7 +291,7 @@ recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache apache-conf = ${:etc}/apache
bin = ${buildout:directory}/bin bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc etc = ${buildout:directory}/etc
promise = ${directory:etc}/promise promise = ${:etc}/promise
services = ${:etc}/run services = ${:etc}/run
var = ${buildout:directory}/var var = ${buildout:directory}/var
run = ${:var}/run run = ${:var}/run
...@@ -235,39 +304,33 @@ newcerts = ${:ca-dir}/newcerts ...@@ -235,39 +304,33 @@ newcerts = ${:ca-dir}/newcerts
crl = ${:ca-dir}/crl crl = ${:ca-dir}/crl
apachedex = ${monitor-directory:private}/apachedex apachedex = ${monitor-directory:private}/apachedex
[monitor-generate-apachedex-report] [{{ section('monitor-generate-apachedex-report') }}]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
wrapper-path = ${monitor-directory:reports}/${:command} wrapper-path = ${monitor-directory:reports}/${:command}
command-line = "{{ parameter_dict['run-apachedex-location'] }}" "{{ parameter_dict['apachedex-location'] }}" "${directory:apachedex}" --default "${apachedex-parameters:default}" --apache-log-list "${apachedex-parameters:apache-log-list}" --base-list "${apachedex-parameters:base-list}" --skip-base-list "${apachedex-parameters:skip-base-list}" --erp5-base-list "${apachedex-parameters:erp5-base-list}" command-line = "{{ parameter_dict['run-apachedex-location'] }}" "{{ parameter_dict['apachedex-location'] }}" "${directory:apachedex}" ${monitor-publish-parameters:monitor-base-url}/private/apachedex --apache-log-list "${apachedex-parameters:apache-log-list}" --config "${apachedex-parameters:configuration}"
command = apachedex_every_3_hour command = apachedex_every_23_hour
[apachedex-parameters] [apachedex-parameters]
default_parameter =
# XXX - Sample log file with curent date: apache_access.log-%(date)s.gz # XXX - Sample log file with curent date: apache_access.log-%(date)s.gz
# which will be equivalent to apache_access.log-20150112.gz if the date is 2015-01-12 # which will be equivalent to apache_access.log-20150112.gz if the date is 2015-01-12
apache-log-list = ${apache-conf-parameter-dict:access-log} apache-log-list = ${apache-conf-parameter-dict:access-log}
default = ${monitor-directory:etc}/apdex_default configuration = {{ slapparameter_dict['apachedex-configuration'] }}
base-list = ${monitor-directory:etc}/apdex_base_list promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
skip-base-list = ${monitor-directory:etc}/apdex_skip_base_list
erp5-base-list = ${monitor-directory:etc}/apdex_erp5_base_list [{{ section('monitor-promise-apachedex-result') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/check-apachedex-result
command-line = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[monitor-instance-parameter] [monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }} monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
monitor-httpd-port = {{ next_port() }} monitor-httpd-port = {{ next_port() }}
monitor-title = Balancer monitor monitor-title = {{ slapparameter_dict['name'] }}
password = {{ slapparameter_dict['monitor-passwd'] }} password = {{ slapparameter_dict['monitor-passwd'] }}
instance-configuration =
file apachedex-default ${apachedex-parameters:default}
file apachedex-base-list ${apachedex-parameters:base-list}
file apachedex-skip-base-list ${apachedex-parameters:skip-base-list}
file apachedex-erp5-base-list ${apachedex-parameters:erp5-base-list}
[buildout] [buildout]
extends = extends =
{{ logrotate_cfg }} {{ logrotate_cfg }}
{{ parameter_dict['template-monitor'] }} {{ parameter_dict['template-monitor'] }}
parts += parts +=
publish
logrotate-apache
monitor-generate-apachedex-report
{{ part_list | join('\n ') }} {{ part_list | join('\n ') }}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
services = ${:etc}/run
promise = ${:etc}/promise
[erp5-bootstrap]
recipe = slapos.cookbook:erp5.bootstrap
runner-path = ${directory:services}/erp5-bootstrap
{# Note: a random domain name will be picked if several point to the same IP -#}
{% set reverse_hosts = {} -%}
{% for x, y in publish['hosts-dict'].iteritems() -%}
{% do reverse_hosts.__setitem__(y, x) -%}
{% endfor -%}
{# XXX: Expect the first database to be the one to use for catalog. -#}
{% set mysql_parsed = urlparse.urlparse(publish['mariadb-database-list'][0]) -%}
mysql-url = {{ dumps(urlparse.urlunparse(mysql_parsed[:1] + (mysql_parsed.username + ":" + mysql_parsed.password + "@" + reverse_hosts.get(mysql_parsed.hostname, mysql_parsed.hostname) + ':' ~ mysql_parsed.port, ) + mysql_parsed[2:])) }}
{# Pick the first http[s] family found; they should all be equivalent anyway. -#}
{# Don't pick an http[s] family configured with ssl-authentication=true. By convention, such family names contain 'service'. -#}
{% set family_list = [] -%}
{% for key, value in publish.items() -%}
{% if key.startswith('family-') and value.startswith('http') and not 'service' in key -%}
{% do family_list.append(value.split('://', 1)) -%}
{% endif -%}
{% endfor -%}
zope-url = {{ dumps(family_list[0][0] + '://' + publish['inituser-login'] + ':' + publish_early['inituser-password'] + '@' + family_list[0][1] + '/' + publish['site-id']) }}
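The mysql-url and zope-url above are rebuilt from the published parameters; the following standalone Python 2 sketch mirrors the urlparse mechanics used for mysql-url (host names, credentials and ports are invented):

# Sketch only: mirrors the mysql-url rewriting done in [erp5-bootstrap].
import urlparse

publish = {
    'hosts-dict': {'mariadb-host-1': '10.0.0.5'},                    # sample
    'mariadb-database-list': ['mysql://user:pw@10.0.0.5:3306/erp5'], # sample
}
# Reverse name -> IP into IP -> name, as done with reverse_hosts above.
reverse_hosts = dict((y, x) for x, y in publish['hosts-dict'].iteritems())

parsed = urlparse.urlparse(publish['mariadb-database-list'][0])
netloc = '%s:%s@%s:%s' % (parsed.username, parsed.password,
                          reverse_hosts.get(parsed.hostname, parsed.hostname),
                          parsed.port)
print(urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]))
# -> mysql://user:pw@mariadb-host-1:3306/erp5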
[promise-erp5-site]
recipe = slapos.cookbook:check_url_available
url = ${erp5-bootstrap:zope-url}
path = ${directory:promise}/erp5-site
dash_path = {{ parameter_dict['dash-location'] }}/bin/dash
curl_path = {{ parameter_dict['curl-location'] }}/bin/curl
[buildout]
parts = promise-erp5-site
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
{% import "root_common" as root_common with context %} {% import "root_common" as root_common with context -%}
{% set frontend_dict = slapparameter_dict.get('frontend', {}) -%} {% set frontend_dict = slapparameter_dict.get('frontend', {}) -%}
{% set has_frontend = frontend_dict.get('software-url', '') != '' -%} {% set has_frontend = frontend_dict.get('software-url', '') != '' -%}
{% set site_id = slapparameter_dict.get('site-id', 'erp5') -%} {% set site_id = slapparameter_dict.get('site-id', 'erp5') -%}
...@@ -9,7 +9,9 @@ ...@@ -9,7 +9,9 @@
{% set has_jupyter = jupyter_dict.get('enable', jupyter_enable_default.lower() in ('true', 'yes')) -%} {% set has_jupyter = jupyter_dict.get('enable', jupyter_enable_default.lower() in ('true', 'yes')) -%}
{% set jupyter_zope_family = jupyter_dict.get('zope-family', '') -%} {% set jupyter_zope_family = jupyter_dict.get('zope-family', '') -%}
{% set monitor_base_url_dict = {} -%} {% set monitor_base_url_dict = {} -%}
{% set caucase_url = slapparameter_dict.get('caucase', {}).pop('url', '') -%}
{% set monitor_dict = slapparameter_dict.get('monitor', {}) %} {% set monitor_dict = slapparameter_dict.get('monitor', {}) %}
{% set crl_update_period = slapparameter_dict.get('caucase', {}).pop('crl-update-periodicity', 'daily') -%}
[request-common] [request-common]
<= request-common-base <= request-common-base
config-use-ipv6 = {{ dumps(slapparameter_dict.get('use-ipv6', False)) }} config-use-ipv6 = {{ dumps(slapparameter_dict.get('use-ipv6', False)) }}
...@@ -52,6 +54,14 @@ config-name = {{ name }} ...@@ -52,6 +54,14 @@ config-name = {{ name }}
connection-url = smtp://127.0.0.2:0/ connection-url = smtp://127.0.0.2:0/
{%- endif %} {%- endif %}
{% if caucase_url -%}
{% do publish_dict.__setitem__('caucase-http-url', caucase_url) -%}
[request-caucase]
connection-http-url = {{ caucase_url }}
{%- else %}
{{ request('caucase', 'caucase', 'caucase', {'server-port': 8890, 'server-https-port': 8891, 'auto-sign-csr-amount': 2}, {'http-url': True, 'https-url': False}) }}
{% endif -%}
{# ZODB -#} {# ZODB -#}
{% set zodb_dict = {} -%} {% set zodb_dict = {} -%}
{% set storage_dict = {} -%} {% set storage_dict = {} -%}
...@@ -95,16 +105,16 @@ recipe = slapos.cookbook:publish-early ...@@ -95,16 +105,16 @@ recipe = slapos.cookbook:publish-early
{%- if neo %} {%- if neo %}
neo-cluster gen-neo-cluster:name neo-cluster gen-neo-cluster:name
{%- if neo[0] %} {%- if neo[0] %}
neo-cluster = {{ neo[0] }} neo-cluster = {{ dumps(neo[0]) }}
{%- endif %} {%- endif %}
{%- endif %} {%- endif %}
{%- set inituser_password = slapparameter_dict.get('inituser-password') %} {%- set inituser_password = slapparameter_dict.get('inituser-password') %}
{%- if inituser_password %} {%- if inituser_password %}
inituser-password = {{ inituser_password }} inituser-password = {{ dumps(inituser_password) }}
{%- endif %} {%- endif %}
{%- set deadlock_debugger_password = slapparameter_dict.get('deadlock-debugger-password') -%} {%- set deadlock_debugger_password = slapparameter_dict.get('deadlock-debugger-password') -%}
{%- if deadlock_debugger_password %} {%- if deadlock_debugger_password %}
deadlock-debugger-password = {{ deadlock_debugger_password }} deadlock-debugger-password = {{ dumps(deadlock_debugger_password) }}
{%- endif %} {%- endif %}
[gen-password] [gen-password]
...@@ -133,17 +143,19 @@ return = ...@@ -133,17 +143,19 @@ return =
zope-address-list zope-address-list
hosts-dict hosts-dict
monitor-base-url monitor-base-url
{% set bt5_default_list = 'erp5_full_text_myisam_catalog erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_ung erp5_configurator_run_my_doc slapos_configurator' -%} {% set bt5_default_list = 'erp5_full_text_myisam_catalog slapos_configurator' -%}
{% if has_jupyter -%} {% if has_jupyter -%}
{% set bt5_default_list = bt5_default_list + ' erp5_data_notebook' -%} {% set bt5_default_list = bt5_default_list + ' erp5_data_notebook' -%}
{% endif -%} {% endif -%}
config-bt5 = {{ dumps(slapparameter_dict.get('bt5', bt5_default_list)) }} config-bt5 = {{ dumps(slapparameter_dict.get('bt5', bt5_default_list)) }}
config-bt5-repository-url = {{ dumps(slapparameter_dict.get('bt5-repository-url', local_bt5_repository)) }} config-bt5-repository-url = {{ dumps(slapparameter_dict.get('bt5-repository-url', local_bt5_repository)) }}
config-caucase-url = ${request-caucase:connection-http-url}
config-cloudooo-url = ${request-cloudooo:connection-url} config-cloudooo-url = ${request-cloudooo:connection-url}
config-deadlock-debugger-password = ${publish-early:deadlock-debugger-password} config-deadlock-debugger-password = ${publish-early:deadlock-debugger-password}
config-developer-list = {{ dumps(slapparameter_dict.get('developer-list', [inituser_login])) }} config-developer-list = {{ dumps(slapparameter_dict.get('developer-list', [inituser_login])) }}
config-hosts-dict = {{ dumps(slapparameter_dict.get('hosts-dict', {})) }} config-hosts-dict = {{ dumps(slapparameter_dict.get('hosts-dict', {})) }}
config-hostalias-dict = {{ dumps(slapparameter_dict.get('hostalias-dict', {})) }} config-hostalias-dict = {{ dumps(slapparameter_dict.get('hostalias-dict', {})) }}
config-id-store-interval = {{ dumps(slapparameter_dict.get('id-store-interval')) }}
config-inituser-login = {{ dumps(inituser_login) }} config-inituser-login = {{ dumps(inituser_login) }}
config-inituser-password = ${publish-early:inituser-password} config-inituser-password = ${publish-early:inituser-password}
config-kumofs-url = ${request-memcached-persistent:connection-url} config-kumofs-url = ${request-memcached-persistent:connection-url}
...@@ -154,7 +166,7 @@ config-mysql-url-list = ${request-mariadb:connection-database-list} ...@@ -154,7 +166,7 @@ config-mysql-url-list = ${request-mariadb:connection-database-list}
config-site-id = {{ dumps(site_id) }} config-site-id = {{ dumps(site_id) }}
config-smtp-url = ${request-smtp:connection-url} config-smtp-url = ${request-smtp:connection-url}
config-timezone = {{ dumps(slapparameter_dict.get('timezone', 'UTC')) }} config-timezone = {{ dumps(slapparameter_dict.get('timezone', 'UTC')) }}
config-wendelin-core-zblk-fmt = {{ dumps(slapparameter_dict.get('wendelin-core-zblk-fmt', '')) }} config-wendelin-core-zblk-fmt = {{ dumps(slapparameter_dict.get('wendelin-core-zblk-fmt', '')) }}
config-ca-path = ${directory:ca-dir} config-ca-path = ${directory:ca-dir}
config-zodb-dict = {{ dumps(zodb_dict) }} config-zodb-dict = {{ dumps(zodb_dict) }}
{% for server_type, server_dict in storage_dict.iteritems() -%} {% for server_type, server_dict in storage_dict.iteritems() -%}
...@@ -171,23 +183,22 @@ config-tidstorage-port = ${request-zodb:connection-tidstorage-port} ...@@ -171,23 +183,22 @@ config-tidstorage-port = ${request-zodb:connection-tidstorage-port}
software-type = zope software-type = zope
{% set zope_family_dict = {} -%} {% set zope_family_dict = {} -%}
{% set jupyter_zope_family_default = [] -%}
{% set zope_backend_path_dict = {} -%} {% set zope_backend_path_dict = {} -%}
{% set ssl_authentication_dict = {} -%} {% set ssl_authentication_dict = {} -%}
{% set jupyter_zope_family_default = [] -%}
{% for custom_name, zope_parameter_dict in zope_partition_dict.items() -%} {% for custom_name, zope_parameter_dict in zope_partition_dict.items() -%}
{% set partition_name = 'zope-' ~ custom_name -%} {% set partition_name = 'zope-' ~ custom_name -%}
{% set section_name = 'request-' ~ partition_name -%} {% set section_name = 'request-' ~ partition_name -%}
{% set backend_path = zope_parameter_dict.get('backend-path', '/') % {'site-id': site_id} %}
{% set zope_family = zope_parameter_dict.get('family', 'default') -%} {% set zope_family = zope_parameter_dict.get('family', 'default') -%}
{% set backend_path = zope_parameter_dict.get('backend-path', '/') % {'site-id': site_id} %}
{# # default jupyter zope family is first zope family. -#} {# # default jupyter zope family is first zope family. -#}
{# # use list.append() to update it, because in jinja2 set changes only local scope. -#} {# # use list.append() to update it, because in jinja2 set changes only local scope. -#}
{% if not jupyter_zope_family_default -%} {% if not jupyter_zope_family_default -%}
{% do jupyter_zope_family_default.append(zope_family) -%} {% do jupyter_zope_family_default.append(zope_family) -%}
{% endif -%} {% endif -%}
{% do zope_family_dict.setdefault(zope_family, []).append(section_name) -%} {% do zope_family_dict.setdefault(zope_family, []).append(section_name) -%}
{% do zope_backend_path_dict.setdefault(zope_parameter_dict.get('family', 'default'), backend_path) -%} {% do zope_backend_path_dict.__setitem__(zope_family, backend_path) -%}
{% do ssl_authentication_dict.setdefault(zope_parameter_dict.get('family', 'default'), zope_parameter_dict.get('ssl-authentication', False)) -%} {% do ssl_authentication_dict.__setitem__(zope_family, zope_parameter_dict.get('ssl-authentication', False)) -%}
[{{ section_name }}] [{{ section_name }}]
<= request-zope-base <= request-zope-base
name = {{ partition_name }} name = {{ partition_name }}
...@@ -201,7 +212,6 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque ...@@ -201,7 +212,6 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque
config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }} config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }} config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }} config-webdav = {{ dumps(zope_parameter_dict.get('webdav', False)) }}
config-name = {{ partition_name }}
{% endfor -%} {% endfor -%}
{# if not explicitly configured, connect jupyter to first zope family, which -#} {# if not explicitly configured, connect jupyter to first zope family, which -#}
...@@ -211,7 +221,6 @@ config-name = {{ partition_name }} ...@@ -211,7 +221,6 @@ config-name = {{ partition_name }}
{% endif -%} {% endif -%}
{# We need to concatenate lists that we cannot read as lists, so this gets hairy. -#} {# We need to concatenate lists that we cannot read as lists, so this gets hairy. -#}
{% set zope_address_list_id_dict = {} -%}
{% set zope_family_parameter_dict = {} -%} {% set zope_family_parameter_dict = {} -%}
{% for family_name, zope_section_id_list in zope_family_dict.items() -%} {% for family_name, zope_section_id_list in zope_family_dict.items() -%}
{% for zope_section_id in zope_section_id_list -%} {% for zope_section_id in zope_section_id_list -%}
...@@ -300,6 +309,7 @@ return = ...@@ -300,6 +309,7 @@ return =
{{ family }}-v6 {{ family }}-v6
{% endfor -%} {% endfor -%}
{% do monitor_base_url_dict.__setitem__('request-balancer', '${' ~ 'request-balancer' ~ ':connection-monitor-base-url}') -%} {% do monitor_base_url_dict.__setitem__('request-balancer', '${' ~ 'request-balancer' ~ ':connection-monitor-base-url}') -%}
config-zope-family-dict = {{ dumps(zope_family_parameter_dict) }} config-zope-family-dict = {{ dumps(zope_family_parameter_dict) }}
config-tcpv4-port = {{ dumps(balancer_dict.get('tcpv4-port', 2150)) }} config-tcpv4-port = {{ dumps(balancer_dict.get('tcpv4-port', 2150)) }}
{% for zope_section_id, name in zope_address_list_id_dict.items() -%} {% for zope_section_id, name in zope_address_list_id_dict.items() -%}
...@@ -307,16 +317,17 @@ config-{{ name }} = {{ ' ${' ~ zope_section_id ~ ':connection-zope-address-list} ...@@ -307,16 +317,17 @@ config-{{ name }} = {{ ' ${' ~ zope_section_id ~ ':connection-zope-address-list}
{% endfor -%} {% endfor -%}
# XXX: should those really be same for all families ? # XXX: should those really be same for all families ?
config-haproxy-server-check-path = {{ dumps(balancer_dict.get('haproxy-server-check-path', '/') % {'site-id': site_id}) }} config-haproxy-server-check-path = {{ dumps(balancer_dict.get('haproxy-server-check-path', '/') % {'site-id': site_id}) }}
config-backend-path = {{ dumps(balancer_dict.get('apache-backend-path', '/') % {'site-id': site_id}) }}
config-ssl = {{ dumps(balancer_dict.get('ssl', {})) }} config-ssl = {{ dumps(balancer_dict.get('ssl', {})) }}
config-backend-path-dict = {{ dumps(zope_backend_path_dict) }}
config-ssl-authentication-dict = {{ dumps(ssl_authentication_dict) }}
config-shared-certificate-authority-path = ${directory:ca-dir}
config-monitor-passwd = ${monitor-htpasswd:passwd} config-monitor-passwd = ${monitor-htpasswd:passwd}
config-name = ${:name} config-name = ${:name}
config-caucase-url = ${request-caucase:connection-http-url}
config-crl-update-periodicity = {{ crl_update_period }}
config-shared-certificate-authority-path = ${directory:ca-dir}
config-backend-path-dict = {{ dumps(zope_backend_path_dict) }}
config-ssl-authentication-dict = {{ dumps(ssl_authentication_dict) }}
config-apachedex-promise-threshold = {{ dumps(monitor_dict.get('apachedex-promise-threshold', 70)) }} config-apachedex-promise-threshold = {{ dumps(monitor_dict.get('apachedex-promise-threshold', 70)) }}
config-apachedex-configuration = {{ dumps(monitor_dict.get('apachedex-configuration', config-apachedex-configuration = {{ dumps(monitor_dict.get('apachedex-configuration',
'--erp5-base "/erp5(/|$|/\?)" --skip-user-agent Zabbix --error-detail --js-embed --quiet')) }} '--erp5-base "/erp5(/|$|/\?)" --skip-user-agent Zabbix --error-detail --js-embed --quiet')) }}
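The default apachedex configuration above selects ERP5 URLs with the regular expression /erp5(/|$|/\?); a quick standalone check of which request paths it matches (the sample paths are invented):

# Sketch: behaviour of the default --erp5-base pattern on a few sample paths.
import re

erp5_base = re.compile(r'/erp5(/|$|/\?)')
for path in ('/erp5', '/erp5/web_site_module/site', '/erp5/?skin=View',
             '/erp5_extra', '/static/logo.png'):
    print('%-28s %s' % (path, bool(erp5_base.search(path))))
# /erp5, /erp5/... and /erp5/?... match; /erp5_extra and other paths do not.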
[request-frontend-base] [request-frontend-base]
{% if has_frontend -%} {% if has_frontend -%}
...@@ -365,10 +376,10 @@ monitor-httpd-port = 8386 ...@@ -365,10 +376,10 @@ monitor-httpd-port = 8386
[buildout] [buildout]
extends = {{ template_monitor }} extends = {{ template_monitor }}
parts +=
parts +=
apache-certificate-authority apache-certificate-authority
fix-ca-folder fix-ca-folder
publish
monitor-base monitor-base
[monitor-conf-parameters] [monitor-conf-parameters]
......
...@@ -58,9 +58,5 @@ filename = instance-balancer.cfg.in ...@@ -58,9 +58,5 @@ filename = instance-balancer.cfg.in
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
filename = apache-backend.conf.in filename = apache-backend.conf.in
[template-create-erp5-site-real]
< = download-base-part
filename = instance-create-erp5-site-real.cfg.in
[versions] [versions]
python-memcached = 1.47 python-memcached = 1.47
...@@ -97,4 +97,4 @@ mode = 640 ...@@ -97,4 +97,4 @@ mode = 640
Pygments = 2.1.3 Pygments = 2.1.3
collective.recipe.template = 1.10 collective.recipe.template = 1.10
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
...@@ -19,11 +19,11 @@ md5sum = 713db528880282d568278f09458d2aab ...@@ -19,11 +19,11 @@ md5sum = 713db528880282d568278f09458d2aab
[template-runner] [template-runner]
filename = instance-runner.cfg filename = instance-runner.cfg
md5sum = 8f49df215a1596efcec94e2cca009711 md5sum = 7df86928aeef0fbada832b0283b9a454
[template-runner-import-script] [template-runner-import-script]
filename = template/runner-import.sh.jinja2 filename = template/runner-import.sh.jinja2
md5sum = 130193114cbbcd014af9704851410605 md5sum = 1edd9c7a20e208b6cb647886bfb6d1bb
[instance-runner-import] [instance-runner-import]
filename = instance-runner-import.cfg.in filename = instance-runner-import.cfg.in
...@@ -31,7 +31,7 @@ md5sum = 5cfa49bcf20612844e1c50a85740d0b3 ...@@ -31,7 +31,7 @@ md5sum = 5cfa49bcf20612844e1c50a85740d0b3
[template-runner-export-script] [template-runner-export-script]
filename = template/runner-export.sh.jinja2 filename = template/runner-export.sh.jinja2
md5sum = 94c0eddb2af3290942f64e04f95a707c md5sum = 98ce179badc6af5979a64a7c3d0a2ceb
[instance-runner-export] [instance-runner-export]
filename = instance-runner-export.cfg.in filename = instance-runner-export.cfg.in
...@@ -55,7 +55,7 @@ md5sum = 525e37ea8b2acf6209869999b15071a6 ...@@ -55,7 +55,7 @@ md5sum = 525e37ea8b2acf6209869999b15071a6
[template-slapos-cfg] [template-slapos-cfg]
filename = template/slapos.cfg.in filename = template/slapos.cfg.in
md5sum = ef16446d432e1397182b1654fe920ffb md5sum = 035e027e9cb9bbdca0509ac895fc4696
[template-parameters] [template-parameters]
filename = parameters.xml.in filename = parameters.xml.in
...@@ -63,11 +63,7 @@ md5sum = f8446fcf254b4929eb828a9a1d7e5f62 ...@@ -63,11 +63,7 @@ md5sum = f8446fcf254b4929eb828a9a1d7e5f62
[template-bash-profile] [template-bash-profile]
filename = template/bash_profile.in filename = template/bash_profile.in
md5sum = 1c88cbca0c1e705eeb4b544ef4616097 md5sum = 712ca70488051f97e7a7b11a02a06bb1
[template-bashrc]
filename = template/bashrc.in
md5sum = d0b9060ec96d4d5b188e36c84240ea7d
[template-supervisord] [template-supervisord]
filename = template/supervisord.conf.in filename = template/supervisord.conf.in
......
...@@ -13,6 +13,7 @@ extends = ...@@ -13,6 +13,7 @@ extends =
../../component/nano/buildout.cfg ../../component/nano/buildout.cfg
../../component/nginx/buildout.cfg ../../component/nginx/buildout.cfg
../../component/openssh/buildout.cfg ../../component/openssh/buildout.cfg
../../component/mosh/buildout.cfg
../../component/rsync/buildout.cfg ../../component/rsync/buildout.cfg
../../component/pycurl/buildout.cfg ../../component/pycurl/buildout.cfg
../../component/python-2.7/buildout.cfg ../../component/python-2.7/buildout.cfg
...@@ -137,15 +138,10 @@ filename = resilient_software_release_information.py.in ...@@ -137,15 +138,10 @@ filename = resilient_software_release_information.py.in
< = template-download-base < = template-download-base
filename = slapos-slapuser-script.in filename = slapos-slapuser-script.in
[template-bashrc]
< = template-download-base
filename = bashrc.in
[eggs] [eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
${pycurl:egg} ${pycurl:egg}
collective.recipe.environment
collective.recipe.template collective.recipe.template
cns.recipe.symlink cns.recipe.symlink
erp5.util erp5.util
...@@ -153,12 +149,10 @@ eggs = ...@@ -153,12 +149,10 @@ eggs =
plone.recipe.command plone.recipe.command
slapos.recipe.build slapos.recipe.build
slapos.toolbox[flask_auth] slapos.toolbox[flask_auth]
# gunicorn downgraded because of bug in latter versions gunicorn==19.7.1
# setting version here allows to use an older version in tests
# see more here : https://lab.nexedi.com/nexedi/slapos/commit/391b026e22b05e9a6fba5e063f64f18544a34b92
gunicorn==19.4.5
futures futures
${slapos-cookbook:eggs} ${slapos-cookbook:eggs}
slapos.core # listed explicitly for scripts generation
[extra-eggs] [extra-eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
......
...@@ -137,6 +137,8 @@ working-directory = $${runnerdirectory:home} ...@@ -137,6 +137,8 @@ working-directory = $${runnerdirectory:home}
project-directory = $${runnerdirectory:project} project-directory = $${runnerdirectory:project}
instance_root = $${runnerdirectory:instance-root} instance_root = $${runnerdirectory:instance-root}
software_root = $${runnerdirectory:software-root} software_root = $${runnerdirectory:software-root}
pidfile-software = $${directory:run}/slapgrid-cp.pid
pidfile-instance = $${directory:run}/slapgrid-sr.pid
ssh_client = ${openssh:location}/bin/ssh ssh_client = ${openssh:location}/bin/ssh
public_key = $${runner-sshd-raw-server:rsa-keyfile}.pub public_key = $${runner-sshd-raw-server:rsa-keyfile}.pub
private_key = $${runner-sshd-raw-server:rsa-keyfile} private_key = $${runner-sshd-raw-server:rsa-keyfile}
...@@ -167,7 +169,7 @@ minishell_cwd_file = $${directory:etc}/.minishell-cwd ...@@ -167,7 +169,7 @@ minishell_cwd_file = $${directory:etc}/.minishell-cwd
minishell_history_file = $${directory:etc}/.minishell_history minishell_history_file = $${directory:etc}/.minishell_history
software_info_json = $${runnerdirectory:home}/software_info.json software_info_json = $${runnerdirectory:home}/software_info.json
instance_info_json = $${runnerdirectory:home}/instance_info.json instance_info_json = $${runnerdirectory:home}/instance_info.json
path = $${shell:path} path = $${shell-environment:path}
instance_name = $${slap-parameter:instance-name} instance_name = $${slap-parameter:instance-name}
default_repository = $${slap-parameter:slapos-repository} default_repository = $${slap-parameter:slapos-repository}
...@@ -201,7 +203,8 @@ recipe = slapos.cookbook:wrapper ...@@ -201,7 +203,8 @@ recipe = slapos.cookbook:wrapper
arguments = --server_url=$${slap-connection:server-url} --key_file=$${slap-connection:key-file} --cert_file=$${slap-connection:cert-file} --computer_id=$${slap-connection:computer-id} --partition_id=$${slap-connection:partition-id} arguments = --server_url=$${slap-connection:server-url} --key_file=$${slap-connection:key-file} --cert_file=$${slap-connection:cert-file} --computer_id=$${slap-connection:computer-id} --partition_id=$${slap-connection:partition-id}
command-line = ${buildout:directory}/bin/slaprunnertest $${:arguments} command-line = ${buildout:directory}/bin/slaprunnertest $${:arguments}
wrapper-path = $${directory:bin}/runTestSuite wrapper-path = $${directory:bin}/runTestSuite
environment = RUNNER_CONFIG=$${slapos-cfg:rendered} environment = PATH=$${shell-environment:path}
RUNNER_CONFIG=$${slapos-cfg:rendered}
parameters-extra = true parameters-extra = true
# Deploy openssh-server # Deploy openssh-server
...@@ -226,7 +229,7 @@ template = inline: ...@@ -226,7 +229,7 @@ template = inline:
PasswordAuthentication no PasswordAuthentication no
PubkeyAuthentication yes PubkeyAuthentication yes
AuthorizedKeysFile $${buildout:directory}/.ssh/authorized_keys AuthorizedKeysFile $${buildout:directory}/.ssh/authorized_keys
ForceCommand if [ -z "$SSH_ORIGINAL_COMMAND" ]; then ${bash:location}/bin/bash -l; else eval "$SSH_ORIGINAL_COMMAND"; fi ForceCommand if [ -z "$SSH_ORIGINAL_COMMAND" ]; then $${shell-environment:shell} -l; else SHELL=$${shell-environment:shell} PATH=$${shell-environment:path} eval "$SSH_ORIGINAL_COMMAND"; fi
Subsystem sftp ${openssh:location}/libexec/sftp-server Subsystem sftp ${openssh:location}/libexec/sftp-server
[runner-sshd-raw-server] [runner-sshd-raw-server]
...@@ -424,7 +427,7 @@ recipe = slapos.cookbook:wrapper ...@@ -424,7 +427,7 @@ recipe = slapos.cookbook:wrapper
command-line = $${gunicorn:bin_gunicorn} slapos.runner.run:app -p $${gunicorn:path_pid} -b unix:$${gunicorn:socket} -e RUNNER_CONFIG=$${slaprunner:slapos.cfg} --error-logfile $${directory:log}/$${:error-log-file} --timeout 200 --threads 3 --log-level error --preload command-line = $${gunicorn:bin_gunicorn} slapos.runner.run:app -p $${gunicorn:path_pid} -b unix:$${gunicorn:socket} -e RUNNER_CONFIG=$${slaprunner:slapos.cfg} --error-logfile $${directory:log}/$${:error-log-file} --timeout 200 --threads 3 --log-level error --preload
error-log-file = gunicorn-error.log error-log-file = gunicorn-error.log
wrapper-path = $${gunicorn:bin_launcher} wrapper-path = $${gunicorn:bin_launcher}
environment = PATH=$${environ:PATH}:${git:location}/bin/ environment = PATH=$${shell-environment:path}
RUNNER_CONFIG=$${slaprunner:slapos.cfg} RUNNER_CONFIG=$${slaprunner:slapos.cfg}
LANG=en_GB.UTF-8 LANG=en_GB.UTF-8
...@@ -640,18 +643,11 @@ template = inline: ...@@ -640,18 +643,11 @@ template = inline:
--disable-ssl \ --disable-ssl \
--disable-ssl-menu \ --disable-ssl-menu \
--unixdomain-only=$${:socket}:$(id -u):$(id -g):0600 \ --unixdomain-only=$${:socket}:$(id -u):$(id -g):0600 \
--service "/:$(id -u):$(id -g):HOME:$${shell:wrapper} -l" --service "/:$(id -u):$(id -g):HOME:$${shell-environment:shell} -l"
[shell] [shell-environment]
recipe = slapos.cookbook:shell
wrapper = $${directory:bin}/bash
shell = ${bash:location}/bin/bash shell = ${bash:location}/bin/bash
home = $${buildout:directory} path = ${nano:location}/bin:${vim:location}/bin:${screen:location}/bin:${git:location}/bin:${curl:location}/bin:${python2.7:location}/bin:${tig:location}/bin:${zip:location}/bin:${mosh:location}/bin:${bash:location}/bin:$${buildout:directory}/bin/:/usr/bin:/bin/
path = $${environ:PATH}:/usr/bin:/bin/:${nano:location}/bin:${vim:location}/bin:${screen:location}/bin:${git:location}/bin:${tig:location}/bin
ps1 = "\\w> "
[environ]
recipe = collective.recipe.environment
[prepare-software] [prepare-software]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
...@@ -712,16 +708,10 @@ recipe = slapos.recipe.template:jinja2 ...@@ -712,16 +708,10 @@ recipe = slapos.recipe.template:jinja2
template = ${template-bash-profile:location}/${template-bash-profile:filename} template = ${template-bash-profile:location}/${template-bash-profile:filename}
rendered = $${buildout:directory}/.bash_profile rendered = $${buildout:directory}/.bash_profile
context = context =
raw path $PATH:${nano:location}/bin:${vim:location}/bin:${screen:location}/bin:${git:location}/bin:${curl:location}/bin:${python2.7:location}/bin:${tig:location}/bin:${zip:location}/bin raw path $${shell-environment:path}
raw shell $${shell-environment:shell}
key workdir runnerdirectory:home key workdir runnerdirectory:home
[bashrc]
recipe = slapos.recipe.template:jinja2
template = ${template-bashrc:location}/${template-bashrc:filename}
rendered = $${buildout:directory}/.bashrc
context =
raw bin_folder ${buildout:bin-directory}
#--------------------------- #---------------------------
#-- #--
#-- supervisord managing slaprunner automation features #-- supervisord managing slaprunner automation features
...@@ -734,20 +724,18 @@ exitcodes = 0 ...@@ -734,20 +724,18 @@ exitcodes = 0
logfile = $${directory:log}/supervisord.log logfile = $${directory:log}/supervisord.log
no_logfile = NONE no_logfile = NONE
numprocs = 1 numprocs = 1
path = $${shell:path} path = $${shell-environment:path}
pidfile = $${directory:run}/supervisord.pid pidfile = $${directory:run}/supervisord.pid
ip = $${slaprunner:ipv4} ip = $${slaprunner:ipv4}
server = $${:ip}:$${:port} server = $${:ip}:$${:port}
port = 39986 port = 39986
slapgrid-cp = slapgrid-cp slapgrid-cp = slapgrid-cp
slapgrid-cp-command = $${slaprunner:slapos} node instance --all --cfg $${:slapos-cfg} --pidfile $${:slapgrid-cp-pid} --verbose --logfile $${:slapgrid-cp-log} slapgrid-cp-command = $${slaprunner:slapos} node instance --all --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-cp-log}
slapgrid-cp-log = $${runnerdirectory:home}/instance.log slapgrid-cp-log = $${runnerdirectory:home}/instance.log
slapgrid-cp-pid = $${directory:run}/slapgrid-cp.pid
slapgrid-cp-startretries = 0 slapgrid-cp-startretries = 0
slapgrid-sr = slapgrid-sr slapgrid-sr = slapgrid-sr
slapgrid-sr-command = $${slaprunner:slapos} node software --all --cfg $${:slapos-cfg} --pidfile $${:slapgrid-sr-pid} --verbose --logfile $${:slapgrid-sr-log} slapgrid-sr-command = $${slaprunner:slapos} node software --all --cfg $${:slapos-cfg} --verbose --logfile $${:slapgrid-sr-log}
slapgrid-sr-log = $${runnerdirectory:home}/software.log slapgrid-sr-log = $${runnerdirectory:home}/software.log
slapgrid-sr-pid = $${directory:run}/slapgrid-sr.pid
slapgrid-sr-startretries = 0 slapgrid-sr-startretries = 0
slapproxy = slapproxy slapproxy = slapproxy
slapproxy-autorestart = true slapproxy-autorestart = true
......
...@@ -10,14 +10,13 @@ extends = common.cfg ...@@ -10,14 +10,13 @@ extends = common.cfg
Flask-Auth = 0.85 Flask-Auth = 0.85
apache-libcloud = 1.2.1 apache-libcloud = 1.2.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
collective.recipe.environment = 0.2.0
futures = 3.0.5 futures = 3.0.5
gitdb = 0.6.4 gitdb = 0.6.4
gunicorn = 19.4.5 gunicorn = 19.7.1
prettytable = 0.7.2 prettytable = 0.7.2
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap = 0.9.0 smmap = 0.9.0
# Required by: # Required by:
......
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
cd {{ workdir }} cd {{ workdir }}
export PATH={{- path }} export PATH={{- path }}
export SHELL={{- shell }}
export PS1="$ " export PS1="$ "
if [ -f "$HOME/.bashrc" ] ; then if [ -f "$HOME/.bashrc" ] ; then
...@@ -29,13 +30,4 @@ alias l='ls -CF' ...@@ -29,13 +30,4 @@ alias l='ls -CF'
echo "Welcome to SlapOS slaprunner shell" echo "Welcome to SlapOS slaprunner shell"
echo echo
# TODO: how to run slapos ?
#echo "You can use the following slapos CLI commands:"
#echo "slapos node software"
#echo "slapos node instance"
#echo "slapos request"
#echo ""
# XXX for now we can use:
#echo ~/bin/supervisorctl -c ~/etc/supervisorctl.conf
#echo ~/bin/supervisorctl -c ~/srv/runner/instance/etc/supervisorctl.conf
export PATH={{ bin_folder }}:$PATH
...@@ -6,6 +6,7 @@ umask 077 ...@@ -6,6 +6,7 @@ umask 077
# Exit on any error, to prevent inconsistent backup # Exit on any error, to prevent inconsistent backup
# Error on unset variable expansion # Error on unset variable expansion
set -eu set -eu
set -o pipefail
# Redirect output to log # Redirect output to log
exec > >(tee -ai '{{ output_log_file }}') exec > >(tee -ai '{{ output_log_file }}')
...@@ -19,8 +20,12 @@ etc_directory='{{ directory["etc"] }}' ...@@ -19,8 +20,12 @@ etc_directory='{{ directory["etc"] }}'
tmp_directory='{{ directory["tmp"] }}' tmp_directory='{{ directory["tmp"] }}'
rsync () { rsync () {
# Workaround for bug https://bugzilla.samba.org/show_bug.cgi?id=3653
IGNOREEXIT=24
IGNOREOUT='^(file has vanished: |rsync warning: some files vanished before they could be transferred)'
set -x set -x
'{{ rsync_binary }}' -rlptgov --stats --safe-links --ignore-missing-args --delete --delete-excluded "$@" '{{ rsync_binary }}' -rlptgov --stats --safe-links --ignore-missing-args --delete --delete-excluded "$@" 2>&1 | (egrep -v "$IGNOREOUT" || true) || [ $? = "$IGNOREEXIT" ]
set +x set +x
} }
......
...@@ -155,22 +155,20 @@ rm '{{ instance_folder }}'/etc/supervisord.conf.d/* || true ...@@ -155,22 +155,20 @@ rm '{{ instance_folder }}'/etc/supervisord.conf.d/* || true
SLAPOSCFG='{{ supervisord["slapos-cfg"] }}' SLAPOSCFG='{{ supervisord["slapos-cfg"] }}'
SLAPGRIDSRLOG='{{ supervisord["slapgrid-sr-log"] }}' SLAPGRIDSRLOG='{{ supervisord["slapgrid-sr-log"] }}'
SLAPGRIDSRPID='{{ supervisord["slapgrid-sr-pid"] }}'
SLAPGRIDCPLOG='{{ supervisord["slapgrid-cp-log"] }}' SLAPGRIDCPLOG='{{ supervisord["slapgrid-cp-log"] }}'
SLAPGRIDCPPID='{{ supervisord["slapgrid-cp-pid"] }}'
log_message "Building newest Software Release..." log_message "Building newest Software Release..."
"$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" --pidfile "$SLAPGRIDSRPID" >/dev/null 2>&1 || "$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" >/dev/null 2>&1 ||
"$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" --pidfile "$SLAPGRIDSRPID" >/dev/null 2>&1 || "$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" >/dev/null 2>&1 ||
"$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" --pidfile "$SLAPGRIDSRPID" >/dev/null 2>&1 || "$SLAPOS" node software --cfg "$SLAPOSCFG" --all --master-url="$MASTERURL" --logfile "$SLAPGRIDSRLOG" >/dev/null 2>&1 ||
(tail -n 200 "$SLAPGRIDSRLOG" && false) (tail -n 200 "$SLAPGRIDSRLOG" && false)
# Remove defined scripts to force buildout to recreate them to have updated paths # Remove defined scripts to force buildout to recreate them to have updated paths
rm "$srv_directory"/runner/instance/slappart*/srv/runner-import-restore || true rm "$srv_directory"/runner/instance/slappart*/srv/runner-import-restore || true
log_message "Fixing Instances as needed after import..." log_message "Fixing Instances as needed after import..."
# XXX hardcoded # XXX hardcoded
"$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" --pidfile "$SLAPGRIDCPPID" >/dev/null 2>&1 || "$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" >/dev/null 2>&1 ||
"$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" --pidfile "$SLAPGRIDCPPID" >/dev/null 2>&1 || "$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" >/dev/null 2>&1 ||
"$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" --pidfile "$SLAPGRIDCPPID" >/dev/null 2>&1 || "$SLAPOS" node instance --cfg "$SLAPOSCFG" --master-url=$MASTERURL --logfile "$SLAPGRIDCPLOG" >/dev/null 2>&1 ||
(tail -n 200 "$SLAPGRIDCPLOG" && false) (tail -n 200 "$SLAPGRIDCPLOG" && false)
# Invoke defined scripts for each partition inside of slaprunner # Invoke defined scripts for each partition inside of slaprunner
......
...@@ -6,6 +6,8 @@ computer_id = slaprunner ...@@ -6,6 +6,8 @@ computer_id = slaprunner
maximal_delay = 0 maximal_delay = 0
root_check = {{ slaprunner['root_check'] }} root_check = {{ slaprunner['root_check'] }}
forbid_supervisord_automatic_launch = true forbid_supervisord_automatic_launch = true
pidfile_software = {{slaprunner['pidfile-software']}}
pidfile_instance = {{slaprunner['pidfile-instance']}}
[slapformat] [slapformat]
partition_amount = {{ slaprunner['partition-amount'] }} partition_amount = {{ slaprunner['partition-amount'] }}
......
...@@ -59,7 +59,7 @@ eggs = collective.recipe.template ...@@ -59,7 +59,7 @@ eggs = collective.recipe.template
collective.recipe.template = 1.11 collective.recipe.template = 1.11
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.build = 0.28 slapos.recipe.build = 0.28
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
# Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test. # Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test.
[buildout] [buildout]
...@@ -106,23 +106,11 @@ versions = versions ...@@ -106,23 +106,11 @@ versions = versions
networkcache-section = networkcache networkcache-section = networkcache
# Install slapos.cookbook containing all officials recipes # Install slapos.cookbook containing all officials recipes
# Explicitly define dependencies as well, because of a buildout limitation
# if using "develop"
[slapos-cookbook] [slapos-cookbook]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
${lxml-python:egg} ${lxml-python:egg}
slapos.cookbook slapos.cookbook
cliff
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
[versions] [versions]
# Use SlapOS patched zc.buildout # Use SlapOS patched zc.buildout
......
...@@ -51,7 +51,6 @@ MarkupSafe = 0.18 ...@@ -51,7 +51,6 @@ MarkupSafe = 0.18
Werkzeug = 0.8.3 Werkzeug = 0.8.3
buildout-versions = 1.7 buildout-versions = 1.7
hexagonit.recipe.cmmi = 2.0 hexagonit.recipe.cmmi = 2.0
inotifyx = 0.2.0-1
lxml = 3.2.1 lxml = 3.2.1
meld3 = 0.6.10 meld3 = 0.6.10
netaddr = 0.7.10 netaddr = 0.7.10
......
...@@ -44,7 +44,6 @@ hexagonit.recipe.download = 1.6nxd002 ...@@ -44,7 +44,6 @@ hexagonit.recipe.download = 1.6nxd002
# Required by: # Required by:
# slapos.cookbook==0.73.1 # slapos.cookbook==0.73.1
inotifyx = 0.2.0
# Required by: # Required by:
# slapos.cookbook==0.73.1 # slapos.cookbook==0.73.1
......
...@@ -62,4 +62,4 @@ md5sum = 0ea12a4ad2d2e3d406476e35b8d3e3fb ...@@ -62,4 +62,4 @@ md5sum = 0ea12a4ad2d2e3d406476e35b8d3e3fb
mode = 640 mode = 640
[versions] [versions]
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
# ERP5: id-store-interval must be big enough to avoid conflicts (e.g. 1000)
[buildout]
extends = test-fluentd-common.cfg
[template-erp5-patched]
extra =
{%- for family_name, zope_section_id_list in zope_family_dict.items() %}
{%- for zope_section_id in zope_section_id_list %}
[{{zope_section_id}}]
config-family = {{family_name}}
{%- endfor %}
{%- endfor %}
[template-zope]
recipe =
target = ${template-zope-patched:location}
[template-zope-base]
<= template-zope
recipe = slapos.recipe.build:download
[template-zope-patched]
<= template-fluentd
base = ${template-zope-base:target}
tags =
{%- if slapparameter_dict['family'] == 'fluentd' %}
{%- for i, zope in enumerate(publish_list) %}
{%- do tags.__setitem__(
node_id_base ~ (node_id_index_format % i) ~ '-stream', zope[0]) %}
{%- endfor %}
{%- endif %}
[feeder]
feeder =
#
import os, struct
from random import lognormvariate
pack = struct.Struct('!d').pack
size = 65536
data = os.urandom(size - 8)
while True:
emit('',
''.join((pack(time.time()) + data[:int(lognormvariate(10, 1))]
).ljust(size, '\0') for i in xrange(16)))
time.sleep(interval)
[buildout]
extends =
../../component/gnupg/buildout.cfg
../fluentd/software.cfg
../wendelin/software.cfg
../../component/mariadb/mariarocks.cfg
[local-bt5-repository]
list += ${slapos.cookbook-repository:location}/software/wendelin-scalability
[patch-template]
recipe = slapos.recipe.build
location = ${buildout:directory}/${:_buildout_section_name_}.cfg.in
script =
with open(self.options['location'], 'w') as dst, \
open(self.options['base']) as src:
src = src.read()
i = src.index('[buildout]')
dst.write(src[:i] + self.options['extra'] + '\n' + src[i:])
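The script above splices the rendered extra snippet right before the first [buildout] section of the base profile; a standalone sketch of the same splice (the sample profile text is invented):

# Sketch: same splice as the [patch-template] script, outside of buildout.
base = "[template-erp5]\nfoo = 1\n\n[buildout]\nparts = x\n"
extra = "[request-balancer]\nconfig-example = 1\n"

i = base.index('[buildout]')
print(base[:i] + extra + '\n' + base[i:])
# The extra sections land before [buildout], so they can override or extend
# sections of the base template without disturbing its [buildout] section.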
[template-erp5]
recipe =
target = ${template-erp5-patched:location}
[template-erp5-base]
<= template-erp5
recipe = slapos.recipe.build:download
[template-erp5-patched]
<= patch-template
base = ${template-erp5-base:target}
[template-fluentd]
<= patch-template
method = unpack
extra =
{%- set tags = {} %}
${:tags}
{%- if tags %}
[feeder]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:services}/$${:_buildout_section_name_}
command-line = ${feeder:bin-directory}/feeder $${fluentd-agent-conf:rendered}
$${:_buildout_section_name_} .4
{%- for tag in tags %}
[{{ section(tag) }}]
<= feeder
{%- endfor %}
[{{ section('fluentd') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:services}/$${:_buildout_section_name_}
command-line = ${fluentd:location}/bin/fluentd
-v -c $${fluentd-agent-conf:rendered}
environment =
GEM_PATH=${fluentd:location}/lib/ruby/gems/1.8
[fluentd-agent-conf]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:etc}/fluentd-agent.conf
template =
inline:{% raw -%}
<match debug.**>
@type stdout
@id stdout_output
</match>
<source>
# in_exec is unusable because it logs dropped events in case of buffer overflow
@type unix
path $${directory:run}/fluentd.sock
</source>
{%- for tag, netloc in tags.iteritems() %}
<match {{ tag }}>
@type wendelin
@id {{ tag }}
streamtool_uri http://{{ netloc }}/erp5/portal_ingestion_policies/scalability_test_${:method}
user {{ user }}
password {{ password }}
buffer_type memory
buffer_queue_limit 4
</match>
{%- endfor %}
{%- endraw %}
context =
key tags :tags
raw user {{ slapparameter_dict['inituser-login'] }}
raw password {{ slapparameter_dict['inituser-password'] }}
tags = {{ dumps(tags) }}
{%- endif %}
[feeder]
recipe = zc.recipe.egg
eggs = fluent-logger
entry-points = ${:_buildout_section_name_}=__main__:main
initialization =
import re, time, urlparse
from urllib import FancyURLopener
from fluent.sender import FluentSender
def main():
conf, tag, interval = sys.argv[1:]
with open(conf) as f:
for k, v in re.compile(r"<(.+?)>\s*(.*?)\s*</.+?>",
re.S).findall(f.read()):
v = dict(v.split() for v in v.splitlines())
if k == 'source':
sock = v['path']
elif v['@type'] == 'wendelin':
if k.split(None, 1)[1] == tag:
conf = v
emit = FluentSender(tag, host='unix://'+sock, timeout=None,
msgpack_kwargs={'use_bin_type': True}).emit
url = urlparse.urlsplit(conf['streamtool_uri'])
url = urlparse.urlunsplit(url._replace(
netloc='%s:%s@%s' % (conf['user'], conf['password'], url.netloc),
path='/erp5/isPortalBeingCreated'))
urlopen = FancyURLopener().open
while True:
try:
f = urlopen(url)
except IOError:
pass
else:
try:
if not f.read():
break
finally:
f.close()
time.sleep(5)
interval = float(interval)
${:feeder}
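The shared initialization above parses fluentd-agent.conf with a crude regular expression before running the test-specific feeder body; the following self-contained sketch shows that parsing step on an invented configuration with the same shape as the rendered template:

# Sketch: how the <...> regex above extracts the unix socket path and the
# per-tag wendelin settings (sample configuration only).
import re

conf = """
<source>
  @type unix
  path /srv/runner/fluentd.sock
</source>
<match wendelin_tag_0>
  @type wendelin
  streamtool_uri http://10.0.0.1:2150/erp5/portal_ingestion_policies/scalability_test_unpack
  user zope
  password secret
</match>
"""
sock = None
outputs = {}
for k, v in re.compile(r"<(.+?)>\s*(.*?)\s*</.+?>", re.S).findall(conf):
    v = dict(line.split() for line in v.splitlines())
    if k == 'source':
        sock = v['path']
    elif v['@type'] == 'wendelin':
        outputs[k.split(None, 1)[1]] = v
print(sock)             # /srv/runner/fluentd.sock
print(sorted(outputs))  # ['wendelin_tag_0']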
[versions]
fluent-logger = 0.5.3
# ERP5: id-store-interval must be big enough to avoid conflicts (e.g. 1000)
# XXX: Because supervisord can't handle too many processes
# ("too many open files to spawn" errors), these SR
# should instantiate the source processes differently.
[buildout]
extends = test-fluentd-common.cfg
[template-erp5-patched]
extra =
[request-balancer]
config-inituser-login = {{ dumps(inituser_login) }}
config-inituser-password = $${publish-early:inituser-password}
[template-balancer]
recipe =
target = ${template-balancer-patched:location}
[template-balancer-base]
<= template-balancer
recipe = slapos.recipe.build:download
[template-balancer-patched]
<= template-fluentd
base = ${template-balancer-base:target}
tags =
{%- set port, backend_list = haproxy_dict['fluentd'] %}
{%- for i in range(100*len(backend_list)) %}
{%- do tags.__setitem__('wendelin_tag_' ~ i, ipv4 ~ ':' ~ port) %}
{%- endfor %}
[feeder]
feeder =
#
import collections, random, struct
pack = struct.Struct('!d').pack
data = collections.deque(
''.join(chr(int(random.gauss(0, .68)) % 256) for _ in xrange(2500))
# With a period greater than 64kiB (zlib dictionary size),
# we avoid extra compression due to repetition.
for _ in xrange(30))
interval = 60 # XXX: same as the fluentd flush interval
time.sleep(interval * random.random())
while True:
emit('', pack(time.time()) + data[0])
data.rotate()
time.sleep(interval)
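A quick arithmetic check of the sizing comment above: the deque holds 30 chunks of 2500 pseudo-random bytes, so the emitted stream only repeats after one full rotation:

# Sketch: period of the rotated data stream produced by the feeder above.
chunk_size = 2500   # bytes of pseudo-random data per record
chunks = 30         # deque length: data repeats after one full rotation
period = chunk_size * chunks
print(period)              # 75000 bytes
print(period > 64 * 1024)  # True: larger than the 64 KiB mentioned above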
# NEO: data deduplication must be enabled
[buildout]
extends = test-fluentd-common.cfg
[template-erp5-patched]
extra =
[request-balancer]
config-inituser-login = {{ dumps(inituser_login) }}
config-inituser-password = $${publish-early:inituser-password}
[template-balancer]
recipe =
target = ${template-balancer-patched:location}
[template-balancer-base]
<= template-balancer
recipe = slapos.recipe.build:download
[template-balancer-patched]
<= template-fluentd
base = ${template-balancer-base:target}
method = raw_lf
tags =
{%- set port, backend_list = haproxy_dict['fluentd'] %}
{%- for i in range(len(backend_list)) %}
{%- do tags.__setitem__('wendelin_tag_' ~ i, ipv4 ~ ':' ~ port) %}
{%- endfor %}
[data.gpg]
recipe = slapos.recipe.build:download
url = http://download.shacache.org/0f56a59ab3bc3193e70c1996fe69260e1b225bc53bbb7509cb3ae036006653dec69b3384cd624eefed81c85e692e4b4b7cb3d25c6244c298070569dddf7216b1
md5sum = 374e00dfbd28f27819ccfff31d169c9c
[gpg-decrypt]
recipe = slapos.recipe.build
gpg = ${gnupg:location}/bin/gpg
script =
import subprocess, zc.buildout
options = self.options
args = (options['gpg'], '-d', '--batch', '--passphrase-fd', '0',
'-o', options['location'], options['input'])
p = subprocess.Popen(args, stdin=subprocess.PIPE)
p.communicate(options['password'])
retcode = p.poll()
if retcode:
raise subprocess.CalledProcessError(retcode, ' '.join(args))
[data]
<= gpg-decrypt
location = ${data.gpg:location}/data.log
input = ${data.gpg:target}
[feeder]
feeder =
#
import mmap, os, random
data = mmap.mmap(os.open('${data:location}', os.O_RDONLY),
0, prot=mmap.PROT_READ)
size = 1024 * 1024
max_seek = len(data) - size
while True:
a = random.randint(0, max_seek)
a = {'data': data[a:a+size]}
for i in xrange(20):
emit('', a)
time.sleep(interval)
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Ingestion Policy" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_Access_contents_information_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Auditor</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_Add_portal_content_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_Modify_portal_content_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_View_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Auditor</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>scalability_test_raw_lf</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Ingestion Policy</string> </value>
</item>
<item>
<key> <string>script_id</string> </key>
<value> <string>ERP5Site_handleRawDataFluentdIngestion</string> </value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>actor</string> </key>
<value> <string>zope</string> </value>
</item>
<item>
<key> <string>comment</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>error_message</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>time</string> </key>
<value>
<object>
<klass>
<global name="DateTime" module="DateTime.DateTime"/>
</klass>
<tuple>
<none/>
</tuple>
<state>
<tuple>
<float>1507057895.87</float>
<string>UTC</string>
</tuple>
</state>
</object>
</value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Ingestion Policy" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_Access_contents_information_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Auditor</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_Add_portal_content_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_Modify_portal_content_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>_View_Permission</string> </key>
<value>
<tuple>
<string>Assignee</string>
<string>Assignor</string>
<string>Associate</string>
<string>Auditor</string>
<string>Manager</string>
</tuple>
</value>
</item>
<item>
<key> <string>description</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>scalability_test_unpack</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Ingestion Policy</string> </value>
</item>
<item>
<key> <string>script_id</string> </key>
<value> <string>ERP5Site_handleRawDataFluentdIngestion</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>actor</string> </key>
<value> <string>zope</string> </value>
</item>
<item>
<key> <string>comment</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>error_message</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>time</string> </key>
<value>
<object>
<klass>
<global name="DateTime" module="DateTime.DateTime"/>
</klass>
<tuple>
<none/>
</tuple>
<state>
<tuple>
<float>1507057913.93</float>
<string>UTC</string>
</tuple>
</state>
</object>
</value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>DataStreamModule_getTotalSize</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
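# Ingestion handler: append each received data_chunk to the Data Stream named by 'reference', creating it on first use; the scalability_test_unpack policy unpacks (timestamp, chunk) pairs before appending.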
portal = context.getPortalObject()
request = portal.REQUEST
reference = request['reference']
data_chunk = request['data_chunk']
module = portal.data_stream_module
try:
data_stream = module[reference]
except KeyError:
data_stream = module.newContent(reference, 'Data Stream')
append_method = context.getId()
if append_method == 'scalability_test_raw_lf':
data_stream.appendData(data_chunk + '\n')
else:
assert append_method == 'scalability_test_unpack', append_method
for time, data_chunk in context.unpack(data_chunk):
data_stream.appendData(data_chunk)
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>ERP5Site_handleRawDataFluentdIngestion</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
portal_ingestion_policies/scalability_test_*
portal_skins/custom/DataStreamModule_getTotalSize
portal_skins/custom/ERP5Site_handleRawDataFluentdIngestion
\ No newline at end of file
test_scalability_fluentd
\ No newline at end of file
...@@ -2,13 +2,16 @@ ...@@ -2,13 +2,16 @@
extends = extends =
../../component/keras/buildout.cfg ../../component/keras/buildout.cfg
../../component/chainer/buildout.cfg ../../component/chainer/buildout.cfg
../../component/pytorch/buildout.cfg
software.cfg software.cfg
parts += parts +=
keras-egg keras-egg
chainer-egg chainer-egg
pytorch-egg
[eggs] [eggs]
eggs += eggs +=
${tensorboard-build-install-egg:egg}
${tensorflow-build-install-egg:egg} ${tensorflow-build-install-egg:egg}
${protobuf-python:egg} ${protobuf-python:egg}
${h5py:egg} ${h5py:egg}
...@@ -19,6 +22,9 @@ eggs += ...@@ -19,6 +22,9 @@ eggs +=
filelock filelock
nose nose
${chainer:egg} ${chainer:egg}
${pytorch-build-install-egg:egg}
${python-cocoapi-build-install-egg:egg}
torchvision
[cuda] [cuda]
# If you use cuda, please adjust paramters for your environment # If you use cuda, please adjust paramters for your environment
......
...@@ -69,7 +69,7 @@ pycrypto = 2.6 ...@@ -69,7 +69,7 @@ pycrypto = 2.6
apache-libcloud = 0.12.4 apache-libcloud = 0.12.4
async = 0.6.1 async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
mysqlclient = 1.3.10+SlapOSPatched002 mysqlclient = 1.3.12
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
......
{%- if parameter_dict['domain'] -%}
{{ parameter_dict['domain'] }} {
tls {{ custom_cert_dict['cert-file'] }} {{ custom_cert_dict['key-file'] }} {
ca {{ca_custom_frontend_dict['rendered']}}
}
log {{caddy_configuration_dict['access_log']}}
errors {{caddy_configuration_dict['error_log']}}
root {{ directory_dict['public_html'] }}
{% if parameter_dict['enable-basic-auth'] == 'true' -%}
basicauth / {{ parameter_dict['username'] }} {{parameter_dict['password']}}
{%- endif %}
}
bind {{caddy_configuration_dict['ipv6']}}
{%- endif %}
[{{caddy_configuration_dict['ipv6']}}]:{{parameter_dict['port-ipv6']}} {
tls {{ custom_cert_dict['cert-file'] }} {{ custom_cert_dict['key-file'] }} {
ca {{ certificate_authority_dict['ca-cert-file'] }}
}
log {{caddy_configuration_dict['access_log']}}
errors {{caddy_configuration_dict['error_log']}}
root {{ directory_dict['public_html'] }}
{% if parameter_dict['enable-basic-auth'] == 'true' -%}
basicauth / {{ parameter_dict['username'] }} {{parameter_dict['password']}}
{%- endif %}
bind {{caddy_configuration_dict['ipv6']}}
}
[{{caddy_configuration_dict['local_ip']}}]:{{parameter_dict['port-ipv4']}} {
log {{caddy_configuration_dict['access_log']}}
errors {{caddy_configuration_dict['error_log']}}
root {{ directory_dict['public_html'] }}
{% if parameter_dict['enable-basic-auth'] == 'true' -%}
basicauth / {{ parameter_dict['username'] }} {{parameter_dict['password']}}
{%- endif %}
bind {{caddy_configuration_dict['local_ip']}}
}
\ No newline at end of file
[buildout]
extends =
../../component/golang/buildout.cfg
../../stack/slapos.cfg
../../component/dash/buildout.cfg
gowork.cfg
parts =
gowork
slapos-cookbook
instance-profile
template-caddy
caddy
[gowork]
install =
github.com/mholt/caddy
[instance-profile]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 403f86b667f7a5d397993735bcd162ab
output =${buildout:directory}/instance.cfg
filename = instance.cfg
mode = 0644
[template-caddyfile]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/Caddyfile.in
md5sum = 88c4c33e374ea3f61cdd36b2816d24ba
filename = Caddyfile.in
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 0644
[template-caddy]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-caddy.cfg.in
md5sum = 3b796dee5c509393fd396fd019914a2e
output = ${buildout:directory}/instance-caddy.cfg.in
mode = 0644
[template-public-html]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/index.html
md5sum = b5794ac8b10ed90173ad566e6e324b35
output = ${buildout:directory}/index.html
mode = 0644
[template-caddy-service]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-caddy-service.sh.in
md5sum = 5467fac7b95acde681e613ae98ce990d
output = ${buildout:directory}/template-caddy-service.sh.in
mode = 0644
[caddy]
recipe = slapos.recipe.cmmi
path = ${go_github.com_mholt_caddy:location}
go = ${gowork:golang}/bin/go
configure-command = :
make-targets =
make-binary = cd ${:path}/caddy && ${:go} install -v
environment =
PATH=${pkgconfig:location}/bin:${gowork:golang}/bin:${buildout:bin-directory}:%(PATH)s
GOPATH=${gowork:directory}
output = ${gowork:bin}/caddy
\ No newline at end of file
# Code generated by gowork-snapshot; DO NOT EDIT.
# list of go git repositories to fetch
[gowork.goinstall]
depends_gitfetch =
${go_github.com_mholt_caddy:recipe}
[go_github.com_mholt_caddy]
<= go-git-package
go.importpath = github.com/mholt/caddy
repository = https://github.com/mholt/caddy.git
revision = v0.10.10-0-gc4dfbb9956
[buildout]
parts =
caddy-service
caddy-configuration
certificate-authority
custom-cert
htpasswd
public-html
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
bin = $${buildout:directory}/bin
srv = $${buildout:directory}/srv
var = $${buildout:directory}/var
service = $${:etc}/service
public_html = $${buildout:directory}/public_html
run = $${:var}/run
log = $${:var}/log
ca-dir = $${:srv}/ssl
#################################
# caddy service
#################################
[caddy-service]
recipe = slapos.recipe.template:jinja2
template = ${template-caddy-service:output}
rendered = $${directory:service}/caddy
mode = 0700
context =
key caddy_exec caddy-exec-dict:caddy-exec-file
section caddy_configuration_dict caddy-configuration
section parameter_dict slap-parameter
[caddy-exec-dict]
caddy-exec-file = ${caddy:output}
[caddy-configuration]
recipe = slapos.recipe.template:jinja2
template = ${template-caddyfile:location}/${template-caddyfile:filename}
rendered = $${directory:etc}/Caddyfile
mode = 0600
access_log = $${directory:log}/caddy-access.log
error_log = $${directory:log}/caddy-error.log
ipv6 = $${slap-network-information:global-ipv6}
local_ip = $${slap-network-information:local-ipv4}
context =
section parameter_dict slap-parameter
section directory_dict directory
section caddy_configuration_dict caddy-configuration
section certificate_authority_dict certificate-authority
key htpasswd_dict htpasswd:passwd
section custom_cert_dict custom-cert
section ca_custom_frontend_dict ca-custom-frontend
[ca-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${ca-directory:requests}
wrapper = $${directory:service}/certificate_authority
ca-private = $${ca-directory:private}
ca-certs = $${ca-directory:certs}
ca-newcerts = $${ca-directory:newcerts}
ca-crl = $${ca-directory:crl}
ca-cert-file = $${:ca-dir}/cacert.pem
ca-key-file = $${:ca-private}/cakey.pem
[custom-cert]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = $${directory:service}/caddy
wrapper = $${directory:service}/caddy
key-file = $${ca-directory:private}/custom.key
cert-file = $${ca-directory:certs}/custom.crt
key-content = $${slap-parameter:key-content}
cert-content = $${slap-parameter:cert-content}
[ca-custom-frontend]
recipe = slapos.recipe.template:jinja2
template = $${template-empty:target}
rendered = $${ca-directory:certs}/caddy_frontend.ca.crt
context =
key content slap-parameter:caddy-ca-certificate
[template-empty]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/$${:filename}
filename = empty.in
[htpasswd]
recipe = slapos.cookbook:generate.password
storage-path = $${directory:etc}/.pwd
bytes = 8
[public-html]
recipe = slapos.recipe.template
url = ${template-public-html:output}
output = $${directory:public_html}/index.html
mode = 0600
[publish-connection-information]
recipe = slapos.cookbook:publish
password = $${slap-parameter:password}
user = $${slap-parameter:username}
secure_access = https://[$${caddy-configuration:ipv6}]:$${slap-parameter:port-ipv6}
[slap-parameter]
domain =
key-content =
cert-content =
caddy-ca-certificate =
port-ipv6 = 9443
port-ipv4 = 4443
enable-quic = true
enable-basic-auth =
username = admin
password = $${htpasswd:passwd}
\ No newline at end of file
#############################
#
# Deploy caddy instance
#
#############################
[buildout]
parts =
switch-softwaretype
# publish-connection-parameter
# Define egg directories to be the one from Software Release
# (/opt/slapgrid/...)
# Always the same.
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${:caddy}
caddy = $${dynamic-template-caddy:rendered}
[dynamic-template-caddy]
recipe = slapos.recipe.template:jinja2
template = ${template-caddy:output}
rendered = $${buildout:parts-directory}/${:_buildout_section_name_}/${:filename}
filename = instance-caddy.cfg
[slap-connection]
computer-id = $${slap_connection:computer_id}
partition-id = $${slap_connection:partition_id}
server-url = $${slap_connection:server_url}
software-release-url = $${slap_connection:software_release_url}
key-file = $${slap_connection:key_file}
cert-file = $${slap_connection:cert_file}
[instance-parameter]
# Fetch arbitrary parameters defined by the user in SlapOS Master for his instance.
# We use the slapconfiguration recipe with a few parameters (partition id,
# computer id, certificate, etc).
# It will then authenticate to SlapOS Master and fetch the instance parameters.
# The parameters are accessible from {instance-parameter:configuration.name-of-parameter}
# Always the same. Just copy/paste.
# See docstring of slapos.cookbook:slapconfiguration for more information.
recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
\ No newline at end of file
#!${dash-output:dash}
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
{{ caddy_exec }} -conf {{caddy_configuration_dict['rendered']}}
{%- if parameter_dict['enable-quic'] != 'false' %}
-quic
{%- endif -%}
\ No newline at end of file
{{ content }}
\ No newline at end of file
<h1>Welcome</h1>
\ No newline at end of file
...@@ -87,15 +87,15 @@ context = ...@@ -87,15 +87,15 @@ context =
[versions] [versions]
Flask-User = 0.6.11 Flask-User = 0.6.19
apache-libcloud = 2.1.0 apache-libcloud = 2.1.0
bcrypt = 3.1.3 bcrypt = 3.1.3
caucase = 0.1.4 caucase = 0.1.4
futures = 3.1.1 futures = 3.1.1
gitdb2 = 2.0.2 gitdb2 = 2.0.2
gunicorn = 19.7.1 gunicorn = 19.7.1
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
slapos.toolbox = 0.71 slapos.toolbox = 0.73
smmap2 = 2.0.3 smmap2 = 2.0.3
# Required by: # Required by:
...@@ -103,24 +103,24 @@ smmap2 = 2.0.3 ...@@ -103,24 +103,24 @@ smmap2 = 2.0.3
Flask-AlchemyDumps = 0.0.10 Flask-AlchemyDumps = 0.0.10
# Required by: # Required by:
# Flask-User==0.6.11 # Flask-User==0.6.19
Flask-Login = 0.4.0 Flask-Login = 0.4.0
# Required by: # Required by:
# Flask-User==0.6.11 # Flask-User==0.6.19
Flask-Mail = 0.9.1 Flask-Mail = 0.9.1
# Required by: # Required by:
# Flask-AlchemyDumps==0.0.10 # Flask-AlchemyDumps==0.0.10
# Flask-User==0.6.11 # Flask-User==0.6.19
Flask-SQLAlchemy = 2.2 Flask-SQLAlchemy = 2.3.2
# Required by: # Required by:
# Flask-AlchemyDumps==0.0.10 # Flask-AlchemyDumps==0.0.10
Flask-Script = 2.0.5 Flask-Script = 2.0.6
# Required by: # Required by:
# Flask-User==0.6.11 # Flask-User==0.6.19
Flask-WTF = 0.14.2 Flask-WTF = 0.14.2
# Required by: # Required by:
...@@ -133,7 +133,7 @@ PyRSS2Gen = 1.1 ...@@ -133,7 +133,7 @@ PyRSS2Gen = 1.1
# Required by: # Required by:
# Flask-AlchemyDumps==0.0.10 # Flask-AlchemyDumps==0.0.10
SQLAlchemy = 1.1.11 SQLAlchemy = 1.1.15
# Required by: # Required by:
# Flask-AlchemyDumps==0.0.10 # Flask-AlchemyDumps==0.0.10
...@@ -173,8 +173,9 @@ passlib = 1.7.1 ...@@ -173,8 +173,9 @@ passlib = 1.7.1
# Required by: # Required by:
# caucase==0.1.4 # caucase==0.1.4
pem = 16.1.0 pem = 17.1.0
# Required by:
# caucase==0.1.4 # caucase==0.1.4
pyasn1 = 0.2.3 pyasn1 = 0.2.3
...@@ -183,5 +184,5 @@ pyasn1 = 0.2.3 ...@@ -183,5 +184,5 @@ pyasn1 = 0.2.3
pyasn1-modules = 0.0.9 pyasn1-modules = 0.0.9
# Required by: # Required by:
# Flask-User==0.6.11 # Flask-User==0.6.19
pycryptodome = 3.4.6 pycryptodome = 3.4.7
...@@ -28,7 +28,7 @@ md5sum = a317d2f948cd3d16c860d05cc07ecf42 ...@@ -28,7 +28,7 @@ md5sum = a317d2f948cd3d16c860d05cc07ecf42
[template-caucase] [template-caucase]
filename = instance-caucase.cfg.jinja2.in filename = instance-caucase.cfg.jinja2.in
md5sum = 7db9e8bf23cf4689e7986c381b94d2cb md5sum = d31d4c9855d653d3d74c6133a7d85530
[instance-caucase] [instance-caucase]
filename = instance.cfg.in filename = instance.cfg.in
......
...@@ -157,7 +157,7 @@ mode = 700 ...@@ -157,7 +157,7 @@ mode = 700
[caucase-gunicorn] [caucase-gunicorn]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
socket = ${directory:ca-dir}/ca.flaskserver.sock socket = ${directory:ca-dir}/ng.sock
command-line = {{ gunicorn_bin }} caucase.wsgi:app -b unix:${:socket} -e CA_CONFIGURATION_FILE=${caucase-conf:output} --error-logfile ${:log-file} --pid ${:pid-file} --capture-output --timeout 60 --threads 2 --log-level error --preload command-line = {{ gunicorn_bin }} caucase.wsgi:app -b unix:${:socket} -e CA_CONFIGURATION_FILE=${caucase-conf:output} --error-logfile ${:log-file} --pid ${:pid-file} --capture-output --timeout 60 --threads 2 --log-level error --preload
log-file = ${directory:log}/ca-gunicorn-error.log log-file = ${directory:log}/ca-gunicorn-error.log
pid-file = ${directory:run}/ca-gunicorn.pid pid-file = ${directory:run}/ca-gunicorn.pid
......
...@@ -89,7 +89,7 @@ PasteScript = 2.0.2 ...@@ -89,7 +89,7 @@ PasteScript = 2.0.2
WSGIUtils = 0.7 WSGIUtils = 0.7
python-magic = 0.4.6 python-magic = 0.4.6
rdiff-backup = 1.0.5+SlapOSPatched001 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 3.0 slapos.recipe.template = 4.1
# Required by: # Required by:
# PasteScript==2.0 # PasteScript==2.0
......
...@@ -8,11 +8,6 @@ General: ...@@ -8,11 +8,6 @@ General:
- make postfix log inside partition - make postfix log inside partition
- document postfix parameters (only once it actually works) - document postfix parameters (only once it actually works)
Monitoring:
- daily slow-query digest
make percona toolkit available in mysql instance and decide how to send digest
- daily apachedex
Backups: Backups:
- flush binlogs independently from full backups (in addition to anyway flushing them on full backup creation) - flush binlogs independently from full backups (in addition to anyway flushing them on full backup creation)
- rotate tidstorage consistency points - rotate tidstorage consistency points
......
...@@ -167,11 +167,11 @@ stop-on-error = true ...@@ -167,11 +167,11 @@ stop-on-error = true
update-command = ${:command} update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[mariadb-resiliency-after-import-script] [mariadb-start-clone-from-backup]
<= download-base <= download-base
mode = 755 mode = 755
[mariadb-slowquery-check-script] [mariadb-resiliency-after-import-script]
<= download-base <= download-base
mode = 755 mode = 755
...@@ -233,8 +233,6 @@ context = ...@@ -233,8 +233,6 @@ context =
key mariadb_link_binary template-mariadb:link-binary key mariadb_link_binary template-mariadb:link-binary
key zope_link_binary template-zope:link-binary key zope_link_binary template-zope:link-binary
key apache_location apache:location key apache_location apache:location
key apdex_result_check_script apdex-result-check-script:target
key mariadb_slowquery_check_script mariadb-slowquery-check-script:target
key aspell_location aspell:location key aspell_location aspell:location
key bin_directory buildout:bin-directory key bin_directory buildout:bin-directory
key buildout_bin_directory buildout:bin-directory key buildout_bin_directory buildout:bin-directory
...@@ -282,6 +280,7 @@ context = ...@@ -282,6 +280,7 @@ context =
key mariadb_location mariadb:location key mariadb_location mariadb:location
key mariadb_resiliency_after_import_script mariadb-resiliency-after-import-script:target key mariadb_resiliency_after_import_script mariadb-resiliency-after-import-script:target
key mariadb_slow_query_report_script mariadb-slow-query-report-script:target key mariadb_slow_query_report_script mariadb-slow-query-report-script:target
key mariadb_start_clone_from_backup mariadb-start-clone-from-backup:target
key matplotlibrc_location matplotlibrc:location key matplotlibrc_location matplotlibrc:location
key mesa_location mesa:location key mesa_location mesa:location
key onlyoffice_x2t_location onlyoffice-x2t:location key onlyoffice_x2t_location onlyoffice-x2t:location
...@@ -353,9 +352,6 @@ link-binary = ...@@ -353,9 +352,6 @@ link-binary =
[template-balancer] [template-balancer]
<= download-base <= download-base
[apdex-result-check-script]
<= download-base
[template-haproxy-cfg] [template-haproxy-cfg]
<= download-base <= download-base
...@@ -480,7 +476,6 @@ eggs = ${neoppod:eggs} ...@@ -480,7 +476,6 @@ eggs = ${neoppod:eggs}
collective.recipe.template collective.recipe.template
coverage coverage
erp5diff erp5diff
inotifyx
interval interval
ipdb ipdb
Jinja2 Jinja2
...@@ -622,6 +617,8 @@ eggs += ...@@ -622,6 +617,8 @@ eggs +=
scripts += scripts +=
is-local-tcp-port-opened is-local-tcp-port-opened
is-process-older-than-dependency-set is-process-older-than-dependency-set
check-slow-queries-digest-result
check-apachedex-result
runApacheDex runApacheDex
zodbpack zodbpack
...@@ -690,8 +687,6 @@ PyXML = 0.8.5 ...@@ -690,8 +687,6 @@ PyXML = 0.8.5
Pympler = 0.4.3 Pympler = 0.4.3
StructuredText = 2.11.1 StructuredText = 2.11.1
WSGIUtils = 0.7 WSGIUtils = 0.7
# ZEO 5 requires transaction >= 2
ZEO = 4.3.1
ZODB3 = 3.11.0 ZODB3 = 3.11.0
# astroid 1.4.1 breaks testDynamicClassGeneration # astroid 1.4.1 breaks testDynamicClassGeneration
astroid = 1.3.8 astroid = 1.3.8
......
...@@ -15,19 +15,19 @@ ...@@ -15,19 +15,19 @@
# not need these here). # not need these here).
[mariadb-resiliency-after-import-script] [mariadb-resiliency-after-import-script]
filename = instance-mariadb-resiliency-after-import-script.sh.in filename = instance-mariadb-resiliency-after-import-script.sh.in
md5sum = 844d62cd6f9d6e3d1d78d52de2b72a49 md5sum = c1f1083bf6c911a0e65dcb841fba327d
[mariadb-slow-query-report-script] [mariadb-slow-query-report-script]
filename = mysql-querydigest.sh.in filename = mysql-querydigest.sh.in
md5sum = cfe6ab8ae54a521ecb269e9d9762cbeb md5sum = cfe6ab8ae54a521ecb269e9d9762cbeb
[mariadb-slowquery-check-script] [mariadb-start-clone-from-backup]
filename = instance-mariadb-check-slowquery-result.sh.in filename = instance-mariadb-start-clone-from-backup.sh.in
md5sum = 356e0e2db1da0e8b479908fb739e5cc0 md5sum = 1af531c51f575a1d1362f2ca2d61620d
[template-mariadb] [template-mariadb]
filename = instance-mariadb.cfg.in filename = instance-mariadb.cfg.in
md5sum = 7ee2e801dda1181d1b42281e6466fc4d md5sum = 8ea5033142f450a2e90431817771cb44
[template-kumofs] [template-kumofs]
filename = instance-kumofs.cfg.in filename = instance-kumofs.cfg.in
...@@ -39,7 +39,7 @@ md5sum = 76f9e8c8cdc352081e34539d8fc17026 ...@@ -39,7 +39,7 @@ md5sum = 76f9e8c8cdc352081e34539d8fc17026
[template-zope-conf] [template-zope-conf]
filename = zope.conf.in filename = zope.conf.in
md5sum = bbea91bc8f0b2d455d9824928abfad5b md5sum = 3524ef2e14cea4a5bd40fdc9e95cfc0c
[site-zcml] [site-zcml]
filename = site.zcml filename = site.zcml
...@@ -51,7 +51,7 @@ md5sum = d814b984abf2dc444af2a0bc6287e7f5 ...@@ -51,7 +51,7 @@ md5sum = d814b984abf2dc444af2a0bc6287e7f5
[template-mariadb-initial-setup] [template-mariadb-initial-setup]
filename = mariadb_initial_setup.sql.in filename = mariadb_initial_setup.sql.in
md5sum = 6465212fdc7fe9076a0c929d9f14da14 md5sum = dec33a617fa1b307c8ddb883efcfe3ce
[template-postfix] [template-postfix]
filename = instance-postfix.cfg.in filename = instance-postfix.cfg.in
...@@ -71,7 +71,7 @@ md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0 ...@@ -71,7 +71,7 @@ md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0
[template] [template]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 258146a9d979b9981d18875fcac4de73 md5sum = 47d09a83d44f38d3ea62743f004e866b
[monitor-template-dummy] [monitor-template-dummy]
filename = dummy.cfg filename = dummy.cfg
...@@ -79,7 +79,7 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e ...@@ -79,7 +79,7 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = 909c8eb4f1f2f2d58ad982cec67228bc md5sum = 2465af81147af322056cee9f6c7de14f
[template-zeo] [template-zeo]
filename = instance-zeo.cfg.in filename = instance-zeo.cfg.in
...@@ -87,15 +87,11 @@ md5sum = d1f33d406d528ae27d973e2dd0efb1ba ...@@ -87,15 +87,11 @@ md5sum = d1f33d406d528ae27d973e2dd0efb1ba
[template-zope] [template-zope]
filename = instance-zope.cfg.in filename = instance-zope.cfg.in
md5sum = 27d26c6380883cf3bd7b2f003f7888d8 md5sum = fd7e8c507cef1950e6c0347ce2a01021
[template-balancer] [template-balancer]
filename = instance-balancer.cfg.in filename = instance-balancer.cfg.in
md5sum = d14ee7f13e2bd815cc96e28101e59670 md5sum = f64c568f1365eb1164f12f48fede9a99
[apdex-result-check-script]
filename = instance-balancer-check-apachedex-result.sh.in
md5sum = 421c68c97cadc49911382cd3185288a1
[template-haproxy-cfg] [template-haproxy-cfg]
filename = haproxy.cfg.in filename = haproxy.cfg.in
......
#!{{ bash }}
set -e
APACHEDEX_FILE='{{ apdex_file }}/ApacheDex-'$(date +%Y-%m-%d)'.html'
APACHEDEX_REPORT_JSON_FILE={{ apdex_status_file }}
DESIRED_THRESHOLD={{ user_threshold }}
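# Promise: exit 2 when the daily ApacheDex report is stale (older than 30 hours) or its overall apdex score is not above the threshold.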
# Check if the file is there
if [ ! -s "$APACHEDEX_FILE" ]; then
# If the file doesn't exist, create it
# If it is empty, check its modification time
if [ ! -f "$APACHEDEX_FILE" ]; then
touch $APACHEDEX_FILE
else
MODIFIED_DATE=`stat -c '%Z' $APACHEDEX_FILE`
CURRENT_DATE=`date +%s`
if [[ `echo "$CURRENT_DATE - $MODIFIED_DATE" | bc` -gt 108000 ]]
then
echo "File modification date is greater than 30 hours"
JSON_CONTENT=`cat $APACHEDEX_REPORT_STATUS_FILE`
MESSAGE=`echo $JSON_CONTENT | python -c 'import json,sys;obj=json.load(sys.stdin);print obj["message"]'`
echo $MESSAGE
exit 2
else
echo "File is empty for now"
fi
fi
else
# Check if the result exists
{
REGEX="Overall<\/h2><table .*><tr>[[:space:]]<th>apdex<\/th><th>.*?<tr>[[:space:]]<td [^<]*>(.*?)%<\/td>"
FILE_CONTENT=`cat $APACHEDEX_FILE`
if [[ $FILE_CONTENT =~ $REGEX ]]
then
RESULT=${BASH_REMATCH[1]}
RESULT=${RESULT:-0}
if [[ `echo "$RESULT > $DESIRED_THRESHOLD" | bc` -eq 1 ]]
then
echo "Your score is $RESULT %, Thanks for keeping it all clean"
else
echo "Threshold is lower than exptected: Expected was $DESIRED_THRESHOLD % and we current is $RESULT %"
exit 2
fi
else
echo "No threshold found in the result"
fi
} || {
echo "Cannot parse the apdex result"
}
fi
...@@ -288,7 +288,7 @@ apachedex = ${monitor-directory:private}/apachedex ...@@ -288,7 +288,7 @@ apachedex = ${monitor-directory:private}/apachedex
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
wrapper-path = ${monitor-directory:reports}/${:command} wrapper-path = ${monitor-directory:reports}/${:command}
command-line = "{{ parameter_dict['run-apachedex-location'] }}" "{{ parameter_dict['apachedex-location'] }}" "${directory:apachedex}" ${monitor-publish-parameters:monitor-base-url}/private/apachedex --apache-log-list "${apachedex-parameters:apache-log-list}" --config "${apachedex-parameters:configuration}" command-line = "{{ parameter_dict['run-apachedex-location'] }}" "{{ parameter_dict['apachedex-location'] }}" "${directory:apachedex}" ${monitor-publish-parameters:monitor-base-url}/private/apachedex --apache-log-list "${apachedex-parameters:apache-log-list}" --config "${apachedex-parameters:configuration}"
command = apachedex_every_3_hour command = apachedex_every_23_hour
[apachedex-parameters] [apachedex-parameters]
# XXX - Sample log file with current date: apache_access.log-%(date)s.gz # XXX - Sample log file with current date: apache_access.log-%(date)s.gz
...@@ -298,15 +298,9 @@ configuration = {{ slapparameter_dict['apachedex-configuration'] }} ...@@ -298,15 +298,9 @@ configuration = {{ slapparameter_dict['apachedex-configuration'] }}
promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }} promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}] [{{ section('monitor-promise-apachedex-result') }}]
recipe = slapos.recipe.template:jinja2 recipe = slapos.cookbook:wrapper
template = {{ parameter_dict['apdex-result-check-script'] }} wrapper-path = ${directory:promise}/check-apachedex-result
rendered = ${monitor-directory:promises}/check-apachedex-result command-line = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
status-file = ${monitor-directory:private}/apachedex.report.json
context =
raw bash {{ parameter_dict['bash'] }}/bin/bash
raw user_threshold ${apachedex-parameters:promise-threshold}
key apdex_file directory:apachedex
key apdex_status_file :status-file
[monitor-instance-parameter] [monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }} monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
......
...@@ -143,7 +143,7 @@ return = ...@@ -143,7 +143,7 @@ return =
zope-address-list zope-address-list
hosts-dict hosts-dict
monitor-base-url monitor-base-url
{% set bt5_default_list = 'erp5_full_text_myisam_catalog erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_ung erp5_configurator_run_my_doc' -%} {% set bt5_default_list = 'erp5_full_text_myisam_catalog erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_run_my_doc' -%}
{% if has_jupyter -%} {% if has_jupyter -%}
{% set bt5_default_list = bt5_default_list + ' erp5_data_notebook' -%} {% set bt5_default_list = bt5_default_list + ' erp5_data_notebook' -%}
{% endif -%} {% endif -%}
...@@ -155,6 +155,7 @@ config-deadlock-debugger-password = ${publish-early:deadlock-debugger-password} ...@@ -155,6 +155,7 @@ config-deadlock-debugger-password = ${publish-early:deadlock-debugger-password}
config-developer-list = {{ dumps(slapparameter_dict.get('developer-list', [inituser_login])) }} config-developer-list = {{ dumps(slapparameter_dict.get('developer-list', [inituser_login])) }}
config-hosts-dict = {{ dumps(slapparameter_dict.get('hosts-dict', {})) }} config-hosts-dict = {{ dumps(slapparameter_dict.get('hosts-dict', {})) }}
config-hostalias-dict = {{ dumps(slapparameter_dict.get('hostalias-dict', {})) }} config-hostalias-dict = {{ dumps(slapparameter_dict.get('hostalias-dict', {})) }}
config-id-store-interval = {{ dumps(slapparameter_dict.get('id-store-interval')) }}
config-inituser-login = {{ dumps(inituser_login) }} config-inituser-login = {{ dumps(inituser_login) }}
config-inituser-password = ${publish-early:inituser-password} config-inituser-password = ${publish-early:inituser-password}
config-kumofs-url = ${request-memcached-persistent:connection-url} config-kumofs-url = ${request-memcached-persistent:connection-url}
......
#!{{ bash }}
set -e
DIGEST_FILE='{{ slow_query_digest }}/slowquery_digest.txt'
SLOW_QUERY_STATUS_FILE='{{ slow_query_status }}'
DESIRED_MAX_QUERY_THRESHOLD={{ max_queries_threshold }}
DESIRED_SLOW_QUERY_THRESHOLD={{ slowest_queries_threshold }}
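# Promise: exit 2 when the slow-query digest is stale (older than 30 hours), or when the total query count or the slowest query time reaches its threshold.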
# Check if the file is there
if [ ! -s "$DIGEST_FILE" ]; then
# If the file doesn't exist, create it
# If it is empty, check its modification time
if [ ! -f "$DIGEST_FILE" ]; then
touch $DIGEST_FILE
else
MODIFIED_DATE=`stat -c '%Z' $DIGEST_FILE`
CURRENT_DATE=`date +%s`
if [[ `echo "$CURRENT_DATE - $MODIFIED_DATE" | bc` -gt 108000 ]]
then
echo "File modification date is greater than 30 hours"
JSON_CONTENT=`cat $SLOW_QUERY_STATUS_FILE`
MESSAGE=`echo $JSON_CONTENT | python -c 'import json,sys;obj=json.load(sys.stdin);print obj["message"]'`
echo $MESSAGE
exit 2
else
echo "File is empty for now"
fi
fi
else
# Check if the result exists
{
# get the total number of queries run and the max time
# TODO: improve regex
# TODO: improve the parameters (currently we use thresholds on query count and max execution time)
# # Overall: (.*) total,(?:.*\n){4}# Exec time(?: *\d*m?s){2} *(.*?)m?s
REGEX="# Overall: (.*) total,.*# Exec time *[[:digit:]]*m?s *[[:digit:]]*m?s *([[:digit:]]*)m?s"
FILE_CONTENT=`cat $DIGEST_FILE`
if [[ $FILE_CONTENT =~ $REGEX ]]
then
TOTAL_QUERIES_EXEC=${BASH_REMATCH[1]}
SLOWEST_QUERY_TIME=${BASH_REMATCH[2]}
HAS_K="${TOTAL_QUERIES_EXEC: -1}"
if [[ "$HAS_K" == "k" ]]
then
PRE="${TOTAL_QUERIES_EXEC::-1}"
TOTAL_QUERIES_EXEC=$(echo "scale=4; ${PRE:-0}*1000" | bc)
else
TOTAL_QUERIES_EXEC=${TOTAL_QUERIES_EXEC:-0}
fi
# TODO: support ms
SLOWEST_QUERY_TIME="${SLOWEST_QUERY_TIME:-0}"
if [[ `echo "$TOTAL_QUERIES_EXEC < $DESIRED_MAX_QUERY_THRESHOLD" | bc` -eq 1 && `echo "$SLOWEST_QUERY_TIME < $DESIRED_SLOW_QUERY_THRESHOLD" | bc` -eq 1 ]]
then
echo "Total number of slow queries are: $TOTAL_QUERIES_EXEC"
echo "Time taken by slowest query is: $SLOWEST_QUERY_TIME"
echo "Thanks for keeping it all clean"
else
echo "Ops! One of the two expected parameters did not meet"
echo "Time taken by slowest query is: $SLOWEST_QUERY_TIME s and required maximum is $DESIRED_SLOW_QUERY_THRESHOLD s"
echo "Total slow queries are $TOTAL_QUERIES_EXEC and expected maximum value is $DESIRED_MAX_QUERY_THRESHOLD"
exit 2
fi
else
echo "No threshold found in the result"
fi
} || {
echo "Cannot parse the result"
}
fi
...@@ -15,7 +15,6 @@ set -e ...@@ -15,7 +15,6 @@ set -e
mysql_executable='{{ mysql_executable }}' mysql_executable='{{ mysql_executable }}'
mariadb_data_directory='{{ mariadb_data_directory }}' mariadb_data_directory='{{ mariadb_data_directory }}'
mariadb_backup_directory='{{ mariadb_backup_directory }}' mariadb_backup_directory='{{ mariadb_backup_directory }}'
instance_directory='{{ instance_directory }}'
pid_file='{{ pid_file }}' pid_file='{{ pid_file }}'
binlog_path='{{ binlog_path }}' binlog_path='{{ binlog_path }}'
server_executable='{{ server_executable }}' server_executable='{{ server_executable }}'
...@@ -71,7 +70,7 @@ fi ...@@ -71,7 +70,7 @@ fi
echo "Importing data..." echo "Importing data..."
# Use latest dump XXX can contain funny characters # Use latest dump XXX can contain funny characters
dump=$(ls -r "$mariadb_backup_directory" | head -1) dump=$(ls -r "$mariadb_backup_directory" | head -1)
zcat "$mariadb_backup_directory/$dump" | $mysql_executable -u root --socket="$instance_directory/var/run/mariadb.sock" zcat "$mariadb_backup_directory/$dump" | $mysql_executable -u root
RESTORE_EXIT_CODE=$? RESTORE_EXIT_CODE=$?
if [ $RESTORE_EXIT_CODE -eq 0 ]; then if [ $RESTORE_EXIT_CODE -eq 0 ]; then
......
#!{{ dash }}
set -eu
if [ $# -ne 7 ]; then
echo "Bootstrap a mariadb instance from available backup data so it replicates from given master."
echo " $0 <BACKUP FILE> <MASTER HOST> <MASTER PORT> <MASTER USER> <MASTER SSL CA FILE> <MASTER SSL CERT FILE> <MASTER SSL KEY FILE>"
exit 1
fi
BACKUP=$1
MASTER_HOST=$2
MASTER_PORT=$3
MASTER_USER=$4
MASTER_SSL_CA=$5
MASTER_SSL_CERT=$6
MASTER_SSL_KEY=$7
CLIENT='{{ client }}'
DATA_DIRECTORY='{{ data_directory }}'
PID_FILE='{{ pid_file }}'
SERVER='{{ server }}'
UPDATE='{{ update }}'
SOCKET='{{ socket }}'
# Make sure mariadb is not already running
if [ -e "$PID_FILE" ]; then
PID=$(cat "$PID_FILE")
if [ $? -ne 0 ]; then
echo "Cannot read Mariadb pidfile, assuming running. Aborting."
exit 1
fi
if kill -0 "$PID"; then
echo "Mariadb is already running with pid $PID. Aborting."
exit 1
fi
fi
BACKUP_HEAD="$(zcat "$BACKUP" | head -n 100)"
SQL_CHANGE_MASTER=$(echo "$BACKUP_HEAD" | grep "^--\s*CHANGE MASTER TO " | sed "s/^--\s*//")
if [ -z "$SQL_CHANGE_MASTER" ]; then
echo "'CHANGE MASTER TO' statement not found in given backup file."
echo "Is replication enabled on future master ?"
exit 1
fi
SQL_SET_GTID="$(echo "$BACKUP_HEAD" | grep "^--\s*SET GLOBAL gtid_slave_pos=" | sed "s/^--\s*//")"
if [ -z "$SQL_SET_GTID" ]; then
echo "Info: GTID not found in backup, it will not be enabled."
MASTER_USE_GTID=0
else
echo "Info: GTID found in backup, it will be enabled."
MASTER_USE_GTID=1
fi
echo "EXISTING DATABASE CONTENT WILL BE DESTROYED"
echo "You have 5 seconds to interrupt this script..."
if sleep 5; then
echo "Expired, proceeding"
else
echo "Interrupted, aborting"
exit 1
fi
echo "Emptying data directory..."
find "$DATA_DIRECTORY" -mindepth 1 -delete
echo -n "Starting mariadb for backup restoration"
"$SERVER" --innodb-flush-method=nosync --skip-innodb-doublewrite --innodb-flush-log-at-trx-commit=0 --sync-frm=0 --slow-query-log=0 --skip-log-bin &
PID=$!
trap "kill $PID; wait; exit 1" EXIT
while true; do
if [ ! -e "/proc/$PID" ]; then
trap EXIT
echo "Service exited, check logs"
wait
exit 1
fi
test -e "$SOCKET" && break
echo -n .
sleep 0.5
done
"$UPDATE"
echo "Importing $BACKUP ..."
zcat "$BACKUP" | "$CLIENT" -u root
echo "Configuring server as slave..."
if [ "$MASTER_USE_GTID" -eq 1 ]; then
MASTER_USE_GTID_SQL="current_pos"
else
MASTER_USE_GTID_SQL="NO"
fi
"$CLIENT" -u root -e "
CHANGE MASTER TO
MASTER_HOST='$MASTER_HOST',
MASTER_USER='$MASTER_USER',
MASTER_PORT=$MASTER_PORT,
MASTER_SSL=1,
MASTER_SSL_CA='$MASTER_SSL_CA',
MASTER_SSL_CERT='$MASTER_SSL_CERT',
MASTER_SSL_KEY='$MASTER_SSL_KEY',
MASTER_SSL_VERIFY_SERVER_CERT=1,
MASTER_USE_GTID=$MASTER_USE_GTID_SQL;
"
"$CLIENT" -u root -e "$SQL_CHANGE_MASTER"
test "$MASTER_USE_GTID" -eq 1 && "$CLIENT" -u root -e "$SQL_SET_GTID"
"$CLIENT" -u root -e "START SLAVE;"
echo "Stopping mariadb..."
trap EXIT
kill $PID
wait
echo "Done. Start mariadb normally. You may use 'show slave status' SQL command to monitor progress."
...@@ -252,6 +252,7 @@ mariadb-ssl = ${:etc}/mariadb-ssl ...@@ -252,6 +252,7 @@ mariadb-ssl = ${:etc}/mariadb-ssl
var = ${buildout:directory}/var var = ${buildout:directory}/var
log = ${:var}/log log = ${:var}/log
run = ${:var}/run run = ${:var}/run
slowquery = ${monitor-directory:private}/slowquerydex
[{{ section('resiliency-exclude-file') }}] [{{ section('resiliency-exclude-file') }}]
# Generate rdiff exclude file in case of resiliency # Generate rdiff exclude file in case of resiliency
...@@ -262,7 +263,20 @@ rendered = ${directory:srv}/exporter.exclude ...@@ -262,7 +263,20 @@ rendered = ${directory:srv}/exporter.exclude
[dash] [dash]
dash = {{ dumps(dash) }} dash = {{ dumps(dash) }}
[resiliency-after-import-script] [{{ section('start-clone-from-backup') }}]
< = jinja2-template-executable
template = {{ parameter_dict['mariadb-start-clone-from-backup'] }}
rendered = ${directory:bin}/start-clone-from-backup
context =
key dash dash:dash
key client binary-wrap-mysql:wrapper-path
key data_directory directory:mariadb-data
key pid_file my-cnf-parameters:pid-file
key server mysqld:rendered
key update update-mysql:output
key socket my-cnf-parameters:socket
[{{ section('resiliency-after-import-script') }}]
# Generate after import script used by importer instance of webrunner # Generate after import script used by importer instance of webrunner
< = jinja2-template-executable < = jinja2-template-executable
template = {{ parameter_dict['mariadb-resiliency-after-import-script'] }} template = {{ parameter_dict['mariadb-resiliency-after-import-script'] }}
...@@ -272,7 +286,6 @@ context = ...@@ -272,7 +286,6 @@ context =
key mysql_executable binary-wrap-mysql:wrapper-path key mysql_executable binary-wrap-mysql:wrapper-path
key mariadb_data_directory directory:mariadb-data key mariadb_data_directory directory:mariadb-data
key mariadb_backup_directory directory:mariadb-backup-full key mariadb_backup_directory directory:mariadb-backup-full
key instance_directory buildout:directory
key pid_file my-cnf-parameters:pid-file key pid_file my-cnf-parameters:pid-file
key binlog_path my-cnf-parameters:binlog-path key binlog_path my-cnf-parameters:binlog-path
key server_executable mysqld:rendered key server_executable mysqld:rendered
...@@ -288,24 +301,17 @@ context = ...@@ -288,24 +301,17 @@ context =
raw slow_query_path ${directory:srv}/backup/logrotate/mariadb_slowquery.log raw slow_query_path ${directory:srv}/backup/logrotate/mariadb_slowquery.log
raw pt_query_exec ${binary-wrap-pt-digest:wrapper-path} raw pt_query_exec ${binary-wrap-pt-digest:wrapper-path}
raw dash {{ parameter_dict['dash-location'] }}/bin/dash raw dash {{ parameter_dict['dash-location'] }}/bin/dash
key output_folder monitor-directory:private key output_folder directory:slowquery
[slow-query-digest-parameters] [slow-query-digest-parameters]
max_queries_threshold = {{ slapparameter_dict['max-slowqueries-threshold'] }} max_queries_threshold = {{ slapparameter_dict['max-slowqueries-threshold'] }}
slowest_queries_threshold = {{ slapparameter_dict['slowest-query-threshold'] }} slowest_queries_threshold = {{ slapparameter_dict['slowest-query-threshold'] }}
[{{ section('monitor-promise-slowquery-result') }}] [{{ section('monitor-promise-slowquery-result') }}]
recipe = slapos.recipe.template:jinja2 recipe = slapos.cookbook:wrapper
template = {{ parameter_dict['mariadb-slowquery-check-script'] }} wrapper-path = ${directory:promise}/check-slow-query-pt-digest-result
rendered = ${monitor-directory:promises}/mariadb-slow-queries-result command-line = "{{ parameter_dict['promise-check-slow-queries-digest-result'] }}" --ptdigest_path "${directory:slowquery}" --status_file ${monitor-directory:private}/mariadb_slow_query.report.json --max_queries_threshold "${slow-query-digest-parameters:max_queries_threshold}" --slowest_query_threshold "${slow-query-digest-parameters:slowest_queries_threshold}"
status-file = ${monitor-directory:private}/mariadb_slow_query.report.json
context =
raw default_threshold 4000
raw bash {{ parameter_dict['bash'] }}/bin/bash
key slow_query_digest monitor-directory:private
key slow_query_status :status-file
key max_queries_threshold slow-query-digest-parameters:max_queries_threshold
key slowest_queries_threshold slow-query-digest-parameters:slowest_queries_threshold
[{{ section('promise') }}] [{{ section('promise') }}]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
......
...@@ -230,6 +230,7 @@ sql-connection-string = {{ '%s@erp5-catalog-0:%s %s %s' % ( ...@@ -230,6 +230,7 @@ sql-connection-string = {{ '%s@erp5-catalog-0:%s %s %s' % (
mysql.path.split('/')[1], mysql.port, mysql.username, mysql.password) }} mysql.path.split('/')[1], mysql.port, mysql.username, mysql.password) }}
bt5 = {{ slapparameter_dict['bt5'] }} bt5 = {{ slapparameter_dict['bt5'] }}
bt5-repository-url = {{ slapparameter_dict['bt5-repository-url'] }} bt5-repository-url = {{ slapparameter_dict['bt5-repository-url'] }}
id-store-interval = {{ dumps(slapparameter_dict['id-store-interval']) }}
home = ${buildout:directory} home = ${buildout:directory}
# We only want to change the hostname to 'erp5-cloudooo' if we use the internal # We only want to change the hostname to 'erp5-cloudooo' if we use the internal
# cloudooo. We plan to remove the ability to have an internal one, so this # cloudooo. We plan to remove the ability to have an internal one, so this
......
...@@ -93,16 +93,12 @@ openssl-location = {{ openssl_location }} ...@@ -93,16 +93,12 @@ openssl-location = {{ openssl_location }}
[dynamic-template-balancer-parameters] [dynamic-template-balancer-parameters]
apache = {{ apache_location }} apache = {{ apache_location }}
apdex-result-check-script = {{ apdex_result_check_script }}
openssl = {{ openssl_location }} openssl = {{ openssl_location }}
haproxy = {{ haproxy_location }} haproxy = {{ haproxy_location }}
bin-directory = {{ bin_directory }} bin-directory = {{ bin_directory }}
apachedex-location = {{ bin_directory }}/apachedex apachedex-location = {{ bin_directory }}/apachedex
run-apachedex-location = {{ bin_directory }}/runApacheDex run-apachedex-location = {{ bin_directory }}/runApacheDex
6tunnel = {{ sixtunnel_location }} promise-check-apachedex-result = {{ bin_directory }}/check-apachedex-result
curl-location = {{ curl_location }}
dash = {{ dash_location }}
bash = {{ bash_location }}
template-haproxy-cfg = {{ template_haproxy_cfg }} template-haproxy-cfg = {{ template_haproxy_cfg }}
template-apache-conf = {{ template_apache_conf }} template-apache-conf = {{ template_apache_conf }}
template-monitor = {{ dumps(template_monitor) }} template-monitor = {{ dumps(template_monitor) }}
...@@ -183,7 +179,8 @@ link-binary = {{ dumps(mariadb_link_binary) }} ...@@ -183,7 +179,8 @@ link-binary = {{ dumps(mariadb_link_binary) }}
bin-directory = {{ bin_directory }} bin-directory = {{ bin_directory }}
mariadb-resiliency-after-import-script = {{ mariadb_resiliency_after_import_script }} mariadb-resiliency-after-import-script = {{ mariadb_resiliency_after_import_script }}
mariadb-slow-query-report-script = {{ mariadb_slow_query_report_script }} mariadb-slow-query-report-script = {{ mariadb_slow_query_report_script }}
mariadb-slowquery-check-script = {{ mariadb_slowquery_check_script}} mariadb-start-clone-from-backup = {{ mariadb_start_clone_from_backup }}
promise-check-slow-queries-digest-result = {{ bin_directory }}/check-slow-queries-digest-result
percona-tools-location = {{ percona_toolkit_location }} percona-tools-location = {{ percona_toolkit_location }}
template-monitor = {{ template_monitor }} template-monitor = {{ template_monitor }}
......
USE mysql; USE mysql;
DROP FUNCTION IF EXISTS last_insert_grn_id; DROP FUNCTION IF EXISTS last_insert_grn_id;
DROP FUNCTION IF EXISTS mroonga_snippet;
DROP FUNCTION IF EXISTS mroonga_command;
{% set mroonga = parameter_dict.get('mroonga', 'ha_mroonga.so') -%} {% set mroonga = parameter_dict.get('mroonga', 'ha_mroonga.so') -%}
{% if mroonga %}CREATE FUNCTION last_insert_grn_id RETURNS INTEGER SONAME '{{ mroonga }}';{% endif %} {% if mroonga %}CREATE FUNCTION last_insert_grn_id RETURNS INTEGER SONAME '{{ mroonga }}';
CREATE FUNCTION mroonga_snippet RETURNS STRING SONAME '{{ mroonga }}';
CREATE FUNCTION mroonga_command RETURNS STRING SONAME '{{ mroonga }}';{% endif %}
DROP FUNCTION IF EXISTS sphinx_snippets; DROP FUNCTION IF EXISTS sphinx_snippets;
#CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'ha_sphinx.so'; #CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'ha_sphinx.so';
......
...@@ -86,6 +86,9 @@ products {{ parameter_dict['instance-products'] }} ...@@ -86,6 +86,9 @@ products {{ parameter_dict['instance-products'] }}
cmf_activity_sql_connection_string {{ sql_connection_string }} cmf_activity_sql_connection_string {{ sql_connection_string }}
bt5_repository_url {{ ' '.join(bt5_repository_url) }} bt5_repository_url {{ ' '.join(bt5_repository_url) }}
bt5 {{ parameter_dict['bt5'] }} bt5 {{ parameter_dict['bt5'] }}
{%- if parameter_dict['id-store-interval'] != None %}
id_store_interval {{ parameter_dict['id-store-interval'] }}
{%- endif %}
cloudooo_url {{ parameter_dict['cloudooo-url'] }} cloudooo_url {{ parameter_dict['cloudooo-url'] }}
</product-config> </product-config>
......
...@@ -42,7 +42,7 @@ keep-compile-dir = false ...@@ -42,7 +42,7 @@ keep-compile-dir = false
apache-libcloud = 0.9.1 apache-libcloud = 0.9.1
async = 0.6.1 async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
mysqlclient = 1.3.10+SlapOSPatched002 mysqlclient = 1.3.12
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.3 slapos.recipe.template = 2.3
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
......
...@@ -185,7 +185,7 @@ eggs = ...@@ -185,7 +185,7 @@ eggs =
apache-libcloud = 0.12.3 apache-libcloud = 0.12.3
async = 0.6.1 async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
mysqlclient = 1.3.10+SlapOSPatched002 mysqlclient = 1.3.12
pycrypto = 2.6 pycrypto = 2.6
rdiff-backup = 1.0.5+SlapOSPatched001 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
......
...@@ -131,5 +131,5 @@ depends = ...@@ -131,5 +131,5 @@ depends =
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
pycurl = 7.43.0 pycurl = 7.43.0
slapos.toolbox = 0.71 slapos.toolbox = 0.73
...@@ -53,6 +53,9 @@ allow-hosts += ...@@ -53,6 +53,9 @@ allow-hosts +=
www.dabeaz.com www.dabeaz.com
www.owlfish.com www.owlfish.com
# Use an https index
index = https://pypi.python.org/simple/
# XXX: Workaround of SlapOS limitation # XXX: Workaround of SlapOS limitation
# Unzipping of eggs is required, as SlapOS does not yet provide a nicely working # Unzipping of eggs is required, as SlapOS does not yet provide a nicely working
# development / fast switching environment for whole software # development / fast switching environment for whole software
...@@ -75,8 +78,6 @@ recipe = zc.recipe.egg:develop ...@@ -75,8 +78,6 @@ recipe = zc.recipe.egg:develop
setup = ${slapos.cookbook-repository:location} setup = ${slapos.cookbook-repository:location}
# Install slapos.cookbook containing all officials recipes # Install slapos.cookbook containing all officials recipes
# Explicitly define dependencies as well, because of a buildout limitation
# if using "develop". XXX really?
[slapos-cookbook] [slapos-cookbook]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
...@@ -88,19 +89,10 @@ eggs = ...@@ -88,19 +89,10 @@ eggs =
pyOpenSSL pyOpenSSL
slapos.cookbook slapos.cookbook
slapos.libnetworkcache slapos.libnetworkcache
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
[versions] [versions]
# Use SlapOS patched zc.buildout # Use SlapOS patched zc.buildout
zc.buildout = 2.5.2+slapos009 zc.buildout = 2.5.2+slapos011
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2) # Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 2.0.3+slapos003 zc.recipe.egg = 2.0.3+slapos003
# Use own version of h.r.download to be able to open .xz and .lz archives # Use own version of h.r.download to be able to open .xz and .lz archives
...@@ -116,10 +108,10 @@ click = 6.7 ...@@ -116,10 +108,10 @@ click = 6.7
cliff = 2.4.0 cliff = 2.4.0
cmd2 = 0.7.0 cmd2 = 0.7.0
collective.recipe.template = 2.0 collective.recipe.template = 2.0
cryptography = 2.0.3 cryptography = 2.1.1
decorator = 4.0.11 decorator = 4.0.11
idna = 2.2 idna = 2.2
inotifyx = 0.2.2 inotify-simple = 1.1.1
itsdangerous = 0.24 itsdangerous = 0.24
lock-file = 2.0 lock-file = 2.0
lxml = 3.7.3 lxml = 3.7.3
...@@ -136,7 +128,7 @@ requests = 2.13.0 ...@@ -136,7 +128,7 @@ requests = 2.13.0
setuptools = 33.1.1 setuptools = 33.1.1
six = 1.10.0 six = 1.10.0
slapos.cookbook = 1.0.53 slapos.cookbook = 1.0.53
slapos.core = 1.4.0 slapos.core = 1.4.3
slapos.extension.strip = 0.4 slapos.extension.strip = 0.4
slapos.libnetworkcache = 0.15 slapos.libnetworkcache = 0.15
slapos.rebootstrap = 4.1 slapos.rebootstrap = 4.1
...@@ -148,7 +140,7 @@ xml-marshaller = 0.9.7 ...@@ -148,7 +140,7 @@ xml-marshaller = 0.9.7
paramiko = 2.1.3 paramiko = 2.1.3
# Required by: # Required by:
# slapos.core==1.4.0 # slapos.core==1.4.3
Flask = 0.12 Flask = 0.12
# Required by: # Required by:
...@@ -168,7 +160,7 @@ ipaddress = 1.0.18 ...@@ -168,7 +160,7 @@ ipaddress = 1.0.18
jsonschema = 2.6.0 jsonschema = 2.6.0
# Required by: # Required by:
# slapos.core==1.4.0 # slapos.core==1.4.3
# XXX 'slapos node format' raises an exception with netifaces 0.10.5. # XXX 'slapos node format' raises an exception with netifaces 0.10.5.
netifaces = 0.10.4 netifaces = 0.10.4
...@@ -181,15 +173,15 @@ packaging = 16.8 ...@@ -181,15 +173,15 @@ packaging = 16.8
pycparser = 2.17 pycparser = 2.17
# Required by: # Required by:
# slapos.core==1.4.0 # slapos.core==1.4.3
supervisor = 3.3.3 supervisor = 3.3.3
# Required by: # Required by:
# slapos.core==1.4.0 # slapos.core==1.4.3
uritemplate = 3.0.0 uritemplate = 3.0.0
# Required by: # Required by:
# slapos.core==1.4.0 # slapos.core==1.4.3
zope.interface = 4.3.3 zope.interface = 4.3.3
[networkcache] [networkcache]
...@@ -197,7 +189,6 @@ download-cache-url = http://download.shacache.org/ ...@@ -197,7 +189,6 @@ download-cache-url = http://download.shacache.org/
download-dir-url = http://dir.shacache.org/ download-dir-url = http://dir.shacache.org/
# signature certificates of the following uploaders. # signature certificates of the following uploaders.
# Romain Courteaud
# Sebastien Robin # Sebastien Robin
# Kazuhiko Shiozaki # Kazuhiko Shiozaki
# Gabriel Monnerat # Gabriel Monnerat
...@@ -206,20 +197,8 @@ download-dir-url = http://dir.shacache.org/ ...@@ -206,20 +197,8 @@ download-dir-url = http://dir.shacache.org/
# Rafael Monnerat # Rafael Monnerat
# Ivan Tyagov # Ivan Tyagov
# Julien Muchembled # Julien Muchembled
# Yusei Tahara
signature-certificate-list = signature-certificate-list =
-----BEGIN CERTIFICATE-----
MIIB4DCCAUkCADANBgkqhkiG9w0BAQsFADA5MQswCQYDVQQGEwJGUjEZMBcGA1UE
CBMQRGVmYXVsdCBQcm92aW5jZTEPMA0GA1UEChMGTmV4ZWRpMB4XDTExMDkxNTA5
MDAwMloXDTEyMDkxNTA5MDAwMlowOTELMAkGA1UEBhMCRlIxGTAXBgNVBAgTEERl
ZmF1bHQgUHJvdmluY2UxDzANBgNVBAoTBk5leGVkaTCBnzANBgkqhkiG9w0BAQEF
AAOBjQAwgYkCgYEApYZv6OstoqNzxG1KI6iE5U4Ts2Xx9lgLeUGAMyfJLyMmRLhw
boKOyJ9Xke4dncoBAyNPokUR6iWOcnPHtMvNOsBFZ2f7VA28em3+E1JRYdeNUEtX
Z0s3HjcouaNAnPfjFTXHYj4um1wOw2cURSPuU5dpzKBbV+/QCb5DLheynisCAwEA
ATANBgkqhkiG9w0BAQsFAAOBgQBCZLbTVdrw3RZlVVMFezSHrhBYKAukTwZrNmJX
mHqi2tN8tNo6FX+wmxUUAf3e8R2Ymbdbn2bfbPpcKQ2fG7PuKGvhwMG3BlF9paEC
q7jdfWO18Zp/BG7tagz0jmmC4y/8akzHsVlruo2+2du2freE8dK746uoMlXlP93g
QUUGLQ==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE----- -----BEGIN CERTIFICATE-----
MIIB8jCCAVugAwIBAgIJAPu2zchZ2BxoMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNV MIIB8jCCAVugAwIBAgIJAPu2zchZ2BxoMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNV
BAMMB3RzeGRldjMwHhcNMTExMDE0MTIxNjIzWhcNMTIxMDEzMTIxNjIzWjASMRAw BAMMB3RzeGRldjMwHhcNMTExMDE0MTIxNjIzWhcNMTIxMDEzMTIxNjIzWjASMRAw
...@@ -324,3 +303,16 @@ signature-certificate-list = ...@@ -324,3 +303,16 @@ signature-certificate-list =
SOvU9E1802JN7Q1mrMbgJeNmk+18o4ze7ABKljqtvo+ZjUXthuWw3BNt+PtQjlX3 SOvU9E1802JN7Q1mrMbgJeNmk+18o4ze7ABKljqtvo+ZjUXthuWw3BNt+PtQjlX3
5wE+Yc4sypYGmAfU7Gc= 5wE+Yc4sypYGmAfU7Gc=
-----END CERTIFICATE----- -----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIICAjCCAWugAwIBAgIJAP/k6bltwfZCMA0GCSqGSIb3DQEBCwUAMBkxFzAVBgNV
BAMMDmxvY2FsX2NvbXB1dGVyMCAXDTE3MTEwOTA2NTU1M1oYDzIxMTcxMDE2MDY1
NTUzWjAZMRcwFQYDVQQDDA5sb2NhbF9jb21wdXRlcjCBnzANBgkqhkiG9w0BAQEF
AAOBjQAwgYkCgYEAuWN8O1PQ23mulMu45fDxTHeoI33MDWDLHP9gthsMbTplftDf
k8BAKCp/ugbAkCD5LTx/6RH1RdyM/LApUI/n+fxFnnVZn8Fyuzwu/TlvGdNT5yrj
RhBkcxeCpWWz9Ysezj9jVnK+TCSjJeb/N55aMgXAVjrIeJkR4SkPDdMBtcMCAwEA
AaNQME4wHQYDVR0OBBYEFJ+1cOY78rvaNAzqshFjVOWfqoMzMB8GA1UdIwQYMBaA
FJ+1cOY78rvaNAzqshFjVOWfqoMzMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEL
BQADgYEAJDLdo6fXZxEaHYJuZ6XuYPNAYAcSOiSbJoMSSz7qQJ+4FWPGCBnhiZbU
SyslQFs59yqNxb046uKi7D4JeUd0zdBO3TtceGRK3iyEFNx8GF+em/6pNnC3A/XH
+L1VRZsQdvh7NHsl/dVvWmmlmOANQ/+JkLbe98c/rCmBDH48Ldc=
-----END CERTIFICATE-----
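For context on how the pins changed above take effect: zc.buildout holds every egg listed in the [versions] section to the given release, and the index option in [buildout] selects the package index used to fetch them. A minimal sketch, assuming a hypothetical profile named software.cfg (the file name and the two pins shown are illustrative only, taken from the new values in this diff):

    [buildout]
    extends = software.cfg
    # Point buildout at the https package index.
    index = https://pypi.python.org/simple/

    [versions]
    # Hold these eggs to the exact releases used by this software release.
    slapos.toolbox = 0.73
    slapos.core = 1.4.3

Pinning every egg this way is what keeps a software release reproducible across rebuilds on different machines.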