diff --git a/.husky/.gitignore b/.husky/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..31354ec1389994b5f6708c7d915fdcc6bb76ba6e --- /dev/null +++ b/.husky/.gitignore @@ -0,0 +1 @@ +_ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 0000000000000000000000000000000000000000..36af219892fda8ea669cd4b6725cd7b892231967 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +npx lint-staged diff --git a/component/ZODB/test-zodb4-wc2.cfg b/component/ZODB/test-zodb4-wc2.cfg index 7f0182e281fa6081cb76a34e7846c16946f99e02..46d57d91cccbb11a5a7d70a625cdbd1edc57eb16 100644 --- a/component/ZODB/test-zodb4-wc2.cfg +++ b/component/ZODB/test-zodb4-wc2.cfg @@ -109,7 +109,6 @@ template = inline: [versions] -mock = 3.0.5 random2 = 1.0.1 zope.testing = 4.7 zope.testrunner = 5.2 diff --git a/component/apache/buildout.cfg b/component/apache/buildout.cfg index 389c1deb0054e8183f633a126cb8b1c243fbc4a0..5871d0edaed0ab32394854598668c6011d07393c 100644 --- a/component/apache/buildout.cfg +++ b/component/apache/buildout.cfg @@ -125,4 +125,3 @@ environment = recipe = slapos.recipe.build:download shared = true url = ${:_profile_base_location_}/${:filename} -mode = 640 diff --git a/component/bash/buildout.cfg b/component/bash/buildout.cfg index 0fb0b825d27c7681226a1c4e96ffc54e3c608f8f..54549e33107b59aff390fd49b6cbb864166e02ef 100644 --- a/component/bash/buildout.cfg +++ b/component/bash/buildout.cfg @@ -1,68 +1,35 @@ [buildout] -parts = - bash - extends = + ../gettext/buildout.cfg ../ncurses/buildout.cfg ../patch/buildout.cfg +parts = + bash [bash] recipe = slapos.recipe.cmmi shared = true -url = http://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz -md5sum = 81348932d5da294953e15d4814c74dd1 +url-prefix = https://ftp.gnu.org/pub/gnu/bash/bash-5.1 +url = ${:url-prefix}.tar.gz +md5sum = bb91a17fd6c9032c26d0b2b78b50aff5 +patch-binary = ${patch:location}/bin/patch +patch-prefix = ${:url-prefix}-patches/bash51 patches = - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-001#1ab682b4e36afa4cf1b426aa7ac81c0d - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-002#8fc22cf50ec85da00f6af3d66f7ddc1b - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-003#a41728eca78858758e26b5dea64ae506 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-004#bf8d53d227829d67235927689a03cc7a - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-005#c0c00935c8b8ffff76e8ab77e7be7d15 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-006#6f01e364cd092faa28dd7119f47ddb5f - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-007#dcf471d222bcd83283d3094e6ceeb6f8 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-008#f7553416646dc26c266454c78a916d36 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-009#7e73d2151f4064b484a4ba2c4b09960e - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-010#a275463d21735bb6d7161f9fbd320d8f - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-011#c17103ee20420d77e46b224c8d3fceda - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-012#3e2a057a19d02b3f92a3a09eacbc03ae - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-013#fb377143a996d4ff087a2771bc8332f9 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-014#1a1aaecc99a9d0cbc310e8e247dcc8b6 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-015#4f04387458a3c1b4d460d199f49991a8 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-016#90e759709720c4f877525bebc9d5dc06 - 
http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-017#11e4046e1b86070f6adbb7ffc89641be - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-018#cd5a9b46f5bea0dc0248c93c7dfac011 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-019#cff4dc024d9d3456888aaaf8a36ca774 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-020#167839c5f147347f4a03d88ab97ff787 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-021#1d350671c48dec30b34d8b81f09cd79d - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-022#11c349af66a55481a3215ef2520bec36 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-023#b3cb0d80fd0c47728264405cbb3b23c7 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-024#b5ea5600942acceb4b6f07313d2de74e - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-025#193c06f578d38ffdbaebae9c51a7551f - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-026#922578e2be7ed03729454e92ee8d3f3a - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-027#8ff6948b16f2db5c29b1b9ae1085bbe7 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-028#dd51fa67913b5dca45a702b672b3323f - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-029#0729364c977ef4271e9f8dfafadacf67 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-030#efb709fdb1368945513de23ccbfae053 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-031#236df1ac1130a033ed0dbe2d2115f28f - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-032#2360f7e79cfb28526f80021025ea5909 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-033#b551c4ee7b8713759e4143499d0bbd48 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-034#c9a56fbe0348e05a886dff97f2872b74 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-035#e564e8ab44ed1ca3a4e315a9f6cabdc9 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-036#b00ff66c41a7c0f06e191200981980b0 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-037#be2a7b05f6ae560313f3c9d5f7127bda - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-038#61e0522830b24fbe8c0d1b010f132470 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-039#a4775487abe958536751c8ce53cdf6f9 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-040#80d3587c58854e226055ef099ffeb535 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-041#20bf63eef7cb441c0b1cc49ef3191d03 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-042#70790646ae61e207c995e44931390e50 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-043#855a46955cb251534e80b4732b748e37 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-044#29623d3282fcbb37e1158136509b5bb8 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-045#4473244ca5abfd4b018ea26dc73e7412 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-046#7e5fb09991c077076b86e0e057798913 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-047#8483153bad1a6f52cadc3bd9a8df7835 - http://ftp.gnu.org/pub/gnu/bash/bash-4.3-patches/bash43-048#e9f5dc12a32b2e0d3961344e794f92b3 + ${:patch-prefix}-001#57641ddbf92fca25df92a443e36f285a + ${:patch-prefix}-002#aed44842ed1a05fcfc3ef146991fdaef + ${:patch-prefix}-003#bf96455600a86420d69f5166575192dd + ${:patch-prefix}-004#d2c524dba0eea5dc5f00849cc84376a0 + ${:patch-prefix}-005#5081278e6c35154e28d09f582251c529 + ${:patch-prefix}-006#f4a8bcda4b7bd2c72b29c107027608a3 + ${:patch-prefix}-007#bf7816d63ee0476054bf18a488d8bb1b + ${:patch-prefix}-008#7e5a30d864f834953b22a55c01c8690b + ${:patch-prefix}-009#8e35f11cbfcefe2c07c64d00601fd713 + 
${:patch-prefix}-010#d78ad19986c0355a8d67c9a0e82ad4aa + ${:patch-prefix}-011#2416386b5ee94e499ccbf71f6fd4aebd + ${:patch-prefix}-012#879b2d8a03162faebb7234c4cd57c5cd configure-options = --with-curses environment = CPPFLAGS=-I${ncurses:location}/include LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib - PATH=${patch:location}/bin:%(PATH)s + PATH=${gettext:location}/bin:%(PATH)s diff --git a/component/bazel/buildout.cfg b/component/bazel/buildout.cfg index de66224ce07c36a62492f1713a3a531c033548f3..b2084698d0024923b9972b05014c27eb0f1fb2c6 100644 --- a/component/bazel/buildout.cfg +++ b/component/bazel/buildout.cfg @@ -9,16 +9,14 @@ parts = do-not-strip-path = ${buildout:parts-directory}/bazel/bin/bazel [zulu] -recipe = hexagonit.recipe.download -ignore-existing = true +recipe = slapos.recipe.build:download-unpacked +shared = true url = http://cdn.azul.com/zulu/bin/zulu8.20.0.5-jdk8.0.121-linux_x64.tar.gz md5sum = e5f4b1d997e50ffe4998c68c8ec45403 -strip-top-level-dir = true [template-bazel-crosstool] recipe = slapos.recipe.template:jinja2 location = ${buildout:parts-directory}/${:_buildout_section_name_} -mode = 640 filename = bazel_tools_cpp_CROSSTOOL template = ${:_profile_base_location_}/${:filename}.in rendered = ${:location}/${:filename} @@ -29,7 +27,6 @@ context = [template-bazel-src-main-cpp-build] recipe = slapos.recipe.template:jinja2 location = ${buildout:parts-directory}/${:_buildout_section_name_} -mode = 640 filename = bazel_src_main_cpp_BUILD template = ${:_profile_base_location_}/${:filename}.in rendered = ${:location}/${:filename} diff --git a/component/boa/buildout.cfg b/component/boa/buildout.cfg deleted file mode 100644 index 79378523c7164dbbe2818396bf6720a009cd5bed..0000000000000000000000000000000000000000 --- a/component/boa/buildout.cfg +++ /dev/null @@ -1,39 +0,0 @@ -[buildout] -extends = - ../patch/buildout.cfg - -parts = boa - -[boa-patch-ENOSYS] -recipe = hexagonit.recipe.download -ignore-existing = true -url = http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/www-servers/boa/files/boa-0.94.14_rc21-ENOSYS.patch?revision=1.1 -filename = boa-0.94.14_rc21-ENOSYS.patch -md5sum = 7206b342195961501ed1eae38486e5db -download-only = true - - -[boa] -recipe = slapos.recipe.build -url = http://www.boa.org/boa-0.94.14rc21.tar.gz -md5sum = e24b570bd767a124fcfb40a34d148ba9 -patches = - ${boa-patch-ENOSYS:location}/${boa-patch-ENOSYS:filename} -install = - import shutil - import os - url = self.download(self.options['url'], self.options['md5sum']) - extract_dir = self.extract(url) - workdir = guessworkdir(extract_dir) - self.applyPatchList(self.options.get('patches'), '-p1', cwd=workdir) - call(['./configure'], cwd=workdir) - call(['make'], cwd=workdir) - # Installation of boa. 
Manually, no make install - bindir = os.path.join(location, 'bin') - os.makedirs(bindir) - for name in 'boa', 'boa_indexer': - path = os.path.join(bindir, name) - shutil.copyfile(os.path.join(workdir, 'src', name), path) - os.chmod(path, 0755) -environment = - PATH=${patch:location}/bin:%(PATH)s diff --git a/component/chromedriver/buildout.cfg b/component/chromedriver/buildout.cfg index 5df9b8342edcb1a66db35a24f46b73f4eb717a14..ded44b7a710769bdc8cdccc52f80b6bb5f1044fa 100644 --- a/component/chromedriver/buildout.cfg +++ b/component/chromedriver/buildout.cfg @@ -71,7 +71,7 @@ version = md5sum-x86_64 = recipe = slapos.recipe.build -x86-64 = https://chromedriver.storage.googleapis.com/${:version}/chromedriver_linux64.zip ${:md5sum-x86_64} +x86_64-linux-gnu = https://chromedriver.storage.googleapis.com/${:version}/chromedriver_linux64.zip ${:md5sum-x86_64} library = ${nss:location}/lib ${nspr:location}/lib @@ -79,6 +79,6 @@ library = path = install = import os, shutil - extract_dir = self.extract(self.download(*options[guessPlatform()].split())) + extract_dir = self.extract(self.download(*options[multiarch()].split())) os.mkdir(location) shutil.copy(os.path.join(extract_dir, 'chromedriver'), location) diff --git a/component/chromium/buildout.cfg b/component/chromium/buildout.cfg index aa709b81845c424fa209e6264acabbec6c19fb8d..95cc3902be6d2b1ac599660f0efa89b83ed5d15e 100644 --- a/component/chromium/buildout.cfg +++ b/component/chromium/buildout.cfg @@ -116,7 +116,7 @@ md5sum-x86_64 = recipe = slapos.recipe.build -x86-64 = https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F${:revision_x86-64}%2Fchrome-linux.zip?generation=${:generation-x86_64}&alt=media ${:md5sum-x86_64} +x86_64-linux-gnu = https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F${:revision_x86-64}%2Fchrome-linux.zip?generation=${:generation-x86_64}&alt=media ${:md5sum-x86_64} library = @@ -167,7 +167,7 @@ library = path = ${fontconfig:location}/bin install = - url, md5sum = self.options[guessPlatform()].split() + url, md5sum = self.options[multiarch()].split() extract_dir = self.extract(self.download(url, md5sum)) self.copyTree(guessworkdir(extract_dir), location) # XXX adjust some permissions diff --git a/component/cmake/buildout.cfg b/component/cmake/buildout.cfg index e7c245f6fe9ae91a86e7f23c6421481596296fbe..3c7dc0fa8c9817da171aabf8aed1179c4ab8957c 100644 --- a/component/cmake/buildout.cfg +++ b/component/cmake/buildout.cfg @@ -14,8 +14,3 @@ md5sum = c0feb5855604f68b09bdb3acb623619e environment = CMAKE_INCLUDE_PATH=${ncurses:location}/include:${openssl:location}/include CMAKE_LIBRARY_PATH=${ncurses:location}/lib:${openssl:location}/lib - -[cmake-3.18] -<= cmake -url = https://cmake.org/files/v3.18/cmake-3.18.4.tar.gz -md5sum = 0380beaee1c39a22455db02651abe7be diff --git a/component/consul/buildout.cfg b/component/consul/buildout.cfg index 164c4a74d5ce0decdb39c2af8ed2f94d2b97f611..29704b9552972a702e4b0fff8b626e272dc0eaa5 100644 --- a/component/consul/buildout.cfg +++ b/component/consul/buildout.cfg @@ -1,24 +1,10 @@ [buildout] - - parts = consul [consul] -recipe = slapos.recipe.build - -# here, two %s are used, first one is for directory name (eg. x86_64), and second one is for filename (eg. x86-64). 
-url_x86-64 = https://releases.hashicorp.com/consul/0.8.3/consul_0.8.3_linux_amd64.zip -url_x86 = https://releases.hashicorp.com/consul/0.8.3/consul_0.8.3_linux_386.zip - -# supported architectures md5sums -md5sum_x86 = dfdc0eedd79baab7e6bc56c1582fd02e -md5sum_x86-64 = d6bc0898ea37ae2198370a9e1978d1bb - -install = - import shutil - platform = guessPlatform() - url = options['url_' + platform] - md5sum = options['md5sum_' + platform] - extract_dir = self.extract(self.download(url, md5sum)) - shutil.move(extract_dir, location) +recipe = slapos.recipe.build:download-unpacked +version = 0.8.3 +base = https://releases.hashicorp.com/consul/${:version}/consul_${:version} +i386-linux-gnu = ${:base}_linux_386.zip dfdc0eedd79baab7e6bc56c1582fd02e +x86_64-linux-gnu = ${:base}_linux_amd64.zip d6bc0898ea37ae2198370a9e1978d1bb diff --git a/component/curl/buildout.cfg b/component/curl/buildout.cfg index bac0870dd97a75f20ef49a69df74ff276eb319f3..375a4f9bc0813afec4e0c22b9df57d52368a952b 100644 --- a/component/curl/buildout.cfg +++ b/component/curl/buildout.cfg @@ -10,6 +10,7 @@ extends = ../zstd/buildout.cfg ../zlib/buildout.cfg ../nghttp2/buildout.cfg + ../ca-certificates/buildout.cfg parts = curl @@ -38,6 +39,7 @@ configure-options = --disable-alt-svc --with-zlib=${zlib:location} --with-ssl=${openssl:location} + --with-ca-path=${openssl:location}/etc/ssl/certs --without-gnutls --without-polarssl --without-mbedtls diff --git a/component/firefox/buildout.cfg b/component/firefox/buildout.cfg index 5bcca57c39584e1520864e9c0f621007082f5415..8cde312f10277dbff92cd48416807fd350aab651 100644 --- a/component/firefox/buildout.cfg +++ b/component/firefox/buildout.cfg @@ -136,8 +136,8 @@ recipe = slapos.recipe.build slapos_promise = file:firefox -x86 = https://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 ${:i686-md5sum} -x86-64 = https://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 ${:x86_64-md5sum} +i386-linux-gnu = https://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 ${:i686-md5sum} +x86_64-linux-gnu = https://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 ${:x86_64-md5sum} library = ${alsa:location}/lib @@ -185,7 +185,7 @@ path = ${fontconfig:location}/bin install = - url, md5sum = options[guessPlatform()].split() + url, md5sum = options[multiarch()].split() extract_dir = self.extract(self.download(url, md5sum)) self.copyTree(guessworkdir(extract_dir), location) ${:post-install} @@ -243,11 +243,11 @@ x86_64-md5sum = 4a185d3179862a35104603b9274452e7 recipe = slapos.recipe.build location = ${buildout:bin-directory}/${:_buildout_section_name_} -x86 = https://github.com/mozilla/geckodriver/releases/download/v${:version}/geckodriver-v${:version}-linux32.tar.gz ${:i686-md5sum} -x86-64 = https://github.com/mozilla/geckodriver/releases/download/v${:version}/geckodriver-v${:version}-linux64.tar.gz ${:x86_64-md5sum} +i386-linux-gnu = https://github.com/mozilla/geckodriver/releases/download/v${:version}/geckodriver-v${:version}-linux32.tar.gz ${:i686-md5sum} +x86_64-linux-gnu = https://github.com/mozilla/geckodriver/releases/download/v${:version}/geckodriver-v${:version}-linux64.tar.gz ${:x86_64-md5sum} install = import shutil - url, md5sum = options[guessPlatform()].split() + url, md5sum = options[multiarch()].split() extract_dir = 
self.extract(self.download(url, md5sum)) shutil.copy(extract_dir + '/geckodriver', location) diff --git a/component/firewalld/buildout.cfg b/component/firewalld/buildout.cfg index 4831c821af9cd84b19cb8581c820f98ed08f5112..f920bd6c577cc6e2207a9fedc918ca9d35e800fd 100644 --- a/component/firewalld/buildout.cfg +++ b/component/firewalld/buildout.cfg @@ -103,10 +103,8 @@ environment = GI_LIBS=-L${gobject-introspection:location}/lib -lgirepository-1.0 [trusted-config] -recipe = hexagonit.recipe.download -ignore-existing = true -url = https://raw.githubusercontent.com/t-woerner/firewalld/v0.3.14/config/zones/trusted.xml -download-only = true +recipe = slapos.recipe.build:download +url = https://raw.githubusercontent.com/t-woerner/firewalld/v0.3.14/config/zones/${:filename} filename = trusted.xml md5sum = 893752ba2e93a1e96334dfee19f884ad diff --git a/component/fontconfig/buildout.cfg b/component/fontconfig/buildout.cfg index a5bea6966e8c2a2fbdee588c397944bb97e1d970..eda2cad622e09c766713077b6ab102d7c74b35a8 100644 --- a/component/fontconfig/buildout.cfg +++ b/component/fontconfig/buildout.cfg @@ -33,4 +33,3 @@ environment = recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} output = ${buildout:parts-directory}/${:_buildout_section_name_} -mode = 640 diff --git a/component/golang/buildout.cfg b/component/golang/buildout.cfg index 7ddbd0bbf375913d5d665cdcf6b6bfdc79406834..5e00747e2268a2f8647905715b754feeee01bc72 100644 --- a/component/golang/buildout.cfg +++ b/component/golang/buildout.cfg @@ -6,6 +6,7 @@ extends = ../git/buildout.cfg ../pkgconfig/buildout.cfg ../swig/buildout.cfg + ../patch/buildout.cfg ./buildout.hash.cfg parts = gowork go @@ -22,10 +23,19 @@ make-targets= cd src && unset GOBIN && ./all.bash && cp -alf .. ${:location} # some testdata files have an issue with slapos.extension.strip. post-install = ${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true environment = - PATH=${swig:location}/bin:%(PATH)s + PATH=${swig:location}/bin:${patch:location}/bin:%(PATH)s GOROOT_FINAL=${:location} ${:environment-extra} +# TestChown and TestSCMCredentials currently fail in a user-namespace +# https://github.com/golang/go/issues/42525 +# the patches apply to go >= 1.12 +patch-options = -p1 +patches = + ${:_profile_base_location_}/skip-chown-tests.patch#d4e3c8ef83788fb2a5d80dd75034786f + ${:_profile_base_location_}/fix-TestSCMCredentials.patch#1d8dbc97cd579e03fafd8627d48f1c59 + + [golang14] <= golang-common # https://golang.org/doc/install/source#bootstrapFromSource @@ -34,6 +44,9 @@ md5sum = dbf727a4b0e365bf88d97cbfde590016 environment-extra = make-targets= cd src && unset GOBIN && ./make.bash && cp -alf .. ${:location} +# skip-chown-tests.patch does not apply to go1.4, but we don't run go1.4 tests. +patches = + [golang1.12] <= golang-common @@ -109,7 +122,6 @@ environment-extra = recipe = slapos.recipe.template:jinja2 exe = ${buildout:bin-directory}/go rendered= ${:exe} -mode = 755 template= inline: #!/bin/sh -e . 
${gowork:env.sh} diff --git a/component/golang/fix-TestSCMCredentials.patch b/component/golang/fix-TestSCMCredentials.patch new file mode 100644 index 0000000000000000000000000000000000000000..c6609ba18dfd1deeef79d06498449be879993935 --- /dev/null +++ b/component/golang/fix-TestSCMCredentials.patch @@ -0,0 +1,29 @@ +From 385ca858ac89efccffd557eccc1113281306bd88 Mon Sep 17 00:00:00 2001 +From: Kirill Smelkov <kirr@nexedi.com> +Date: Mon, 6 Dec 2021 22:50:27 +0300 +Subject: [PATCH] syscall: tests: Fix TestSCMCredentials for `unshare -Umc` + +--- + src/syscall/creds_test.go | 6 ++++-- + 1 file changed, 4 insertions(+), 2 deletions(-) + +diff --git a/src/syscall/creds_test.go b/src/syscall/creds_test.go +index c1a8b516e8..ed6e80c0c3 100644 +--- a/src/syscall/creds_test.go ++++ b/src/syscall/creds_test.go +@@ -78,8 +78,10 @@ func TestSCMCredentials(t *testing.T) { + if sys, ok := err.(*os.SyscallError); ok { + err = sys.Err + } +- if err != syscall.EPERM { +- t.Fatalf("WriteMsgUnix failed with %v, want EPERM", err) ++ // can get EINVAL instead of EPERM under `unshare -Umc` because uid0 is not mapped and maps to -1 ++ // see also https://github.com/golang/go/issues/42525 ++ if !(err == syscall.EPERM || err == syscall.EINVAL) { ++ t.Fatalf("WriteMsgUnix failed with %v, want EPERM/EINVAL", err) + } + } + +-- +2.30.2 + diff --git a/component/golang/skip-chown-tests.patch b/component/golang/skip-chown-tests.patch new file mode 100644 index 0000000000000000000000000000000000000000..722d133a4f70c60908b4f50e7c5345bd82730511 --- /dev/null +++ b/component/golang/skip-chown-tests.patch @@ -0,0 +1,40 @@ +From: regnat <rg@regnat.ovh> +Date: Wed, 3 Nov 2021 10:17:28 +0100 +Subject: [PATCH] Disable the chown tests + +See https://github.com/golang/go/issues/42525 and +https://github.com/NixOS/nix/issues/3245 +--- + os/os_unix_test.go | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/os/os_unix_test.go b/os/os_unix_test.go +index 51693fd..0936542 100644 +--- a/src/os/os_unix_test.go ++++ b/src/os/os_unix_test.go +@@ -40,6 +40,7 @@ func checkUidGid(t *testing.T, path string, uid, gid int) { + } + + func TestChown(t *testing.T) { ++ t.Skipf("https://github.com/golang/go/issues/42525") + // Use TempDir() to make sure we're on a local file system, + // so that the group ids returned by Getgroups will be allowed + // on the file. On NFS, the Getgroups groups are +@@ -83,6 +84,7 @@ func TestChown(t *testing.T) { + } + + func TestFileChown(t *testing.T) { ++ t.Skipf("https://github.com/golang/go/issues/42525") + // Use TempDir() to make sure we're on a local file system, + // so that the group ids returned by Getgroups will be allowed + // on the file. On NFS, the Getgroups groups are +@@ -126,6 +128,7 @@ func TestFileChown(t *testing.T) { + } + + func TestLchown(t *testing.T) { ++ t.Skipf("https://github.com/golang/go/issues/42525") + // Use TempDir() to make sure we're on a local file system, + // so that the group ids returned by Getgroups will be allowed + // on the file. On NFS, the Getgroups groups are +-- +2.31.1 diff --git a/component/helloweb/buildout.cfg b/component/helloweb/buildout.cfg index 7c03f06aa697551c87a86955c7f142e71d931fb2..ef5d107ab54c3bfd19cb259ae94c1ff2d72aefb8 100644 --- a/component/helloweb/buildout.cfg +++ b/component/helloweb/buildout.cfg @@ -34,11 +34,10 @@ install = [helloweb-go] # we already have gowork/bin/helloweb with helloweb Go build. # Add bin/helloweb-go that runs go version of helloweb without any environment preset needed. 
-recipe = collective.recipe.template -output = ${buildout:bin-directory}/${:_buildout_section_name_} -mode = 0755 -input = inline: - #!/bin/sh +recipe = slapos.recipe.template:jinja2 +rendered = ${buildout:bin-directory}/${:_buildout_section_name_} +template = + inline:#!/bin/sh -e . ${gowork:env.sh} exec helloweb "$@" @@ -71,7 +70,7 @@ scripts = helloweb=helloweb-python # rubygemsrecipe with fixed url and this way pinned rubygems version recipe = rubygemsrecipe url = https://rubygems.org/rubygems/rubygems-2.5.2.zip -ruby-location = ${ruby2.1:location} +ruby-location = ${ruby:location} ruby-executable = ${:ruby-location}/bin/ruby gems = bundler==1.11.2 @@ -93,15 +92,14 @@ path = ${helloweb:location}/ruby/ configure-command = : make-binary = -make-targets= cd ${:path} && ${bundler:bundle} install +make-targets= ${bundler:bundle} install [helloweb-ruby] # NOTE slapos.cookbook:wrapper also works, but currently _only_ in instance -recipe = collective.recipe.template -output = ${buildout:bin-directory}/${:_buildout_section_name_} -mode = 0755 -input = inline: - #!/bin/sh +recipe = slapos.recipe.template:jinja2 +rendered = ${buildout:bin-directory}/${:_buildout_section_name_} +template = + inline:#!/bin/sh -e export BUNDLE_GEMFILE=${helloweb-ruby-bundle:path}/Gemfile exec ${bundler:bundle} exec sh -c 'helloweb.rb "$@"' ${:_buildout_section_name_} "$@" diff --git a/component/java-jdk/buildout.cfg b/component/java-jdk/buildout.cfg index bd925acfb8517d5327be554280dd71a47eea3fa8..6ee411286fc4560e469b2658cab1376a55f1fd87 100644 --- a/component/java-jdk/buildout.cfg +++ b/component/java-jdk/buildout.cfg @@ -26,8 +26,7 @@ configure-command = : make-binary = : post-install = mv * %(location)s - for file in %(location)s/bin/* %(location)s/lib/*.so ; do - echo appending rpath to $file - ${patchelf:location}/bin/patchelf --set-rpath %(rpath)s $file - done + cd %(location)s + set -x + ${patchelf:location}/bin/patchelf --set-rpath %(rpath)s bin/* lib/*.so rpath = ${zlib:location}/lib:${alsa:location}/lib:${freetype:location}/lib:${libpng:location}/lib:${libXrender:location}/lib:${libXtst:location}/lib:${libX11:location}/lib:${libXau:location}/lib:${libXext:location}/lib:${libXdmcp:location}/lib:${libXi:location}/lib:${libxcb:location}/lib:@@LOCATION@@/lib:@@LOCATION@@/lib/server diff --git a/component/json-c/0001-Remove-unused-variable-size.patch b/component/json-c/0001-Remove-unused-variable-size.patch deleted file mode 100644 index e126ae12ed75677e0bb91d533d5783af5b777d5f..0000000000000000000000000000000000000000 --- a/component/json-c/0001-Remove-unused-variable-size.patch +++ /dev/null @@ -1,29 +0,0 @@ -From 3859e99f50abe11a8dade28efa9ea3d99dfaac11 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Petar=20Koreti=C4=87?= <petar.koretic@gmail.com> -Date: Fri, 11 Apr 2014 10:03:40 +0200 -Subject: [RFC 1/2] Remove unused variable 'size' - ---- - json_tokener.c | 2 -- - 1 file changed, 2 deletions(-) - -diff --git a/json_tokener.c b/json_tokener.c -index 19de8ef..9a76293 100644 ---- a/json_tokener.c -+++ b/json_tokener.c -@@ -352,12 +352,10 @@ struct json_object* json_tokener_parse_ex(struct json_tokener *tok, - - case json_tokener_state_inf: /* aka starts with 'i' */ - { -- int size; - int size_inf; - int is_negative = 0; - - printbuf_memappend_fast(tok->pb, &c, 1); -- size = json_min(tok->st_pos+1, json_null_str_len); - size_inf = json_min(tok->st_pos+1, json_inf_str_len); - char *infbuf = tok->pb->buf; - if (*infbuf == '-') --- -2.5.0 - diff --git 
a/component/json-c/0002-Fix-uninitialised-variable-compile-warning-and-also-.patch b/component/json-c/0002-Fix-uninitialised-variable-compile-warning-and-also-.patch deleted file mode 100644 index 9b58129afcf364cadbce5ffe2a52d5c4047b8cc6..0000000000000000000000000000000000000000 --- a/component/json-c/0002-Fix-uninitialised-variable-compile-warning-and-also-.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 89ae583a8624fad6be4a7d1da084b0c410e4fc63 Mon Sep 17 00:00:00 2001 -From: Stuart Walsh <stu@ipng.org.uk> -Date: Tue, 31 Mar 2015 12:23:03 +0100 -Subject: [RFC 2/2] Fix uninitialised variable compile warning, and also fix - unused-when-used warning - ---- - json_object.h | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/json_object.h b/json_object.h -index 200ac40..e6c6a4f 100644 ---- a/json_object.h -+++ b/json_object.h -@@ -339,8 +339,8 @@ extern void json_object_object_del(struct json_object* obj, const char *key); - #if defined(__GNUC__) && !defined(__STRICT_ANSI__) && __STDC_VERSION__ >= 199901L - - # define json_object_object_foreach(obj,key,val) \ -- char *key; \ -- struct json_object *val __attribute__((__unused__)); \ -+ char *key = NULL; \ -+ struct json_object *val = NULL; \ - for(struct lh_entry *entry ## key = json_object_get_object(obj)->head, *entry_next ## key = NULL; \ - ({ if(entry ## key) { \ - key = (char*)entry ## key->k; \ --- -2.5.0 - diff --git a/component/json-c/buildout.cfg b/component/json-c/buildout.cfg deleted file mode 100644 index 531cfab861091b6c7c05827da22f954b650d7a06..0000000000000000000000000000000000000000 --- a/component/json-c/buildout.cfg +++ /dev/null @@ -1,33 +0,0 @@ -[buildout] -extends = - ../patch/buildout.cfg - -parts = - json-c - -[json-c-patch-base] -recipe = hexagonit.recipe.download -ignore-existing = true -url = ${:_profile_base_location_}/${:filename} -download-only = true - -[json-c-patch-3859e99f50abe11a8dade28efa9ea3d99dfaac11] -< = json-c-patch-base -filename = 0001-Remove-unused-variable-size.patch -md5sum = e8ebc602fbab128d22f1216cb15c4626 - -[json-c-patch-89ae583a8624fad6be4a7d1da084b0c410e4fc63] -< = json-c-patch-base -filename = 0002-Fix-uninitialised-variable-compile-warning-and-also-.patch -md5sum = 5525ab9ee78157ce6d6100e374ac7767 - -[json-c] -recipe = slapos.recipe.cmmi -url = https://s3.amazonaws.com/json-c_releases/releases/json-c-0.12.tar.gz -md5sum = 3ca4bbb881dfc4017e8021b5e0a8c491 -patch-options = -p1 -patches = - ${json-c-patch-3859e99f50abe11a8dade28efa9ea3d99dfaac11:location}/${json-c-patch-3859e99f50abe11a8dade28efa9ea3d99dfaac11:filename} - ${json-c-patch-89ae583a8624fad6be4a7d1da084b0c410e4fc63:location}/${json-c-patch-89ae583a8624fad6be4a7d1da084b0c410e4fc63:filename} -environment = - PATH=${patch:location}/bin:%(PATH)s diff --git a/component/jupyter-py2/buildout.cfg b/component/jupyter-py2/buildout.cfg index 28218e5bd57d2a9b27d6f26d4d340457018cd235..b0bf1d61b59fa3fb8ddc27bc502811394dd5bea8 100644 --- a/component/jupyter-py2/buildout.cfg +++ b/component/jupyter-py2/buildout.cfg @@ -22,8 +22,6 @@ python_executable = ${buildout:bin-directory}/${:interpreter} [download-file-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -download-only = true -mode = 0644 [jupyter-notebook-config] <= download-file-base @@ -44,7 +42,6 @@ mode = 0644 recipe = slapos.recipe.template:jinja2 template = ${:_profile_base_location_}/${:filename} rendered = ${buildout:directory}/template.cfg -mode = 0644 context = key bin_directory buildout:bin-directory key develop_eggs_directory 
buildout:develop-eggs-directory diff --git a/component/jupyter-py2/buildout.hash.cfg b/component/jupyter-py2/buildout.hash.cfg index 84c27c5793377143fa1a33642b888821e0ac6021..4bb63215c46cf1ec8b6b4d1f779a0b1b1cf1dfae 100644 --- a/component/jupyter-py2/buildout.hash.cfg +++ b/component/jupyter-py2/buildout.hash.cfg @@ -15,7 +15,7 @@ [instance-jupyter-notebook] filename = instance.cfg.in -md5sum = 1d5fe6cc4e48672ae7be1c223794a932 +md5sum = 357f28614a13cdbf00a29a83cbfd2642 [jupyter-notebook-config] filename = jupyter_notebook_config.py.jinja diff --git a/component/jupyter-py2/instance.cfg.in b/component/jupyter-py2/instance.cfg.in index 7043599e62ae0b309aabf3733e99127dde8035e3..7a7853dae5c2cbc4ccaee9c2c884d77f9b4b94a0 100644 --- a/component/jupyter-py2/instance.cfg.in +++ b/component/jupyter-py2/instance.cfg.in @@ -37,10 +37,6 @@ key_file = ${generate-certificate:key_file} logfile = ${directory:log}/jupyter_notebook.log notebook_dir = ${directory:notebook_dir} -[dynamic-jinja2-template-base] -recipe = slapos.recipe.template:jinja2 -mode = 0644 - [generate-certificate] ; TODO: there is a slapos recipe to generate certificates. Use it instead recipe = plone.recipe.command @@ -76,10 +72,9 @@ environment = LANG=C.UTF-8 [jupyter-notebook-config] -<= dynamic-jinja2-template-base +recipe = slapos.recipe.template:jinja2 template = {{ jupyter_config_location }}/{{ jupyter_config_filename }} rendered = ${directory:jupyter_config_dir}/jupyter_notebook_config.py -mode = 0744 context = raw config_cfg ${buildout:directory}/knowledge0.cfg @@ -134,7 +129,7 @@ link-binary = {{ erp5_kernel_location }}/{{ erp5_kernel_filename }} target-directory = ${directory:erp5_kernel_dir} [kernel-json] -<= dynamic-jinja2-template-base +recipe = slapos.recipe.template:jinja2 template = {{ kernel_json_location }}/{{ kernel_json_filename }} rendered = ${directory:erp5_kernel_dir}/kernel.json # Use python2.7 executable bin file for kernel config diff --git a/component/libiconv/buildout.cfg b/component/libiconv/buildout.cfg index 52fe783a55bd09a87dd8904a2919d8b9be1c6345..4ab0044746d9187fdd818bdfe8f5b98d63b058d6 100644 --- a/component/libiconv/buildout.cfg +++ b/component/libiconv/buildout.cfg @@ -1,21 +1,15 @@ [buildout] +extends = + ../patch/buildout.cfg parts = libiconv -[libiconv.gets.patch] -recipe = hexagonit.recipe.download -ignore-existing = true -url = ${:_profile_base_location_}/${:filename} -md5sum = 8a20d8afe0617fce56f77537d2b84621 -download-only = true -filename = libiconv.gets.patch - [libiconv] -virtual-depends = - ${libiconv.gets.patch:md5sum} +patch-binary = ${patch:location}/bin/patch patch-options = -p1 patches = - ${libiconv.gets.patch:location}/${libiconv.gets.patch:filename} + ${:_profile_base_location_}/libiconv.gets.patch#8a20d8afe0617fce56f77537d2b84621 recipe = slapos.recipe.cmmi +shared = true url = http://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.14.tar.gz md5sum = e34509b1623cec449dfeb73d7ce9c6c6 diff --git a/component/libmicrohttpd/buildout.cfg b/component/libmicrohttpd/buildout.cfg index 1227f3810f2aa9403629c5749fdb3772b35ee6c5..cc039d42c643a3d77badc90d6ed553953040b744 100644 --- a/component/libmicrohttpd/buildout.cfg +++ b/component/libmicrohttpd/buildout.cfg @@ -10,6 +10,7 @@ parts = [libmicrohttpd] recipe = slapos.recipe.cmmi +shared = true url = https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-0.9.70.tar.gz md5sum = dcd6045ecb4ea18c120afedccbd1da74 configure-options = diff --git a/component/libreoffice-bin/buildout.cfg b/component/libreoffice-bin/buildout.cfg index 
f947b2d3a274d19f335f1311e49c644fce6456f3..e7dc9efea156001f5749af085c58d82b093a7f53 100644 --- a/component/libreoffice-bin/buildout.cfg +++ b/component/libreoffice-bin/buildout.cfg @@ -20,10 +20,12 @@ officedir = libreoffice5.2 install = import os import sys - ARCH_DIR_MAP = { 'x86': 'x86', 'x86-64': 'x86_64' } - platform = guessPlatform() - url = options['url'] % (ARCH_DIR_MAP[platform], platform) - md5sum = options['md5sum_' + platform] + arch = { + 'i386-linux-gnu': ('x86', 'x86'), + 'x86_64-linux-gnu': ('x86_64', 'x86-64'), + }[multiarch()] + url = options['url'] % arch + md5sum = options['md5sum_' + arch[1]] extract_dir = self.extract(self.download(url, md5sum)) workdir = guessworkdir(extract_dir) storagedir = os.path.join(workdir, 'storage') diff --git a/component/m4/buildout.cfg b/component/m4/buildout.cfg index 9146ac1b75faa77aefb8a0037d0342653490d0d7..4a9ed127bd52d7d729155b9b2f524bc561f3b340 100644 --- a/component/m4/buildout.cfg +++ b/component/m4/buildout.cfg @@ -1,18 +1,13 @@ [buildout] extends = ../xz-utils/buildout.cfg - ../patch/buildout.cfg parts = m4 [m4] recipe = slapos.recipe.cmmi shared = true -url = http://ftp.gnu.org/gnu/m4/m4-1.4.18.tar.xz -md5sum = 730bb15d96fffe47e148d1e09235af82 +url = http://ftp.gnu.org/gnu/m4/m4-1.4.19.tar.xz +md5sum = 0d90823e1426f1da2fd872df0311298d environment = PATH=${xz-utils:location}/bin:%(PATH)s -patch-binary = ${patch:location}/bin/patch -patch-options = -p1 -patches = - https://sources.debian.org/data/main/m/m4/1.4.18-5/debian/patches/01-fix-ftbfs-with-glibc-2.28.patch#058a786425e507f911649205b61ffcac diff --git a/component/matplotlib/buildout.cfg b/component/matplotlib/buildout.cfg index 838d3be927762a117800d9c06285f3d4d64f6980..82920bd77214133a3781f9be1cbe8c678bbdf808 100644 --- a/component/matplotlib/buildout.cfg +++ b/component/matplotlib/buildout.cfg @@ -14,7 +14,6 @@ recipe = slapos.recipe.template:jinja2 location = ${buildout:parts-directory}/${:_buildout_section_name_} template = ${:_profile_base_location_}/${:filename} rendered = ${matplotlibrc:location}/matplotlibrc -mode = 0644 backend = agg context = key backend matplotlibrc:backend diff --git a/component/memcached/buildout.cfg b/component/memcached/buildout.cfg deleted file mode 100644 index 00abe2adcd5e3380b1755eadf84f47e471c75298..0000000000000000000000000000000000000000 --- a/component/memcached/buildout.cfg +++ /dev/null @@ -1,43 +0,0 @@ -[buildout] -parts = memcached -extends = - ../autoconf/buildout.cfg - ../automake/buildout.cfg - ../libevent/buildout.cfg - -[memcached-fix-array-subscript-is-above-array-bounds] -recipe = hexagonit.recipe.download -ignore-existing = true -url = ${:_profile_base_location_}/${:filename} -filename = memcached-1.4-fix-array-subscript-is-above-array-bounds.patch -download-only = true -md5sum = 472508b9a4b6c0b9f5d6f2abce3444e3 - -[memcached-gcc-4.4.patch] -recipe = hexagonit.recipe.download -ignore-existing = true -url = ${:_profile_base_location_}/${:filename} -filename = memcached-gcc-4.4.patch -download-only = true -md5sum = fd98d0cbfc4d3a25ac9808472fbe62f8 - -[memcached] -recipe = slapos.recipe.cmmi -url = http://memcached.googlecode.com/files/memcached-1.4.8.tar.gz -md5sum = b7104e269511621c2777367d6d6315fe -patches = - ${memcached-fix-array-subscript-is-above-array-bounds:location}/${memcached-fix-array-subscript-is-above-array-bounds:filename} ${memcached-gcc-4.4.patch:location}/${memcached-gcc-4.4.patch:filename} -patch-options = -p1 -configure-command = - aclocal - autoheader - automake --foreign - autoconf - ./configure 
-configure-options = - --prefix=${buildout:parts-directory}/${:_buildout_section_name_} - --with-libevent=${libevent:location} - --disable-docs -environment = - PATH=${autoconf:location}/bin:${automake:location}/bin:%(PATH)s - LDFLAGS =-Wl,-rpath=${libevent:location}/lib diff --git a/component/memcached/memcached-1.4-fix-array-subscript-is-above-array-bounds.patch b/component/memcached/memcached-1.4-fix-array-subscript-is-above-array-bounds.patch deleted file mode 100644 index 8b8af2679ae4ff4c1b44129f5f4e487b084bb488..0000000000000000000000000000000000000000 --- a/component/memcached/memcached-1.4-fix-array-subscript-is-above-array-bounds.patch +++ /dev/null @@ -1,25 +0,0 @@ -Took originally from OpenSuse spec. Needed on opensuse to avoid "error: array subscript is above array bounds" ---- memcached-orig/memcached.c -+++ memcached-new/memcached.c 2010/05/06 11:40:56 -@@ -2335,15 +2335,18 @@ - inline static void process_stats_detail(conn *c, const char *command) { - assert(c != NULL); - -- if (strcmp(command, "on") == 0) { -+ char on[] = "on"; -+ char off[] = "off"; -+ char dump[] = "dump"; -+ if (strcmp(command, on) == 0) { - settings.detail_enabled = 1; - out_string(c, "OK"); - } -- else if (strcmp(command, "off") == 0) { -+ else if (strcmp(command, off) == 0) { - settings.detail_enabled = 0; - out_string(c, "OK"); - } -- else if (strcmp(command, "dump") == 0) { -+ else if (strcmp(command, dump) == 0) { - int len; - char *stats = stats_prefix_dump(&len); - write_and_free(c, stats, len); diff --git a/component/memcached/memcached-gcc-4.4.patch b/component/memcached/memcached-gcc-4.4.patch deleted file mode 100644 index f86324858ad92d1fdd860b27d7647252c16d99ea..0000000000000000000000000000000000000000 --- a/component/memcached/memcached-gcc-4.4.patch +++ /dev/null @@ -1,12 +0,0 @@ -# In OpenSuse 11.2, 'gcc -dumpversion' returns '4.4', not '4.4.*'. 
---- memcached-1.4.8/configure.ac.orig -+++ memcached-1.4.8/configure.ac -@@ -502,7 +502,7 @@ - GCC_VERSION=`$CC -dumpversion` - CFLAGS="$CFLAGS -Wall -Werror -pedantic -Wmissing-prototypes -Wmissing-declarations -Wredundant-decls" - case $GCC_VERSION in -- 4.4.*) -+ 4.4 | 4.4.*) - CFLAGS="$CFLAGS -fno-strict-aliasing" - ;; - esac diff --git a/component/ncurses/buildout.cfg b/component/ncurses/buildout.cfg index 9a0083548a18eb8d30f104b6bb41ad25276de9d7..6d808b52614a5ef35b2fcf9c6f1d78e6f4f3fb08 100644 --- a/component/ncurses/buildout.cfg +++ b/component/ncurses/buildout.cfg @@ -1,6 +1,4 @@ [buildout] -extends = - ../patch/buildout.cfg parts = ncurses @@ -9,13 +7,10 @@ recipe = slapos.recipe.cmmi shared = true url = http://ftp.gnu.org/gnu/ncurses/ncurses-6.2.tar.gz md5sum = e812da327b1c2214ac1aed440ea3ae8d -patch-options = -p1 -patches = - ${:_profile_base_location_}/ncurses-5.9-gcc-5.patch#57f4cd0cc0c0a42a5ddb2167f9546d72 configure-options = - --prefix=@@LOCATION@@ + --prefix=%(location)s --enable-pc-files - --with-pkg-config-libdir=@@LOCATION@@/lib/pkgconfig + --with-pkg-config-libdir=%(location)s/lib/pkgconfig --with-shared --without-ada --without-manpages @@ -36,6 +31,5 @@ post-install = # pass dummy LDCONFIG to skip needless calling of ldconfig by non-root user environment = LDCONFIG=/bin/echo - PATH=${patch:location}/bin:%(PATH)s make-options = -j1 diff --git a/component/ncurses/ncurses-5.9-gcc-5.patch b/component/ncurses/ncurses-5.9-gcc-5.patch deleted file mode 100644 index 2448229b88e16311e6a51d94f6e83a2479097ec7..0000000000000000000000000000000000000000 --- a/component/ncurses/ncurses-5.9-gcc-5.patch +++ /dev/null @@ -1,46 +0,0 @@ -https://bugs.gentoo.org/545114 - -extracted from the upstream change (which had many unrelated commits in one) - -From 97bb4678dc03e753290b39bbff30ba2825df9517 Mon Sep 17 00:00:00 2001 -From: "Thomas E. Dickey" <dickey@invisible-island.net> -Date: Sun, 7 Dec 2014 03:10:09 +0000 -Subject: [PATCH] ncurses 5.9 - patch 20141206 - -+ modify MKlib_gen.sh to work around change in development version of - gcc introduced here: - https://gcc.gnu.org/ml/gcc-patches/2014-06/msg02185.html - https://gcc.gnu.org/ml/gcc-patches/2014-07/msg00236.html - (reports by Marcus Shawcroft, Maohui Lei). - -diff --git a/ncurses/base/MKlib_gen.sh b/ncurses/base/MKlib_gen.sh -index d8cc3c9..b91398c 100755 ---- a/ncurses/base/MKlib_gen.sh -+++ b/ncurses/base/MKlib_gen.sh -@@ -474,11 +474,22 @@ sed -n -f $ED1 \ - -e 's/gen_$//' \ - -e 's/ / /g' >>$TMP - -+cat >$ED1 <<EOF -+s/ / /g -+s/^ // -+s/ $// -+s/P_NCURSES_BOOL/NCURSES_BOOL/g -+EOF -+ -+# A patch discussed here: -+# https://gcc.gnu.org/ml/gcc-patches/2014-06/msg02185.html -+# introduces spurious #line markers. Work around that by ignoring the system's -+# attempt to define "bool" and using our own symbol here. 
-+sed -e 's/bool/P_NCURSES_BOOL/g' $TMP > $ED2 -+cat $ED2 >$TMP -+ - $preprocessor $TMP 2>/dev/null \ --| sed \ -- -e 's/ / /g' \ -- -e 's/^ //' \ -- -e 's/_Bool/NCURSES_BOOL/g' \ -+| sed -f $ED1 \ - | $AWK -f $AW2 \ - | sed -f $ED3 \ - | sed \ diff --git a/component/neon/buildout.cfg b/component/neon/buildout.cfg deleted file mode 100644 index 1203a0e5a31a1e14f9aba032302f5d68fdf3283f..0000000000000000000000000000000000000000 --- a/component/neon/buildout.cfg +++ /dev/null @@ -1,29 +0,0 @@ -[buildout] -parts = - neon -extends = - ../libxml2/buildout.cfg - ../openssl/buildout.cfg - ../pkgconfig/buildout.cfg - ../zlib/buildout.cfg - -[neon] -recipe = slapos.recipe.cmmi -shared = true -url = http://www.webdav.org/neon/neon-0.29.5.tar.gz -md5sum = ff369e69ef0f0143beb5626164e87ae2 -configure-options = - --disable-static - --enable-shared - --with-ssl=openssl - --without-expat - --without-gssapi - --with-libxml2 - --enable-threadsafe-ssl=posix - --disable-nls - -environment = - PATH=${libxml2:location}/bin:${pkgconfig:location}/bin:%(PATH)s - PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig - CPPFLAGS=-I${openssl:location}/include -I${zlib:location}/include - LDFLAGS=-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${libxml2:location}/lib diff --git a/component/ninja/buildout.cfg b/component/ninja/buildout.cfg index 553abc16eef87127f3e2e8b2ef9ed67a16b2298f..5138e2fbb89b8881af6c35fb8bd7d7330fe0c73d 100644 --- a/component/ninja/buildout.cfg +++ b/component/ninja/buildout.cfg @@ -1,4 +1,6 @@ [buildout] +extends = + ../defaults.cfg parts = ninja @@ -8,8 +10,8 @@ shared = true version = 1.10.2 url = https://github.com/ninja-build/ninja/archive/refs/tags/v${:version}.tar.gz md5sum = 639f75bc2e3b19ab893eaf2c810d4eb4 -configure-command = ./configure.py -configure-options = --bootstrap +configure-command = ${python:executable} configure.py +configure-options = --bootstrap --verbose make-binary = true post-install = set %(location)s/bin diff --git a/component/nxdtest/buildout.cfg b/component/nxdtest/buildout.cfg index bd5abc492b66ecdc53aae6bf5195fff9ae5b2801..0c37dca9bd62e64313c2df1f51b404e8f091e585 100644 --- a/component/nxdtest/buildout.cfg +++ b/component/nxdtest/buildout.cfg @@ -1,25 +1,36 @@ [buildout] extends = ../pygolang/buildout.cfg + ../util-linux/buildout.cfg + ../python-prctl/buildout.cfg ../git/buildout.cfg +# nxdtest is bin/ program to run nxdtest. +# use ${nxdtest:exe} to run it. 
[nxdtest] -recipe = zc.recipe.egg:scripts -eggs = ${pygolang:egg} - ${nxdtest-egg:egg} -scripts = nxdtest -# convenience for nxdtest users +<= pyprog exe = ${buildout:bin-directory}/nxdtest +entry = nxdtest:main +eggs = ${nxdtest-egg:egg} +initialization = + # $PATH for unshare and mount + import os + path = os.environ.get('PATH', '') + if path != '': + path = ':' + path + os.environ['PATH'] = '${util-linux:location}/bin' + path + [nxdtest-egg] recipe = zc.recipe.egg:develop setup = ${nxdtest-repository:location} egg = nxdtest +depends = ${python-prctl:egg} [nxdtest-repository] recipe = slapos.recipe.build:gitclone repository = https://lab.nexedi.com/nexedi/nxdtest.git -revision = 9f413221 +revision = 6f75fa90 location = ${buildout:parts-directory}/nxdtest git-executable = ${git:location}/bin/git diff --git a/component/nxdtest/test.cfg b/component/nxdtest/test.cfg index 7ec1b2638e9bc29925a4e5b972c8db52e81519d6..ffc45fa10440ca14a73eb2c934784fdb4a4d66d3 100644 --- a/component/nxdtest/test.cfg +++ b/component/nxdtest/test.cfg @@ -17,7 +17,7 @@ egg = nxdtest[test] [python-interpreter] eggs += ${pytest:eggs} - ${nxdtest:eggs} + ${nxdtest-egg:egg} # env.sh for interpreter to be on $PATH. [nxdtest-env.sh] diff --git a/component/ocropus/buildout.cfg b/component/ocropus/buildout.cfg deleted file mode 100644 index 8ca464d37b1a0d376ff1939f425e9fb0b0a08418..0000000000000000000000000000000000000000 --- a/component/ocropus/buildout.cfg +++ /dev/null @@ -1,53 +0,0 @@ -[buildout] -extends = - ../patch/buildout.cfg - -parts = ocropus - -[ocropus-patch-scons] -recipe = hexagonit.recipe.download -ignore-existing = true -download-only = true -filename = ocropus.SConstruct-local-installation.patch -url = ${:_profile_base_location_}/${:filename} -md5sum = 08710ec022f3ce13e5c0b584dfee2c1c - -[ocropus] -recipe = slapos.recipe.cmmi -url = http://www.nexedi.org/static/packages/source/ocropus/ocropus-0.4.4.tar.gz -md5sum = 1485dbe9aab27574bfe3c8b4395cf3ce -configure-options = - ${iulib:location} -patches = ${ocropus-patch-scons:location}/${ocropus-patch-scons:filename} -patch-options = -p1 -environment = - PATH=${patch:destination_directory}:${scons-bin:destination_directory}:%(PATH)s - -[scons-local] -recipe = hexagonit.recipe.download -ignore-existing = true -url = http://prdownloads.sourceforge.net/scons/scons-local-2.0.1.tar.gz - -[scons-bin] -recipe = plone.recipe.command -destination_directory = ${scons-local:location} -command = - ln -sf ${scons-local:location}/scons.py ${:destination_directory}/scons - -[iulib-patch-scons] -recipe = hexagonit.recipe.download -ignore-existing = true -download-only = true -filename = iulib.SConstruct-libtiff-detection.patch -url = ${:_profile_base_location_}/${:filename} -md5sum = dbbdf909814fb84ffe47e2ff59980db7 - -[iulib] -recipe = slapos.recipe.cmmi -url = http://www.nexedi.org/static/packages/source/ocropus/iulib-0.4.4.tar.gz -md5sum = 3d9754dbd64c56029ce1cd7c2f61894c -# workaround http://code.google.com/p/iulib/issues/detail?id=27 -patches = ${iulib-patch-scons:location}/${iulib-patch-scons:filename} -patch-options = -p1 -environment = - PATH=${patch:destination_directory}:${scons-bin:destination_directory}:%(PATH)s diff --git a/component/ocropy/buildout.cfg b/component/ocropy/buildout.cfg index b3ce647ab8744787b7d4eb97ebf69b64f3119a02..42f19f3986d781a8672842f739a33ff5bf4f84b6 100644 --- a/component/ocropy/buildout.cfg +++ b/component/ocropy/buildout.cfg @@ -13,14 +13,14 @@ extends = parts = ocropy [ocropy-eng-traineddata] -recipe = hexagonit.recipe.download +recipe = 
slapos.recipe.build:download +shared = true filename = en-default.pyrnn.gz md5sum = cedd140c7d7650e910f0550ad0f04727 -download-only = true -url = https://raw.githubusercontent.com/zuphilip/ocropy-models/master/en-default.pyrnn.gz +url = https://raw.githubusercontent.com/zuphilip/ocropy-models/master/${:filename} [ocropy-env] -OCROPY_MODEL_PATH = ${ocropy-eng-traineddata:location}/${ocropy-eng-traineddata:filename} +OCROPY_MODEL_PATH = ${ocropy-eng-traineddata:target} HOME = ${ocropy:egg} [ocropy] diff --git a/component/oood/buildout.cfg b/component/oood/buildout.cfg deleted file mode 100644 index b82f5722a1d6c15e66adf4e561d4f2772adc327a..0000000000000000000000000000000000000000 --- a/component/oood/buildout.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# OpenOffice.org daemon software buildout -[buildout] -extends = ../openoffice-bin/buildout.cfg -parts += - oood - -[oood] -recipe = hexagonit.recipe.download -ignore-existing = true -url = http://www.nexedi.org/static/tarballs/oood/oood-r36294.tar.gz -md5sum = 9e71251eea4b310fd6bd4ebf8abf890e -strip-top-level-dir = true diff --git a/component/openssh/buildout.cfg b/component/openssh/buildout.cfg index b0a38d75932c5bc61df6c01044a75db04235bfa8..58696d03cfc4f3a0d7de215cc2ad14417c0ff7ee 100644 --- a/component/openssh/buildout.cfg +++ b/component/openssh/buildout.cfg @@ -24,8 +24,8 @@ patch-options = -p1 patches = ${:_profile_base_location_}/no_create_privsep_path.patch#6ab983d16c9b4caf111c737dcad6ec9b environment = - CPPFLAGS=-I${zlib:location}/include -I${openssl-1.0:location}/include - LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl-1.0:location}/lib -Wl,-rpath=${openssl-1.0:location}/lib + CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include + LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib configure-options = --prefix=${:location} --exec-prefix=${:location} diff --git a/component/packer/buildout.cfg b/component/packer/buildout.cfg index 8a9c3e1a00861c7bed95813209b5e688a1ab2a3a..11a870203beba43e644bc834bce692e0e86ab55d 100644 --- a/component/packer/buildout.cfg +++ b/component/packer/buildout.cfg @@ -1,24 +1,10 @@ [buildout] - parts = packer [packer] -recipe = slapos.recipe.build - -# here, two %s are used, first one is for directory name (eg. x86_64), and second one is for filename (eg. x86-64). 
-url_x86-64 = https://releases.hashicorp.com/packer/${:version}/packer_${:version}_linux_amd64.zip -url_x86 = https://releases.hashicorp.com/packer/${:version}/packer_${:version}_linux_386.zip +recipe = slapos.recipe.build:download-unpacked version = 0.7.5 - -# supported architectures md5sums -md5sum_x86 = a545108a0ccfde7c1e74de6c4e6fdded -md5sum_x86-64 = f343d709b84db494e8d6ec38259aa4a6 - -install = - import shutil - platform = guessPlatform() - url = options['url_' + platform] - md5sum = options['md5sum_' + platform] - extract_dir = self.extract(self.download(url, md5sum)) - shutil.move(extract_dir, location) +base = https://releases.hashicorp.com/packer/${:version}/packer_${:version} +i386-linux-gnu = ${:base}_linux_386.zip a545108a0ccfde7c1e74de6c4e6fdded +x86_64-linux-gnu = ${:base}_linux_amd64.zip f343d709b84db494e8d6ec38259aa4a6 diff --git a/component/perl/buildout.cfg b/component/perl/buildout.cfg index 422894d4cfb8f42dfb45cd68f81752c3291930cb..d3bd978629c94323fa05840ad29b781715a4a2d0 100644 --- a/component/perl/buildout.cfg +++ b/component/perl/buildout.cfg @@ -124,6 +124,5 @@ template = inline: exec ${perl:location}/bin/perl \ {% for item in inc -%}{% if item %} -I "{{ item }}" \ {% endif %}{% endfor %} "$@" -mode = 0755 rendered = ${buildout:bin-directory}/${:_buildout_section_name_} perl-bin = ${:rendered} diff --git a/component/phantomjs/buildout.cfg b/component/phantomjs/buildout.cfg index 976f11b5973c54c467673fde4096674ebccf4f86..cee9679258061fd51534e3b76140d21f7ef871d0 100644 --- a/component/phantomjs/buildout.cfg +++ b/component/phantomjs/buildout.cfg @@ -10,13 +10,13 @@ recipe = slapos.recipe.build slapos_promise = file:phantomjs-slapos -x86 = https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.7-linux-i686.tar.bz2 9c1426eef5b04679d65198b1bdd6ef88 -x86-64 = https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.7-linux-x86_64.tar.bz2 f278996c3edd0e8d8ec4893807f27d71 +base = https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.7-linux +i386-linux-gnu = ${:base}-i686.tar.bz2 9c1426eef5b04679d65198b1bdd6ef88 +x86_64-linux-gnu = ${:base}-x86_64.tar.bz2 f278996c3edd0e8d8ec4893807f27d71 install = import os - url, md5sum = options[guessPlatform()].split() - extract_dir = self.extract(self.download(url, md5sum)) + extract_dir = self.extract(self.download(*options[multiarch()].split())) workdir = guessworkdir(extract_dir) self.copyTree(workdir, location) with open(os.path.join(location, "phantomjs-slapos"), 'w') as wrapper: diff --git a/component/proftpd/buildout.cfg b/component/proftpd/buildout.cfg index 26c2ce672b37c8ec8e4813e965b8990d06c3dd2c..d8acfbd0b1e38da40c5c8c7a235e8ca1315a2938 100644 --- a/component/proftpd/buildout.cfg +++ b/component/proftpd/buildout.cfg @@ -1,8 +1,5 @@ # http://www.proftpd.org/ - Highly configurable GPL-licensed FTP server software -# -# Because it uses collective.recipe.environment this components needs slapos.core >= 1.4.7 -# ( grid: do not hide `$USER` when running buildout ) -# + [buildout] extends = ../openssl/buildout.cfg @@ -15,11 +12,12 @@ extends = ../zstd/buildout.cfg # proftpd server -[proftpd-environment] -recipe = collective.recipe.environment - -[proftpd-grp] -recipe = collective.recipe.grp +[proftpd-info] +recipe = slapos.recipe.build +init = + import grp, os, pwd + options['USER'] = pwd.getpwuid(os.getuid())[0] + options['GROUP'] = grp.getgrgid(os.getgid())[0] [proftpd] recipe = slapos.recipe.cmmi @@ -36,12 +34,16 @@ environment = CFLAGS=-DPR_RUN_DIR=\"/proc/self/cwd/var\" CPPFLAGS=-I${zlib:location}/include 
-I${openssl:location}/include LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib - install_user=${proftpd-environment:USER} - install_group=${proftpd-grp:GROUP} + install_user=${proftpd-info:USER} + install_group=${proftpd-info:GROUP} patch-binary = ${patch:location}/bin/patch patch-options = -p1 patches = ${:_profile_base_location_}/0001-mod_rlimit-don-t-change-PR_SET_DUMPABLE-flag.patch#6e58a7a429ff96a51812dc9835e4c227 +post-install = + sed -i '1c\ + #!${perl:location}/bin/perl + ' %(prefix)s/bin/ftpasswd # mod_auth_web: a proftpd module to authenticate users against an HTTP service [proftpd-mod_auth_web-repository] @@ -63,16 +65,6 @@ command = ${proftpd-mod_auth_web-repository:location}/mod_auth_web.c location=${proftpd:location}/libexec/mod_auth_web.so - -# ftpasswd: a perl script to manage a proftpd AuthUserFile -[ftpasswd] -recipe = slapos.recipe.build:download -url = https://raw.githubusercontent.com/proftpd/proftpd/v1.3.6/contrib/ftpasswd -md5sum = 4a47df2cab86d8de7077a445bb416f31 -download-only = true -mode = 0755 - - [proftpd-output] # Shared binary location to ease migration recipe = plone.recipe.command @@ -82,8 +74,7 @@ command = ${coreutils-output:test} -x ${:proftpd} -a -x ${:ftpasswd} modules-deps = ${proftpd-mod_auth_web:recipe} -perl = ${perl:location}/bin/perl -ftpasswd = ${ftpasswd:target} +ftpasswd = ${proftpd:location}/bin/ftpasswd proftpd = ${proftpd:location}/sbin/proftpd prxs = ${proftpd:location}/bin/prxs ftpdctl = ${proftpd:location}/bin/ftpdctl diff --git a/component/proxysql/buildout.cfg b/component/proxysql/buildout.cfg index d7291f8db447d2783fbb85a679404637558f4533..74e66685e6e3d32ca213e70bce9fa117f93b1130 100644 --- a/component/proxysql/buildout.cfg +++ b/component/proxysql/buildout.cfg @@ -2,54 +2,49 @@ extends = ../autoconf/buildout.cfg ../automake/buildout.cfg - ../libtool/buildout.cfg - ../cmake/buildout.cfg - ../openssl/buildout.cfg - ../patch/buildout.cfg - ../git/buildout.cfg - ../openssl/buildout.cfg ../bzip2/buildout.cfg - ../perl/buildout.cfg - ../gnutls/buildout.cfg + ../cmake/buildout.cfg ../curl/buildout.cfg + ../git/buildout.cfg ../gnutls/buildout.cfg - ../libzip/buildout.cfg - ../m4/buildout.cfg - ../pcre/buildout.cfg ../jemalloc/buildout.cfg ../libmicrohttpd/buildout.cfg + ../libtool/buildout.cfg + ../m4/buildout.cfg + ../openssl/buildout.cfg + ../patch/buildout.cfg + ../pcre/buildout.cfg + ../perl/buildout.cfg +# https://github.com/libinjection/libinjection/issues/4 + ../python-2.7/buildout.cfg parts = proxysql [proxysql] recipe = slapos.recipe.cmmi -version = v2.0.12 +shared = true +version = v2.3.2 url = https://github.com/sysown/proxysql/archive/${:version}.tar.gz -md5sum = 70ec17fe73703a25730fdd44b6bc3ef5 -prefix = @@LOCATION@@ -# Patch installation path for SlapOS -pre-configure = - mkdir -p ${:prefix}/bin ${:prefix}/etc/init.d ${:prefix}/lib/systemd/system - sed -ri " - s#(\s)/usr/bin#\1${:prefix}/bin#g - s#(\s)/etc#\1${:prefix}/etc#g - s#(\s)/usr/lib#\1${:prefix}/lib# - s#(\s)/var/lib#\1${:prefix}/lib#g - s#(\s)useradd#\1echo useradd#g - s#(\s)systemctl#\1echo systemctl#g - s#(\s)chkconfig#\1echo chkconfig#g - s#(\s)update-rc.d#\1echo update-rc.d#g" Makefile - +md5sum = 969129ac43c9f64641509891a116e0e1 configure-command = true +make-options = GIT_VERSION=${:version} +# The install target does not honor DESTDIR: +# let's forget it and only install what we need. 
+make-targets = src/proxysql +post-install = + install -Dt %(location)s/bin %(make-targets)s environment = - GIT_VERSION=${:version} PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${libgcrypt:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig - PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake-3.18:location}/bin:%(PATH)s + PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake:location}/bin:%(PATH)s:${python2.7:location}/bin CXXFLAGS=-I${openssl:location}/include -I${gnutls:location}/include -I${zlib:location}/include CFLAGS=-I${gnutls:location}/include LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath=${curl:location}/lib -L${libtool:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${curl:location}/lib -L${pcre:location}/lib -L${jemalloc:location}/lib -L${libmicrohttpd:location}/lib - CMAKE_INCLUDE_PATH=${openssl:location}/include:${gnutls:location}/include:${curl:location}/include:${pcre:location}/include:${jemalloc:location}/include:${libmicrohttpd:location}/include - CMAKE_LIBRARY_PATH=${openssl:location}/lib:${gnutls:location}/lib:${curl:location}/lib:${pcre:location}/lib:${jemalloc:location}/lib:${libmicrohttpd:location}/lib + CMAKE_INCLUDE_PATH=${openssl:location}/include:${gnutls:location}/include:${curl:location}/include:${pcre:location}/include:${jemalloc:location}/include:${libmicrohttpd:location}/include:${zlib:location}/include + CMAKE_LIBRARY_PATH=${openssl:location}/lib:${gnutls:location}/lib:${curl:location}/lib:${pcre:location}/lib:${jemalloc:location}/lib:${libmicrohttpd:location}/lib:${zlib:location}/lib LIBTOOL=libtool ACLOCAL_PATH=${pkgconfig:location}/share/aclocal:${libtool:location}/share/aclocal +patch-options = -p1 +patches = +# PR #3402 (mariadb_client: backport patch to fix syntax error in cmake 3.20) + https://github.com/sysown/proxysql/commit/a3cfa56d257219f7610cd5711045bb5d84485a91.patch diff --git a/component/pure-ftpd/buildout.cfg b/component/pure-ftpd/buildout.cfg index eb19b513baaffcfa74e2573b8f92a5338431d03b..650b36d8a739608ec40fd18ad5760ec6c2c3257d 100644 --- a/component/pure-ftpd/buildout.cfg +++ b/component/pure-ftpd/buildout.cfg @@ -3,8 +3,9 @@ parts = pure-ftpd [pure-ftpd] recipe = slapos.recipe.cmmi -url = https://download.pureftpd.org/pub/pure-ftpd/releases/pure-ftpd-1.0.49.tar.bz2 -md5sum = b7025f469711d88bd84a3518f67c1470 +url = https://github.com/jedisct1/pure-ftpd/releases/download/${:version}/pure-ftpd-${:version}.tar.bz2 +version = 1.0.50 +md5sum = f55af39fc85edceba4b02ee4fa116d00 # See https://download.pureftpd.org/pub/pure-ftpd/doc/README for more configurations # We need the trick about UPLOAD_PIPE_FILE and UPLOAD_PIPE_LOCK so that the files are created inside the $CWD/var/run diff --git a/component/pygolang/buildout.cfg b/component/pygolang/buildout.cfg index ad2e71e67a5407e37c22af7348011cef2be27ec2..c22ce7d9f10f5007a483e7f47847617226c89078 100644 --- a/component/pygolang/buildout.cfg +++ b/component/pygolang/buildout.cfg @@ -49,5 +49,64 @@ initialization = scripts = 
${:interpreter} +# pyprog provides macro recipe to build python programs. +# +# Contrary to zc.recipe.egg:scripts it generates scripts that are run with +# sys.executable being correctly set. In particular it is valid to spawn +# sys.executable from inside the program and assume that all specified eggs are +# still accessible and can be imported. +# +# Usage example: +# +# [myprog] +# <= pyprog +# exe = ${buildout:bin-directory}/myprog +# entry = my.py.mod:main +# eggs = ... +[pyprog] +recipe = slapos.recipe.build +initialization = +depends = +_name = ${:_buildout_section_name_} +init = + name = options['_name'] # options['_buildout_section_name_'] does not work + exe = options['exe'] + entry = options['entry'] + eggs = options['eggs'] + pyinit = options['initialization'] + + options['depends'] += '$${.%s.pyprog:recipe}' % name + + # mod:func -> 'from mod import func; sys.exit(func())' + mod, func = entry.split(':') + entry_run = 'from %s import %s; sys.exit(%s())' % (mod, func, func) + + # indent pyinit with ' ' + __pyinit = '\n'.join([' '+_ for _ in pyinit.splitlines()]) + + self.buildout.parse(""" + # .X.pyprog is python program to start and run entry + # it uses .X.pyexe as underlying python interpreter + [.%(name)s.pyprog] + recipe = slapos.recipe.template:jinja2 + exe = %(exe)s + rendered= $${:exe} + mode = 755 + template= + inline: + #!$${.%(name)s.pyexe:exe} + import sys + %(__pyinit)s + %(entry_run)s + + # .X.pyexe is python interpreter used by .X.pyprog + [.%(name)s.pyexe] + <= python-interpreter + eggs += %(eggs)s + interpreter = $${:_buildout_section_name_} + exe = $${buildout:bin-directory}/$${:interpreter} + """ % locals()) + + [versions] -pygolang = 0.0.8 +pygolang = 0.0.9 diff --git a/component/pytest/buildout.cfg b/component/pytest/buildout.cfg index 0627e930a5fa2b964b471be0bcdcc3ddce1c35c7..dd6a700c525352d2fdde469b4e15a8d7862ebb4e 100644 --- a/component/pytest/buildout.cfg +++ b/component/pytest/buildout.cfg @@ -16,3 +16,4 @@ eggs = ${pytest:eggs} [versions] pytest = 4.6.11:whl pytest-timeout = 1.4.2 +pytest-mock = 2.0.0:whl diff --git a/component/python-prctl/buildout.cfg b/component/python-prctl/buildout.cfg new file mode 100644 index 0000000000000000000000000000000000000000..7b21d54fde5e6085ff8efb4240d2225f0ff93a4a --- /dev/null +++ b/component/python-prctl/buildout.cfg @@ -0,0 +1,22 @@ +[buildout] +extends = + ../libcap/buildout.cfg + +[python-prctl] +recipe = zc.recipe.egg:custom +egg = python-prctl +include-dirs = + ${libcap:location}/include +library-dirs = + ${libcap:location}/lib +rpath = + ${:library-dirs} + +# setup.py also calls cpp directly to verify for sys/capabilities.h +environment = python-prctl-env +[python-prctl-env] +C_INCLUDE_PATH=${libcap:location}/include + + +[versions] +python-prctl = 1.8.1 diff --git a/component/qemu-kvm/buildout.cfg b/component/qemu-kvm/buildout.cfg index a4fd37b9b85dd740abc532fe854f1a19b010dd2a..bfa112d3b5a1d335d69157b747305f3024d00e0e 100644 --- a/component/qemu-kvm/buildout.cfg +++ b/component/qemu-kvm/buildout.cfg @@ -72,11 +72,6 @@ archive = ${:version} <= debian-netinst-base arch = amd64 -[debian-amd64-wheezy-netinst.iso] -<= debian-amd64-netinst-base -version = 7.11.0 -md5sum = 096c1c18b44c269808bd815d58c53c8f - [debian-amd64-jessie-netinst.iso] <= debian-amd64-netinst-base version = 8.11.1 diff --git a/component/ruby/buildout.cfg b/component/ruby/buildout.cfg index f679407ee634e838275621f1940f631cf22e48ba..37ad222e58fa045e65ac19e947f6f732e9be9226 100644 --- a/component/ruby/buildout.cfg +++ b/component/ruby/buildout.cfg 
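To make the [pyprog] macro above more concrete: for the usage example given in its comment (exe = ${buildout:bin-directory}/myprog, entry = my.py.mod:main), the generated .myprog.pyprog script comes out roughly as below, with the shebang pointing at the dedicated .myprog.pyexe interpreter so that re-running sys.executable still sees the listed eggs (the path is illustrative, and the initialization lines are whatever the section provided):

    #!/path/to/bin-directory/.myprog.pyexe
    import sys
    # ... indented `initialization` code from the [myprog] section goes here ...
    from my.py.mod import main; sys.exit(main())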
@@ -25,21 +25,10 @@ environment = PKG_CONFIG_PATH=${libyaml:location}/lib/ -[ruby2.1] -<= ruby-common -url = http://ftp.ruby-lang.org/pub/ruby/2.1/ruby-2.1.8.tar.xz -md5sum = f18ed96bd1d5890f97a17d0d17aaefdd - - -[ruby2.2] -<= ruby-common -url = http://ftp.ruby-lang.org/pub/ruby/2.2/ruby-2.2.2.tar.xz -md5sum = dbce9b9d79d90f213ba8d448b0b6ed86 - [ruby2.3] <= ruby-common url = http://ftp.ruby-lang.org/pub/ruby/2.3/ruby-2.3.8.tar.xz md5sum = 927e1857f3dd5a1bdec26892dbae2a05 [ruby] -<= ruby2.2 +<= ruby2.3 diff --git a/component/slapos/obs.cfg b/component/slapos/obs.cfg index 96c69a44289fc18440f6294f333a55e15873074d..e19574000447961f1a939e25db65e5a391c551fc 100644 --- a/component/slapos/obs.cfg +++ b/component/slapos/obs.cfg @@ -1,8 +1,20 @@ [buildout] # You need to define rootdir and destdir on an upper level +# +# rootdir is where the parts will be installed by the debian package (e.g. /opt/slapos) +# destdir is where we can put the files during compilation (e.g. /tmp/build-slapos) +# buildout MUST BE launched in $destdir/$rootdir (this is enforced by an assert) +# +# in this file, we teach many packages to use prefix = $rootdir/parts/package and we +# set DESTDIR = $destdir so in reality the files land in $destdir/$rootdir/parts/package +# which is EXACTLY what cmmi recipe uses by default (buildout run dir/parts/package) +# that's why everything is working +# +# please be careful when touching this file + #rootdir = %TARGET_DIRECTORY% -#destdir = %BUILD_ROOT_DIRECTORY% -#builddir = %BUILD_DIRECTORY% +#destdir = %BUILD_ROOT_DIRECTORY% + extends = buildout.cfg @@ -13,34 +25,24 @@ extensions = extends-cache = extends-cache download-cache = download-cache -# Uguu, upstream buildout.cfg must be patched as it works the other way -# around from a packager point of view at least, thus at the end static -# path, such as Python HOME directory, are wrong... 
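The comment block added at the top of obs.cfg above defines the rootdir/destdir convention that the [python] init below enforces with an assert. A worked example of the path arithmetic, using the illustrative values from the comment (rootdir = /opt/slapos, destdir = /tmp/build-slapos):

    rootdir = '/opt/slapos'        # prefix the Debian package will finally use
    destdir = '/tmp/build-slapos'  # staging area used during compilation
    directory = destdir + rootdir  # where buildout must be launched

    # ./configure --prefix=$rootdir/parts/foo && make install DESTDIR=$destdir
    # therefore writes under $destdir$rootdir/parts/foo, which is exactly the
    # directory the cmmi recipe would use when buildout runs in `directory`:
    assert directory == '/tmp/build-slapos/opt/slapos'
    print(directory + '/parts/foo')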
-# -# Currently: -# ./configure --prefix=BUILD_DIRECTORY && make install -# Instead of: -# ./configure --prefix=INSTALL_DIRECTORY && make install DESTDIR=BUILD_DIRECTORY +[python] +init += + buildout = self.buildout['buildout'] + assert buildout['directory'] == buildout['destdir'] + buildout['rootdir'], ( + "Buildout MUST BE launched in destdir/rootdir (currently launched in %s but should be launched in %s)", + buildout['directory'], buildout['destdir'] + buildout['rootdir']) + [python2.7] configure-options += --prefix=${buildout:rootdir}/parts/${:_buildout_section_name_} environment += DESTDIR=${buildout:destdir} -[gettext] -# Add gettext library path to RPATH as its binaries are used to build -# glib for example -environment = - PATH=${perl:location}/bin:${lunzip:location}/bin:%(PATH)s - CPPFLAGS=-I${libxml2:location}/include -I${zlib:location}/include -I${ncurses:location}/include - LDFLAGS=-L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${buildout:builddir}/parts/${:_buildout_section_name_}/lib - [bison] configure-options += --prefix=${buildout:rootdir}/parts/${:_buildout_section_name_} make-options += DESTDIR=${buildout:destdir} - environment += PERL5LIB=${perl:location}/lib/5.28.1/ @@ -62,7 +64,6 @@ environment += [dbus] -location = ${buildout:parts-directory}/${:_buildout_section_name_} configure-options += --prefix=${buildout:rootdir}/parts/${:_buildout_section_name_} make-options += @@ -74,7 +75,6 @@ post-install = mkdir -p ${buildout:destdir}/parts/${:_buildout_section_name_}/var/run/dbus [dbus-glib] -location = ${buildout:parts-directory}/${:_buildout_section_name_} environment += LD_LIBRARY_PATH=${dbus:location}/lib DBUS_CFLAGS=-I${dbus:location}/include/dbus-1.0 -I${dbus:location}/lib/dbus-1.0/include @@ -92,21 +92,13 @@ post-install = sed -i 's#${dbus:location}/lib/libdbus-1.la#/opt/slapos/parts/dbus/lib/libdbus-1.la#' ${dbus-glib:location}/lib/libdbus-glib-1.la [openssl] +shared = false prefix = ${buildout:rootdir}/parts/${:_buildout_section_name_} -location = ${buildout:parts-directory}/${:_buildout_section_name_} make-options += INSTALL_PREFIX=${buildout:destdir} DESTDIR=${buildout:destdir} - -environment = +environment += PERL5LIB=${perl:location}/lib/5.28.1/ - PERL=${perl:location}/bin/perl - -[bison-go] -<= bison -configure-options = - --prefix=${buildout:parts-directory}/${:_buildout_section_name_} -make-options = [gobject-introspection] pre-configure = @@ -115,10 +107,8 @@ configure-options += --enable-shared environment += PERL5LIB=${perl:location}/lib/5.28.1/ - -post-install = +post-install = sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config - rm -rf ${bison-go:location} [pygobject3] pre-configure += @@ -126,18 +116,6 @@ pre-configure += post-install = sed -i 's#!${python:location}/bin/python${python:version}#!/opt/slapos/parts/python${python:version}/bin/python${python:version}#' ${python:location}/bin/python${python:version}-config -[ncurses] -configure-options = - --prefix=${buildout:parts-directory}/${:_buildout_section_name_} - --with-shared - --without-ada - --without-manpages - --without-tests - --without-normal - --without-debug - --without-gpm - --enable-rpath - [flex] environment += BISON_PKGDATADIR=${bison:location}/share/bison/ @@ -154,7 +132,6 @@ post-install = sed -i 
"s#'${buildout:destdir}/parts/site_perl#'/opt/slapos/parts/site_perl#" ${perl:location}/lib/5.28.1/*-linux-thread-multi/Config.pm [perl] -location = ${buildout:destdir}/parts/perl configure-command = sh Configure -des \ -Dprefix=${buildout:rootdir}/parts/${:_buildout_section_name_} \ diff --git a/component/subversion/buildout.cfg b/component/subversion/buildout.cfg index ea861bc372d76f7d50d6e5b2d97ad6ce48b139fe..c88f93fbbe4da2d6bdb1491c559109ea5ccd9bc6 100644 --- a/component/subversion/buildout.cfg +++ b/component/subversion/buildout.cfg @@ -6,7 +6,6 @@ extends = ../apache/buildout.cfg ../libexpat/buildout.cfg ../libuuid/buildout.cfg - ../neon/buildout.cfg ../openssl/buildout.cfg ../patch/buildout.cfg ../perl/buildout.cfg @@ -14,7 +13,6 @@ extends = ../serf/buildout.cfg ../sqlite3/buildout.cfg ../zlib/buildout.cfg - ../swig/buildout.cfg ../lz4/buildout.cfg parts = subversion @@ -55,45 +53,3 @@ environment = PKG_CONFIG_PATH=${apache:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${serf:location}/lib/pkgconfig CPPFLAGS=-I${libexpat:location}/include -I${libuuid:location}/include LDFLAGS=-L${libexpat:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${apache:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${serf:location}/lib -Wl,-rpath=${lz4:location}/lib - -[subversion-1.9] -recipe = hexagonit.recipe.cmmi -shared = true -url = http://www.apache.org/dist/subversion/subversion-1.7.9.tar.gz -md5sum = dfb083e8bfac88aa28d606168b08e4ff -configure-options = - --disable-static - --with-apr=${apache:location}/bin/apr-1-config - --with-apr-util=${apache:location}/bin/apu-1-config - --with-apxs=${apache:location}/bin/apxs - --with-zlib=${zlib:location} - --with-sqlite=${sqlite3:location} - --with-neon=${neon:location} - --with-swig=${swig:location}/bin/swig - --without-berkeley-db - --without-sasl - --without-apr_memcache - --without-gnome-keyring - --without-kwallet - --without-jdk - --without-jikes - --without-junit - --without-ctypesgen - --without-ruby-sitedir - --without-ruby-test-verbose - --disable-nls - -# it seems that parallel build sometimes fails. 
-make-options = - -j1 - -make-targets = - install - swig-py - install-swig-py - -environment = - PATH=${pkgconfig:location}/bin:${neon:location}/bin:%(PATH)s - PKG_CONFIG_PATH=${apache:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${openssl-1.0:location}/lib/pkgconfig:${neon:location}/lib/pkgconfig - CPPFLAGS=-I${libexpat:location}/include -I${libuuid:location}/include - LDFLAGS=-L${libexpat:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${neon:location}/lib -Wl,-rpath=${apache:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -Wl,-rpath=${openssl-1.0:location}/lib diff --git a/component/theia/buildout.cfg b/component/theia/buildout.cfg new file mode 100644 index 0000000000000000000000000000000000000000..a261dbad2d6f4d2419798bf906fdb4c89c528a3d --- /dev/null +++ b/component/theia/buildout.cfg @@ -0,0 +1,170 @@ +[buildout] +extends = + ../java-jdk/buildout.cfg + ../libsecret/buildout.cfg + ../pkgconfig/buildout.cfg + ../patchelf/buildout.cfg + ../yarn/buildout.cfg + download-plugins.cfg + buildout.hash.cfg + + +[preloadTemplate.html] +recipe = slapos.recipe.build:download +shared = true +url = ${:_profile_base_location_}/${:_buildout_section_name_} + +[yarn.lock] +recipe = slapos.recipe.build:download +shared = true +url = ${:_profile_base_location_}/${:_buildout_section_name_} + +[theia] +recipe = slapos.recipe.cmmi +shared = true +path = ${yarn.lock:location} +# To regenerate yarn.lock, use package.json instead +# path = ${package.json:location} +environment = + TMPDIR=@@LOCATION@@/tmp + PATH=${nodejs:location}/bin:${pkgconfig:location}/bin:%(PATH)s + PKG_CONFIG_PATH=${libsecret:pkg-config-path} + LDFLAGS=-Wl,-rpath=${libsecret:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${glib:location}/lib +pre-configure = + mkdir -p $TMPDIR + echo '${package.json:content}' > %(location)s/package.json + # To regenerate yarn.lock, comment the line below + cp yarn.lock %(location)s +configure-command = true +make-binary = cd %(location)s && ${yarn:location}/bin/yarn +make-targets = theia build +post-install = + # On Debian 9 the -rpath are not taken into account by yarn build for unknown reasons + cd %(location)s/node_modules/keytar/build/Release + ${patchelf:location}/bin/patchelf --set-rpath \ + ${libsecret:location}/lib:${gettext:location}/lib:${glib:location}/lib \ + keytar.node + +[theia-wrapper] +recipe = slapos.recipe.template:jinja2 +rendered = ${buildout:bin-directory}/${:_buildout_section_name_} +template = + inline: + #!/bin/sh + cd ${theia:location} + exec ${yarn:location}/bin/yarn theia start "$@" + +[theia-open] +recipe = slapos.recipe.template:jinja2 +rendered = ${buildout:bin-directory}/${:_buildout_section_name_} +template = + inline: + #!/bin/sh + exec ${nodejs:location}/bin/node ${theia:location}/node_modules/.bin/theia-open "$@" + +[theia-plugins] +recipe = slapos.recipe.build +urls = ${theia-download-plugins:urls} +install = + import os + for line in options['urls'].splitlines(): + extension_name, url, md5sum = line.split() + extract_dir = self.extract(self.download(url, md5sum)) + destination_dir = os.path.join(options['location'], extension_name) + self.copyTree(guessworkdir(extract_dir), destination_dir) + os.chmod(destination_dir, 0o750) + +[package.json] +content = + { + "private": true, + "theia": { + "backend": { + "config": { + "warnOnPotentiallyInsecureHostPattern": false + } + }, + "frontend": { + "config": { + "applicationName": "Theia SlapOS", + 
"preferences": { + "application.confirmExit": "always", + "files.associations": { + "*.cfg": "zc-buildout" + }, + "files.enableTrash": false, + "files.exclude": { + "**.pyc": true, + "**.egg-info": true, + "__pycache__": true, + ".git": true, + ".env": true, + "**/node_modules/**": true + }, + "files.watcherExclude": { + "**/.eggs/**": true, + "**/.env/**": true, + "**/.git/**": true, + "**/node_modules/**": true + }, + "editor.multiCursorModifier": "ctrlCmd", + "editor.tabSize": 2, + "plantuml.server": "https://plantuml.host.vifib.net/svg/", + "plantuml.render": "PlantUMLServer", + "gitlens.remotes": [{ "domain": "lab.nexedi.com", "type": "GitLab" }], + "java.home": "${java-jdk:location}" + }, + "warnOnPotentiallyInsecureHostPattern": false + } + }, + "generator": { + "config": { + "preloadTemplate": "${preloadTemplate.html:target}" + } + } + }, + "dependencies": { + "@theia/bulk-edit": "latest", + "@theia/callhierarchy": "latest", + "@theia/console": "latest", + "@theia/core": "latest", + "@theia/debug": "latest", + "@theia/editor": "latest", + "@theia/editor-preview": "latest", + "@theia/file-search": "latest", + "@theia/filesystem": "latest", + "@theia/getting-started": "latest", + "@theia/keymaps": "latest", + "@theia/markers": "latest", + "@theia/messages": "latest", + "@theia/metrics": "latest", + "@theia/mini-browser": "latest", + "@theia/monaco": "latest", + "@theia/navigator": "latest", + "@theia/outline-view": "latest", + "@theia/output": "latest", + "@theia/plugin-dev": "latest", + "@theia/plugin-ext": "latest", + "@theia/plugin-ext-vscode": "latest", + "@theia/preferences": "latest", + "@theia/preview": "latest", + "@theia/process": "latest", + "@theia/property-view": "latest", + "@theia/scm": "latest", + "@theia/scm-extra": "latest", + "@theia/search-in-workspace": "latest", + "@theia/task": "latest", + "@theia/terminal": "latest", + "@theia/timeline": "latest", + "@theia/typehierarchy": "latest", + "@theia/userstorage": "latest", + "@theia/variable-resolver": "latest", + "@theia/vsx-registry": "latest", + "@theia/workspace": "latest", + "@perrinjerome/theia-open": "latest", + "@perrinjerome/theia-open-cli": "latest" + }, + "devDependencies": { + "@theia/cli": "latest" + } + } diff --git a/component/theia/buildout.hash.cfg b/component/theia/buildout.hash.cfg new file mode 100644 index 0000000000000000000000000000000000000000..8a2c0b13f228d836a6033a3f9f48fabb49f2da4e --- /dev/null +++ b/component/theia/buildout.hash.cfg @@ -0,0 +1,22 @@ +# THIS IS NOT A BUILDOUT FILE, despite purposedly using a compatible syntax. +# The only allowed lines here are (regexes): +# - "^#" comments, copied verbatim +# - "^[" section beginings, copied verbatim +# - lines containing an "=" sign which must fit in the following categorie. +# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file +# Copied verbatim. +# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported +# by the re-generation script. +# Re-generated. +# - other lines are copied verbatim +# Substitution (${...:...}), extension ([buildout] extends = ...) and +# section inheritance (< = ...) are NOT supported (but you should really +# not need these here). 
+ +[preloadTemplate.html] +_update_hash_filename_ = preloadTemplate.html +md5sum = 8157c22134200bd862a07c6521ebf799 + +[yarn.lock] +_update_hash_filename_ = yarn.lock +md5sum = 067d2db611b21f77885f3adfd7f81453 diff --git a/software/theia/download-plugins.cfg b/component/theia/download-plugins.cfg similarity index 100% rename from software/theia/download-plugins.cfg rename to component/theia/download-plugins.cfg diff --git a/software/theia/generate_download_plugins_cfg.py b/component/theia/generate_download_plugins_cfg.py similarity index 100% rename from software/theia/generate_download_plugins_cfg.py rename to component/theia/generate_download_plugins_cfg.py diff --git a/software/theia/preloadTemplate.html b/component/theia/preloadTemplate.html similarity index 100% rename from software/theia/preloadTemplate.html rename to component/theia/preloadTemplate.html diff --git a/software/theia/yarn.lock b/component/theia/yarn.lock similarity index 100% rename from software/theia/yarn.lock rename to component/theia/yarn.lock diff --git a/component/accords/buildout.cfg b/component/unstable/accords/buildout.cfg similarity index 100% rename from component/accords/buildout.cfg rename to component/unstable/accords/buildout.cfg diff --git a/component/condor/buildout.cfg b/component/unstable/condor/buildout.cfg similarity index 100% rename from component/condor/buildout.cfg rename to component/unstable/condor/buildout.cfg diff --git a/component/gateone/buildout.cfg b/component/unstable/gateone/buildout.cfg similarity index 100% rename from component/gateone/buildout.cfg rename to component/unstable/gateone/buildout.cfg diff --git a/component/kerberos/buildout.cfg b/component/unstable/kerberos/buildout.cfg similarity index 100% rename from component/kerberos/buildout.cfg rename to component/unstable/kerberos/buildout.cfg diff --git a/component/libatlas/add-rpath.patch b/component/unstable/libatlas/add-rpath.patch similarity index 100% rename from component/libatlas/add-rpath.patch rename to component/unstable/libatlas/add-rpath.patch diff --git a/component/libatlas/buildout.cfg b/component/unstable/libatlas/buildout.cfg similarity index 99% rename from component/libatlas/buildout.cfg rename to component/unstable/libatlas/buildout.cfg index ab395b09ff95e4164749c46d6c8d8e1184e9a0bc..6b6cdf1267afed2622393a304824bc8b22467006 100644 --- a/component/libatlas/buildout.cfg +++ b/component/unstable/libatlas/buildout.cfg @@ -13,7 +13,6 @@ filename = lapack-${:version}.tgz url = http://www.netlib.org/lapack/${:filename} md5sum = b1d3e3e425b2e44a06760ff173104bdf download-only = true -mode = 0644 [libatlas] recipe = slapos.recipe.cmmi diff --git a/component/libatlas/skip-throttle-check.patch b/component/unstable/libatlas/skip-throttle-check.patch similarity index 100% rename from component/libatlas/skip-throttle-check.patch rename to component/unstable/libatlas/skip-throttle-check.patch diff --git a/component/libmemcached/buildout.cfg b/component/unstable/libmemcached/buildout.cfg similarity index 100% rename from component/libmemcached/buildout.cfg rename to component/unstable/libmemcached/buildout.cfg diff --git a/component/lxc/buildout.cfg b/component/unstable/lxc/buildout.cfg similarity index 100% rename from component/lxc/buildout.cfg rename to component/unstable/lxc/buildout.cfg diff --git a/component/lxc/cap_get_flag-fix.patch b/component/unstable/lxc/cap_get_flag-fix.patch similarity index 100% rename from component/lxc/cap_get_flag-fix.patch rename to component/unstable/lxc/cap_get_flag-fix.patch diff --git 
a/component/lxc/libexecdir-fix.patch b/component/unstable/lxc/libexecdir-fix.patch similarity index 100% rename from component/lxc/libexecdir-fix.patch rename to component/unstable/lxc/libexecdir-fix.patch diff --git a/component/lxc/lxc-ls-fix.patch b/component/unstable/lxc/lxc-ls-fix.patch similarity index 100% rename from component/lxc/lxc-ls-fix.patch rename to component/unstable/lxc/lxc-ls-fix.patch diff --git a/component/memstrike/buildout.cfg b/component/unstable/memstrike/buildout.cfg similarity index 100% rename from component/memstrike/buildout.cfg rename to component/unstable/memstrike/buildout.cfg diff --git a/component/python-kerberos/buildout.cfg b/component/unstable/python-kerberos/buildout.cfg similarity index 100% rename from component/python-kerberos/buildout.cfg rename to component/unstable/python-kerberos/buildout.cfg diff --git a/component/sphinx/README b/component/unstable/sphinx/README similarity index 100% rename from component/sphinx/README rename to component/unstable/sphinx/README diff --git a/component/sphinx/buildout.cfg b/component/unstable/sphinx/buildout.cfg similarity index 100% rename from component/sphinx/buildout.cfg rename to component/unstable/sphinx/buildout.cfg diff --git a/component/sphinx/sphinx-1.10-beta-snowball.patch b/component/unstable/sphinx/sphinx-1.10-beta-snowball.patch similarity index 100% rename from component/sphinx/sphinx-1.10-beta-snowball.patch rename to component/unstable/sphinx/sphinx-1.10-beta-snowball.patch diff --git a/component/util-linux/buildout.cfg b/component/util-linux/buildout.cfg index 9624abd679c7f7652126b1e398540f0276f5072e..aac3fde9d3114d474671deda26a69375a6aa8ff9 100644 --- a/component/util-linux/buildout.cfg +++ b/component/util-linux/buildout.cfg @@ -24,7 +24,7 @@ configure-options = --disable-makeinstall-chown --disable-makeinstall-setuid --disable-more - --disable-mount + --enable-mount --disable-nls --disable-pivot_root --disable-pylibmount @@ -36,7 +36,7 @@ configure-options = --without-tinfo --disable-tls --disable-ul - --disable-unshare + --enable-unshare --disable-uuidd --disable-wall --without-libiconv-prefix diff --git a/component/vm-img/debian.cfg b/component/vm-img/debian.cfg index bd795756015767e63e3d3802e1a208434039b4ae..53b221e1ad4f56c0b2060e2af7074009bb430985 100644 --- a/component/vm-img/debian.cfg +++ b/component/vm-img/debian.cfg @@ -32,6 +32,17 @@ late-command = # a DNS proxy on both IPv4 and IPv6 without translating queries to what the # host supports. dpkg -P rdnssd +# Fix partially Let's Encrypt certificate on old OS due to expired root CA. +# This is enough for Python but not wget. + dpkg --compare-versions 20200601~ le `dpkg-query -f '$${Version}' -W ca-certificates 2>/dev/null ||echo 1:0` || ( + set ca-certificates_20200601~deb9u2_all.deb + wget http://security.debian.org/debian-security/pool/updates/main/c/ca-certificates/$1 + echo 6cb3ce4329229d71a6f06b9f13c710457c05a469012ea31853ac300873d5a3e1 $1 |sha256sum -c + dpkg -i $1 + rm $1 + cd /etc/ssl/certs + rm DST_Root_CA_X3.pem 2e5ac55d.0 12d55845.0 + ) mount |grep -q 'on / .*\bdiscard\b' || ! 
type fstrim || { apt-get clean sync @@ -40,10 +51,6 @@ late-command = debconf.debconf = debconf/frontend noninteractive debconf/priority critical -debian-wheezy/preseed.apt-setup/services-select = volatile -debian-wheezy/preseed.mirror/country = manual -debian-wheezy/preseed.mirror/http/hostname = archive.debian.org -debian-wheezy/preseed.mirror/http/directory = /debian debian-jessie/preseed.mirror/country = manual debian-jessie/preseed.mirror/http/hostname = archive.debian.org debian-jessie/preseed.mirror/http/directory = /debian @@ -61,10 +68,6 @@ x86_64.initrd = install.amd/initrd.gz <= debian-stable x86_64.iso = debian-amd64-testing-netinst.iso -[debian-wheezy] -<= debian-stable -x86_64.iso = debian-amd64-wheezy-netinst.iso - [debian-jessie] <= debian-stable x86_64.iso = debian-amd64-jessie-netinst.iso diff --git a/component/vnu/buildout.cfg b/component/vnu/buildout.cfg index 4bc561a51bbf21f29067b593fa0b779950012b4a..c1e79fce0c8845b4e2239cbaea6e69c6edc2f7e8 100644 --- a/component/vnu/buildout.cfg +++ b/component/vnu/buildout.cfg @@ -6,11 +6,10 @@ parts = vnu [vnu] -recipe = hexagonit.recipe.download -ignore-existing = true -strip-top-level-dir = true -url = https://github.com/validator/validator/releases/download/17.11.1/vnu.war_17.11.1.zip -md5sum = 2af6dec153a5011cd6fcc85ce5fb599d +recipe = slapos.recipe.build:download-unpacked +shared = true +url = https://github.com/validator/validator/releases/download/20.6.30/vnu.war_20.6.30.zip +md5sum = af595613407034da0797e4d10c03b6a2 [vnu-output] # Shared binary location to ease migration diff --git a/component/wkhtmltopdf/buildout.cfg b/component/wkhtmltopdf/buildout.cfg index 4a3f685cf34143a4cfc95cb584d1046990a51991..869a47cb94825bda5dacedb020be155397dc090a 100644 --- a/component/wkhtmltopdf/buildout.cfg +++ b/component/wkhtmltopdf/buildout.cfg @@ -16,19 +16,13 @@ parts = recipe = slapos.recipe.build # here, two %s are used, first one is for directory name (eg. x86_64), and second one is for filename (eg. x86-64). 
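Going back to the debian.cfg late-command above: before installing the backported ca-certificates package it pins the download by sha256 (echo <digest> $1 | sha256sum -c). The same integrity check rendered in Python, purely for illustration (the image build really runs sha256sum inside the installer environment; file name and digest are the ones from the recipe):

    import hashlib

    expected = '6cb3ce4329229d71a6f06b9f13c710457c05a469012ea31853ac300873d5a3e1'
    with open('ca-certificates_20200601~deb9u2_all.deb', 'rb') as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    if digest != expected:
        raise SystemExit('checksum mismatch, refusing to install')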
-url_x86-64 = http://download.gna.org/wkhtmltopdf/0.12/0.12.4/wkhtmltox-0.12.4_linux-generic-amd64.tar.xz -url_x86 = http://download.gna.org/wkhtmltopdf/0.12/0.12.4/wkhtmltox-0.12.4_linux-generic-i386.tar.xz - -# supported architectures md5sums -md5sum_x86 = ce1a2c0b2cf786ccc5d5828c42c99ddd -md5sum_x86-64 = 96b7306cebb9e65355f69f7ab63df68b +base = http://download.gna.org/wkhtmltopdf/0.12/0.12.4/wkhtmltox-0.12.4_linux-generic +i386-linux-gnu = ${:base}-i386.tar.xz ce1a2c0b2cf786ccc5d5828c42c99ddd +x86_64-linux-gnu = ${:base}-amd64.tar.xz 96b7306cebb9e65355f69f7ab63df68b install = import os,shutil, sys, tempfile - platform = guessPlatform() - url = options['url_' + platform] - md5sum = options['md5sum_' + platform] - path = self.download(url, md5sum) + path = self.download(*options[multiarch()].split()) extract_dir = tempfile.mkdtemp(self.name) self.cleanup_dir_list.append(extract_dir) self.logger.debug('Created working directory %s', extract_dir) diff --git a/component/xpdf/buildout.cfg b/component/xpdf/buildout.cfg deleted file mode 100644 index 85829a3c1ef5da6578ebd0c03dded861f01397c6..0000000000000000000000000000000000000000 --- a/component/xpdf/buildout.cfg +++ /dev/null @@ -1,50 +0,0 @@ -[buildout] -parts = xpdf - -[xpdf] -<= xpdf-3.02 - -[xpdf-patch-download] -recipe = hexagonit.recipe.download -ignore-existing = true -url = ftp://ftp.foolabs.com/pub/xpdf/${:filename} -download-only = true - -[xpdf-3.02pl1.patch] -<= xpdf-patch-download -filename = xpdf-3.02pl1.patch -md5sum = 877118786dfe27d1b7aa5a6759cc6e45 - -[xpdf-3.02pl2.patch] -<= xpdf-patch-download -filename = xpdf-3.02pl2.patch -md5sum = 3a5cb165ae66781e0b21e6219ae06795 - -[xpdf-3.02pl3.patch] -<= xpdf-patch-download -filename = xpdf-3.02pl3.patch -md5sum = 581963ede0fb5715e1a69f01b5b8ce63 - -[xpdf-3.02pl4.patch] -<= xpdf-patch-download -filename = xpdf-3.02pl4.patch -md5sum = 70b752716798dd341a4bf890df5f6fdc - -[xpdf-3.02pl5.patch] -<= xpdf-patch-download -filename = xpdf-3.02pl5.patch -md5sum = 504902ca5e9d66c67eed03636ec6b163 - -[xpdf-3.02] -recipe = slapos.recipe.cmmi -md5sum = 599dc4cc65a07ee868cf92a667a913d2 -url = ftp://ftp.foolabs.com/pub/xpdf/xpdf-3.02.tar.gz -configure-options = - --without-x -patch-options = -p1 -patches = - ${xpdf-3.02pl1.patch:location}/${xpdf-3.02pl1.patch:filename} - ${xpdf-3.02pl2.patch:location}/${xpdf-3.02pl2.patch:filename} - ${xpdf-3.02pl3.patch:location}/${xpdf-3.02pl3.patch:filename} - ${xpdf-3.02pl4.patch:location}/${xpdf-3.02pl4.patch:filename} - ${xpdf-3.02pl5.patch:location}/${xpdf-3.02pl5.patch:filename} diff --git a/component/xz-utils/buildout.cfg b/component/xz-utils/buildout.cfg index edf776ee7e949468a5e94c8122c6b89bdd43caae..d4f46a25bbbcc74017e0cb09ac71c3d44fcc76e0 100644 --- a/component/xz-utils/buildout.cfg +++ b/component/xz-utils/buildout.cfg @@ -5,7 +5,7 @@ parts = [xz-utils] recipe = slapos.recipe.cmmi shared = true -url = http://tukaani.org/xz/xz-5.2.5.tar.bz2 +url = https://tukaani.org/xz/xz-5.2.5.tar.bz2 md5sum = 33ab3ef79aa1146b83b778210e7b0a54 configure-options = --disable-static diff --git a/component/yarn/buildout.cfg b/component/yarn/buildout.cfg index 37c5c528797434c4f4b197f5465cc4f4ec8567e7..48dc27b2626609d46234c08cbdeab8285e783584 100644 --- a/component/yarn/buildout.cfg +++ b/component/yarn/buildout.cfg @@ -24,14 +24,19 @@ yarn-download = ${yarn-download-1.16.0:location} yarn-download = ${yarn-download-1.3.2:location} [yarn-wrapper] -recipe = slapos.recipe.template:jinja2 -rendered = ${:location}/bin/yarn -template = inline: +recipe = slapos.recipe.build 
+shared = true +content = #!/bin/sh PATH=${nodejs:location}/bin/:$PATH exec ${:yarn-download}/bin/yarn $@ -location = ${buildout:parts-directory}/${:_buildout_section_name_} -bin-yarn = ${:rendered} +install = + import os + bin = os.path.join(options['location'], 'bin') + os.makedirs(bin) + with open(os.path.join(bin, 'yarn'), 'w') as f: + os.fchmod(f.fileno(), 0o755) + f.write(options['content']) [yarn-download] recipe = slapos.recipe.build:download-unpacked diff --git a/component/zodbtools/test-common.cfg b/component/zodbtools/test-common.cfg index 7e6cca9ed0be7965732366d77edaf1591db54c8d..d9b06e982d559527be424df9c402319a701f9258 100644 --- a/component/zodbtools/test-common.cfg +++ b/component/zodbtools/test-common.cfg @@ -42,5 +42,4 @@ template = inline: [versions] -mock = 3.0.5 freezegun = 1.0.0 diff --git a/format-json b/format-json index a5f72f900c68958aa8a865933e74fa1dfe5a5b42..a036d341f8dc5cd4557067a0886712add9c53807 100755 --- a/format-json +++ b/format-json @@ -10,7 +10,7 @@ Usage:: """ -import os +from __future__ import print_function import sys import json import collections @@ -19,15 +19,15 @@ import collections def main(): exit_code = 0 for f in sys.argv[1:]: - print 'Processing %s' % (f,) - with open(f, 'rb') as infile: + print('Processing', f,) + with open(f) as infile: try: obj = json.load(infile, object_pairs_hook=collections.OrderedDict) except ValueError as e: exit_code = 1 - print e + print(e, file=sys.stderr) else: - with open(f, 'wb') as outfile: + with open(f, 'w') as outfile: json.dump(obj, outfile, sort_keys=False, indent=2, separators=(',', ': ')) outfile.write('\n') sys.exit(exit_code) diff --git a/package-lock.json b/package-lock.json index 46b499cda446e2bee6b680df54d8c11d1e4322a7..8214d6a86a76f384477f7d016076584e9f48f6d7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2,1973 +2,690 @@ "requires": true, "lockfileVersion": 1, "dependencies": { - "@samverschueren/stream-to-observable": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz", - "integrity": "sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ==", - "requires": { - "any-observable": "^0.3.0" - } - }, - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==" - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "any-observable": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/any-observable/-/any-observable-0.3.0.tgz", - "integrity": "sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog==" - }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "arr-diff": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=" - }, - "arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" - }, - "arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=" - }, - "array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" - }, - "assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=" - }, - "atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" - }, - "base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "requires": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } 
- }, - "cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", - "requires": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - } - }, - "caller-callsite": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz", - "integrity": "sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=", - "requires": { - "callsites": "^2.0.0" - } - }, - "caller-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", - "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", - "requires": { - "caller-callsite": "^2.0.0" - } - }, - "callsites": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", - "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=" - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "ci-info": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", - "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" - }, - "class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "requires": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "cli-truncate": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-0.2.1.tgz", - "integrity": "sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ=", - "requires": { - "slice-ansi": "0.0.4", - "string-width": "^1.0.1" - } - }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" - }, - "collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "requires": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - 
"color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" - }, - "copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=" - }, - "cosmiconfig": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", - "integrity": "sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", - "requires": { - "import-fresh": "^2.0.0", - "is-directory": "^0.3.1", - "js-yaml": "^3.13.1", - "parse-json": "^4.0.0" - } - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "date-fns": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-1.30.1.tgz", - "integrity": "sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw==" - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "decode-uri-component": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", - "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=" - }, - "dedent": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", - "integrity": "sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=" - }, - "define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "requires": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "dependencies": { - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - 
"integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "elegant-spinner": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/elegant-spinner/-/elegant-spinner-1.0.1.tgz", - "integrity": "sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4=" - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "requires": { - "once": "^1.4.0" - } - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" - }, - "execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "requires": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - } - } - }, - "extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "requires": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "requires": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "figures": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-1.7.0.tgz", - "integrity": "sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=", - "requires": { - "escape-string-regexp": "^1.0.5", - "object-assign": "^4.1.0" - } - }, - "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "find-parent-dir": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/find-parent-dir/-/find-parent-dir-0.3.0.tgz", - "integrity": "sha1-M8RLQpqysvBkYpnF+fcY83b/jVQ=" - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "requires": { - "locate-path": "^3.0.0" - } - }, - "for-in": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=" - }, - "fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "requires": { - "map-cache": "^0.2.2" - } - }, - "get-own-enumerable-property-symbols": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", - "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" - }, - "get-stdin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", - "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==" - }, - "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "requires": { - "pump": "^3.0.0" - } - }, - "get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=" - }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "requires": { - "ansi-regex": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" - } - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "requires": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - } - }, - "has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "requires": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "dependencies": { - "kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==" - }, - "husky": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/husky/-/husky-1.3.1.tgz", - "integrity": "sha512-86U6sVVVf4b5NYSZ0yvv88dRgBSSXXmHaiq5pP4KDj5JVzdwKgBjEtUPOm8hcoytezFwbU+7gotXNhpHdystlg==", - "requires": { - "cosmiconfig": "^5.0.7", - "execa": "^1.0.0", - "find-up": "^3.0.0", - "get-stdin": "^6.0.0", - "is-ci": "^2.0.0", - "pkg-dir": "^3.0.0", - "please-upgrade-node": "^3.1.1", - "read-pkg": "^4.0.1", - "run-node": "^1.0.0", - "slash": "^2.0.0" - } - }, - "import-fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", - "integrity": 
"sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", - "requires": { - "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" - } - }, - "indent-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=" - }, - "is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" - }, - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, - "is-ci": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", - "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", - "requires": { - "ci-info": "^2.0.0" - } - }, - "is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "requires": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "dependencies": { - "kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==" - } - } - }, - "is-directory": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz", - "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=" - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=" - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=" - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": 
"sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=" - }, - "is-observable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-observable/-/is-observable-1.1.0.tgz", - "integrity": "sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA==", - "requires": { - "symbol-observable": "^1.1.0" - } - }, - "is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "requires": { - "isobject": "^3.0.1" - } - }, - "is-promise": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", - "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" - }, - "is-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", - "integrity": "sha1-/S2INUXEa6xaYz57mgnof6LLUGk=" - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" - }, - "is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" - }, - "isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=" - }, - "jest-get-type": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-22.4.3.tgz", - "integrity": "sha512-/jsz0Y+V29w1chdXVygEKSz2nBoHoYqNShPe+QgxSNjAuP1i8+k4LbQNrfoliKej0P45sivkSCh7yiD6ubHS3w==" - }, - "jest-validate": { - "version": "23.6.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-23.6.0.tgz", - "integrity": "sha512-OFKapYxe72yz7agrDAWi8v2WL8GIfVqcbKRCLbRG9PAxtzF9b1SEDdTpytNDN12z2fJynoBwpMpvj2R39plI2A==", - "requires": { - "chalk": "^2.0.1", - "jest-get-type": "^22.1.0", - "leven": "^2.1.0", - "pretty-format": "^23.6.0" - } - }, - "js-yaml": { - "version": "3.14.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", - "integrity": "sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", - "requires": { - "argparse": "^1.0.7", - "esprima": 
"^4.0.0" - } - }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" - }, - "kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" - }, - "leven": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz", - "integrity": "sha1-wuep93IJTe6dNCAq6KzORoeHVYA=" - }, - "lint-staged": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-7.3.0.tgz", - "integrity": "sha512-AXk40M9DAiPi7f4tdJggwuKIViUplYtVj1os1MVEteW7qOkU50EOehayCfO9TsoGK24o/EsWb41yrEgfJDDjCw==", - "requires": { - "chalk": "^2.3.1", - "commander": "^2.14.1", - "cosmiconfig": "^5.0.2", - "debug": "^3.1.0", - "dedent": "^0.7.0", - "execa": "^0.9.0", - "find-parent-dir": "^0.3.0", - "is-glob": "^4.0.0", - "is-windows": "^1.0.2", - "jest-validate": "^23.5.0", - "listr": "^0.14.1", - "lodash": "^4.17.5", - "log-symbols": "^2.2.0", - "micromatch": "^3.1.8", - "npm-which": "^3.0.1", - "p-map": "^1.1.1", - "path-is-inside": "^1.0.2", - "pify": "^3.0.0", - "please-upgrade-node": "^3.0.2", - "staged-git-files": "1.1.1", - "string-argv": "^0.0.2", - "stringify-object": "^3.2.2" - }, - "dependencies": { - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", - "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "execa": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.9.0.tgz", - "integrity": "sha512-BbUMBiX4hqiHZUA5+JujIjNb6TyAlp2D5KLheMjMluwOuzcnylDL4AxZYLLn1n2AGB49eSWwyKvvEQoRpnAtmA==", - "requires": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" - } - } - }, - "listr": { - "version": "0.14.3", - "resolved": "https://registry.npmjs.org/listr/-/listr-0.14.3.tgz", - "integrity": "sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA==", - "requires": { - "@samverschueren/stream-to-observable": "^0.3.0", - "is-observable": "^1.1.0", - "is-promise": "^2.1.0", - "is-stream": "^1.1.0", - "listr-silent-renderer": "^1.1.1", - "listr-update-renderer": "^0.5.0", - "listr-verbose-renderer": "^0.5.0", - "p-map": "^2.0.0", - "rxjs": "^6.3.3" - }, - "dependencies": { - "p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" - } - } - }, - "listr-silent-renderer": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz", - "integrity": "sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4=" - }, - "listr-update-renderer": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz", - "integrity": 
"sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA==", - "requires": { - "chalk": "^1.1.3", - "cli-truncate": "^0.2.1", - "elegant-spinner": "^1.0.1", - "figures": "^1.7.0", - "indent-string": "^3.0.0", - "log-symbols": "^1.0.2", - "log-update": "^2.3.0", - "strip-ansi": "^3.0.1" - }, - "dependencies": { - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "log-symbols": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-1.0.2.tgz", - "integrity": "sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg=", - "requires": { - "chalk": "^1.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" - } - } - }, - "listr-verbose-renderer": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz", - "integrity": "sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw==", - "requires": { - "chalk": "^2.4.1", - "cli-cursor": "^2.1.0", - "date-fns": "^1.27.2", - "figures": "^2.0.0" - }, - "dependencies": { - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "requires": { - "escape-string-regexp": "^1.0.5" - } - } - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" - }, - "log-symbols": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", - "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", - "requires": { - "chalk": "^2.0.1" - } - }, - "log-update": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-2.3.0.tgz", - "integrity": "sha1-iDKP19HOeTiykoN0bwsbwSayRwg=", - "requires": { - "ansi-escapes": "^3.0.0", - "cli-cursor": "^2.0.0", - "wrap-ansi": "^3.0.1" - } - }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=" - }, - "map-visit": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "requires": { - "object-visit": "^1.0.0" - } - }, - "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" - }, - "mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "requires": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - } - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "npm-path": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/npm-path/-/npm-path-2.0.4.tgz", - "integrity": "sha512-IFsj0R9C7ZdR5cP+ET342q77uSRdtWOlWpih5eC+lu29tIDbNEgDbzgVJ5UFvYHWhxDZ5TFkJafFioO0pPQjCw==", - "requires": { - "which": "^1.2.10" - } - }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "requires": { - "path-key": "^2.0.0" - } - }, - "npm-which": { - 
"version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-which/-/npm-which-3.0.1.tgz", - "integrity": "sha1-kiXybsOihcIJyuZ8OxGmtKtxQKo=", + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, "requires": { - "commander": "^2.9.0", - "npm-path": "^2.0.2", - "which": "^1.2.10" + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" } }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" - }, - "object-assign": { + "ansi-colors": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" - }, - "object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "requires": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "requires": { - "isobject": "^3.0.0" - } - }, - "object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "requires": { - "isobject": "^3.0.1" - } + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, "requires": { - "wrappy": "1" + "type-fest": "^0.21.3" } }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "requires": { - "mimic-fn": "^1.0.0" - } + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + "ansi-styles": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.1.0.tgz", + 
"integrity": "sha512-VbqNsoz55SYGczauuup0MFUyXNQviSpFTj1RQtFzmQLk18qbVSpTFFGMT293rmDaQuKCT6InmbuEyUne4mTuxQ==", + "dev": true }, - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "requires": { - "p-try": "^2.0.0" - } + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, "requires": { - "p-limit": "^2.0.0" + "fill-range": "^7.0.1" } }, - "p-map": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-1.2.0.tgz", - "integrity": "sha512-r6zKACMNhjPJMTl8KcFH4li//gkrXWfbD6feV8l6doRHlzljFWGJ2AP6iKaCJXyZmAUMOPtvbW7EXkbWO/pLEA==" - }, - "p-try": { + "clean-stack": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" + "restore-cursor": "^3.1.0" } }, - "pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=" - }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" - }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=" - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" - }, - "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" - }, - "pkg-dir": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "cli-truncate": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", + "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "dev": true, "requires": { - "find-up": "^3.0.0" + "slice-ansi": "^5.0.0", + "string-width": "^5.0.0" } }, - "please-upgrade-node": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz", - "integrity": "sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==", + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "requires": { - "semver-compare": "^1.0.0" + "color-name": "~1.1.4" } }, - "posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" - }, - "pretty-format": { - "version": "23.6.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-23.6.0.tgz", - "integrity": "sha512-zf9NV1NSlDLDjycnwm6hpFATCGl/K1lt0R/GdkAK2O5LN/rwJoB+Mh93gGJjut4YbmecbfgLWVGSTCr0Ewvvbw==", - "requires": { - "ansi-regex": "^3.0.0", - "ansi-styles": "^3.2.0" - } + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + "colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", + "dev": true }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } + "commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true }, - "read-pkg": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-4.0.1.tgz", - "integrity": "sha1-ljYlN48+HE1IyFhytabsfV0JMjc=", + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, "requires": { - "normalize-package-data": "^2.3.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" } }, - "regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": 
"sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, "requires": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" + "ms": "2.1.2" } }, - "repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==" + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=" - }, - "resolve": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", - "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, "requires": { - "path-parse": "^1.0.6" + "ansi-colors": "^4.1.1" } }, - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" - }, - "resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=" - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" } }, - "ret": { - "version": "0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" - }, - "run-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/run-node/-/run-node-1.0.0.tgz", - "integrity": "sha512-kc120TBlQ3mih1LSzdAJXo4xn/GWS2ec0l3S+syHDXP9uRr0JAT8Qd3mdMuyjqCzeZktgP3try92cEgf9Nks8A==" - }, - "rxjs": { - "version": "6.6.3", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.3.tgz", - "integrity": "sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ==", + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, "requires": { - "tslib": "^1.9.0" + "to-regex-range": "^5.0.1" } }, - "safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "requires": { - "ret": "~0.1.10" - } + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true }, - "semver-compare": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", - "integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w=" + "husky": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/husky/-/husky-7.0.4.tgz", + "integrity": "sha512-vbaCKN2QLtP/vD4yvs6iz6hBEo6wkSzs8HpRah1Z6aGmF2KW5PdYuAd7uX5a+OyBZHBhd+TFLqgjUgytQr4RvQ==", + "dev": true }, - "set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "requires": { - "shebang-regex": "^1.0.0" - } + "is-fullwidth-code-point": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", + "dev": true }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": 
"sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true }, - "slash": { + "isexe": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", - "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true }, - "slice-ansi": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-0.0.4.tgz", - "integrity": "sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU=" + "lilconfig": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.4.tgz", + "integrity": "sha512-bfTIN7lEsiooCocSISTWXkiWJkRqtL9wYtYy+8EK3Y41qh3mpwPU0ycTOgjdY9ErwXCc8QyrQp82bdL0Xkm9yA==", + "dev": true }, - "snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "requires": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" + "lint-staged": { + "version": "12.1.2", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-12.1.2.tgz", + "integrity": "sha512-bSMcQVqMW98HLLLR2c2tZ+vnDCnx4fd+0QJBQgN/4XkdspGRPc8DGp7UuOEBe1ApCfJ+wXXumYnJmU+wDo7j9A==", + "dev": true, + "requires": { + "cli-truncate": "^3.1.0", + "colorette": "^2.0.16", + "commander": "^8.3.0", + "debug": "^4.3.2", + "enquirer": "^2.3.6", + "execa": "^5.1.1", + "lilconfig": "2.0.4", + "listr2": "^3.13.3", + "micromatch": "^4.0.4", + "normalize-path": "^3.0.0", + "object-inspect": "^1.11.0", + "string-argv": "^0.3.1", + "supports-color": "^9.0.2", + "yaml": "^1.10.2" + } + }, + "listr2": { + "version": "3.13.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.13.5.tgz", + "integrity": "sha512-3n8heFQDSk+NcwBn3CgxEibZGaRzx+pC64n3YjpMD1qguV4nWus3Al+Oo3KooqFKTQEJ1v7MmnbnyyNspgx3NA==", + "dev": true, + "requires": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.4.0", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" }, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "cli-truncate": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, "requires": { - "ms": "2.0.0" + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" } }, - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, "requires": { - "is-descriptor": "^0.1.0" + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" } }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { - "is-extendable": "^0.1.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } } } }, - "snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "requires": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" + "log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "requires": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" }, "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "requires": { - "is-descriptor": "^1.0.0" + "color-convert": "^2.0.1" } }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, "requires": { - "kind-of": "^6.0.0" + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" } }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { - "kind-of": "^6.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" + "ansi-regex": "^5.0.1" } - } - } - }, - "snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "requires": { - "kind-of": "^3.2.0" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": 
"sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, "requires": { - "is-buffer": "^1.1.5" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" } } } }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true }, - "source-map-resolve": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", - "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, "requires": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" + "braces": "^3.0.1", + "picomatch": "^2.2.3" } }, - "source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=" + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true }, - "spdx-correct": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", - "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" + "path-key": "^3.0.0" } }, - "spdx-exceptions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", - "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==" + "object-inspect": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.1.tgz", + "integrity": "sha512-If7BjFlpkzzBeV1cqgT3OSWT3azyoxDGajR+iGnFBfVV2EWyDyWaZZW2ERDjUaY2QM8i5jI3Sj7mhsM4DDAqWA==", + "dev": true }, - "spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": 
"sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" + "mimic-fn": "^2.1.0" } }, - "spdx-license-ids": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.6.tgz", - "integrity": "sha512-+orQK83kyMva3WyPf59k1+Y525csj5JejicWut55zeTWANuN17qSiSLUXWtzHeNWORSvT7GLDJ/E/XiIWoXBTw==" - }, - "split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", + "p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, "requires": { - "extend-shallow": "^3.0.0" + "aggregate-error": "^3.0.0" } }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true }, - "staged-git-files": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/staged-git-files/-/staged-git-files-1.1.1.tgz", - "integrity": "sha512-H89UNKr1rQJvI1c/PIR3kiAMBV23yvR7LItZiV74HWZwzt7f3YHuujJ9nJZlt58WlFox7XQsOahexwk7nTe69A==" + "picomatch": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "dev": true }, - "static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, "requires": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - } + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" } }, - "string-argv": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.0.2.tgz", - "integrity": "sha1-2sMECGkMIfPDYwo/86BYd73L1zY=" + "rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", + "dev": true }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", 
+ "rxjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.4.0.tgz", + "integrity": "sha512-7SQDi7xeTMCJpqViXh8gL/lebcwlp3d831F05+9B44A4B0WfsEwUQHR64gsH1kvJ+Ep/J9K2+n1hVl1CsGN23w==", + "dev": true, "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "tslib": "~2.1.0" } }, - "stringify-object": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", - "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "requires": { - "get-own-enumerable-property-symbols": "^3.0.0", - "is-obj": "^1.0.1", - "is-regexp": "^1.0.0" + "shebang-regex": "^3.0.0" } }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "requires": { - "ansi-regex": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" - } - } + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + "signal-exit": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", + "dev": true }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "slice-ansi": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", + "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", + "dev": true, "requires": { - "has-flag": "^3.0.0" + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" } }, - "symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==" + "string-argv": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz", + "integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==", + "dev": true }, - "to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "string-width": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.0.1.tgz", + "integrity": 
"sha512-5ohWO/M4//8lErlUUtrFy3b11GtNOuMOU0ysKCDXFcfXuuvUXu95akgj/i8ofmaGdN0hCqyl6uu9i8dS/mQp5g==", + "dev": true, "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } + "emoji-regex": "^9.2.2", + "is-fullwidth-code-point": "^4.0.0", + "strip-ansi": "^7.0.1" } }, - "to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "strip-ansi": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", + "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", + "dev": true, "requires": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" + "ansi-regex": "^6.0.1" } }, - "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - } + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true }, - "tslib": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.0.tgz", - "integrity": "sha512-+Zw5lu0D9tvBMjGP8LpvMb0u2WW2QV3y+D8mO6J+cNzCYIN4sVy43Bf9vl92nqFahutN0I8zHa7cc4vihIshnw==" + "supports-color": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.2.1.tgz", + "integrity": "sha512-Obv7ycoCTG51N7y175StI9BlAXrmgZrFhZOb0/PyjHBher/NmsdBgbbQ1Inhq+gIhz6+7Gb+jWF2Vqi7Mf1xnQ==", + "dev": true }, - "union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "requires": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - } + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true }, - "unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, "requires": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "dependencies": { - "has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "requires": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "dependencies": { - "isobject": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "requires": { - "isarray": "1.0.0" - } - } - } - }, - "has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=" - } + "is-number": "^7.0.0" } }, - "urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=" - }, - "use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==" + "tslib": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.1.0.tgz", + "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==", + "dev": true }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true }, "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "requires": { "isexe": "^2.0.0" } }, "wrap-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-3.0.1.tgz", - "integrity": "sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo=", - "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0" + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" }, "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } }, "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^5.0.1" } } } }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true } } } diff --git a/package.json b/package.json index 950a3b3e7cee106a70d803a4df86f1ccc1199fa1..4c5e2b7368a0d9847dfc2c8a3f6b1a0ede392da6 100644 --- a/package.json +++ b/package.json @@ -1,21 +1,18 @@ { - "dependencies": { - "husky": "^1.1.2", - "lint-staged": "^7.3.0" - }, "lint-staged": { "*.json": [ - "python2 ./format-json", - "git add" + "./format-json" ], "{component,software,stack}/**": [ - "python -c 'import sys, os.path, subprocess; [subprocess.check_call((\"python2\", \"./update-hash\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'", + "python -c 'import sys, os.path, subprocess; [subprocess.check_call((\"./update-hash\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'", "python -c 'import sys, os.path, subprocess; [subprocess.check_call((\"git\", \"add\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'" ] }, - "husky": { - "hooks": { - "pre-commit": "lint-staged" - } + "devDependencies": { + "husky": "^7.0.0", + "lint-staged": "^12.1.2" + }, + "scripts": { + "prepare": "husky install && git config merge.tool update-hash-mergetool && git config mergetool.update-hash-mergetool.cmd './update-hash-mergetool \"$BASE\" 
\"$LOCAL\" \"$REMOTE\" \"$MERGED\"' && git config mergetool.update-hash-mergetool.trustExitCode true" } } diff --git a/setup.py b/setup.py index 7d0817f06b1dea47bad1b866e24fc7bd512d3d34..c8de16159ae33115b9ae4033f11da4a192276d15 100755 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ from setuptools import setup, find_packages import glob import os -version = '1.0.217' +version = '1.0.220' name = 'slapos.cookbook' long_description = open("README.rst").read() @@ -64,7 +64,6 @@ setup(name=name, install_requires=[ 'enum34; python_version<"3.4"', # for inotify-simple 'jsonschema', - 'hexagonit.recipe.download', 'netaddr', # to manipulate on IP addresses 'setuptools', # namespaces 'inotify_simple', @@ -78,7 +77,6 @@ setup(name=name, entry_points={ 'zc.buildout': [ 'addresiliency = slapos.recipe.addresiliency:Recipe', - 'accords = slapos.recipe.accords:Recipe', 'apacheperl = slapos.recipe.apacheperl:Recipe', 'apachephp = slapos.recipe.apachephp:Recipe', 'apachephpconfigure = slapos.recipe.apachephpconfigure:Recipe', @@ -91,8 +89,6 @@ setup(name=name, 'check_parameter = slapos.recipe.check_parameter:Recipe', 'cloud9 = slapos.recipe.cloud9:Recipe', 'cloudooo.test = slapos.recipe.erp5_test:CloudoooRecipe', - 'condor = slapos.recipe.condor:Recipe', - 'condor.submit = slapos.recipe.condor:AppSubmit', 'copyfilelist = slapos.recipe.copyfilelist:Recipe', 'cron = slapos.recipe.dcron:Recipe', 'cron.d = slapos.recipe.dcron:Part', @@ -126,7 +122,6 @@ setup(name=name, 'neoppod.admin = slapos.recipe.neoppod:Admin', 'neoppod.master = slapos.recipe.neoppod:Master', 'neoppod.storage = slapos.recipe.neoppod:Storage', - 'nosqltestbed = slapos.recipe.nosqltestbed:NoSQLTestBed', 'notifier = slapos.recipe.notifier:Recipe', 'notifier.callback = slapos.recipe.notifier:Callback', 'notifier.notify = slapos.recipe.notifier:Notify', @@ -161,8 +156,6 @@ setup(name=name, 'slapconfiguration = slapos.recipe.slapconfiguration:Recipe', 'slapconfiguration.serialised = slapos.recipe.slapconfiguration:Serialised', 'slapconfiguration.jsondump = slapos.recipe.slapconfiguration:JsonDump', - 'slapcontainer = slapos.recipe.container:Recipe', - 'sphinx= slapos.recipe.sphinx:Recipe', 'squid = slapos.recipe.squid:Recipe', 'sshkeys_authority = slapos.recipe.sshkeys_authority:Recipe', 'sshkeys_authority.request = slapos.recipe.sshkeys_authority:Request', @@ -181,9 +174,6 @@ setup(name=name, 'zero-knowledge.read = slapos.recipe.zero_knowledge:ReadRecipe', 'zero-knowledge.write = slapos.recipe.zero_knowledge:WriteRecipe' ], - 'slapos.recipe.nosqltestbed.plugin': [ - 'kumo = slapos.recipe.nosqltestbed.kumo:KumoTestBed', - ], }, extras_require=extras_require, test_suite='slapos.test', diff --git a/slapos/recipe/accords/__init__.py b/slapos/recipe/accords/__init__.py deleted file mode 100644 index 7efa69958a610d712f2f24138f9868fc63323291..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/__init__.py +++ /dev/null @@ -1,105 +0,0 @@ -############################################################################## -# -# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved. 
-# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## -import os -import shutil -from slapos.recipe.librecipe import GenericSlapRecipe -import shutil -import subprocess -import sys - -class Recipe(GenericSlapRecipe): - def _install(self): - path_list = [] - accords_location = self.buildout['accordsdirectory']['accords'] - - parameter_dict = dict( - userid=self.options['userid'], - tenantname=self.options['tenantname'], - password=self.options['password'], - domain=self.options['domain'], - openstack_url=self.options['openstack_url'], - python_location=sys.executable, - accords_location=accords_location, - manifest_name=self.options['manifest-name'], - # XXX this is workaround - accords_lib_directory=self.options['accords_lib_directory'], - computer_id = self.computer_id, - computer_partition_id = self.computer_partition_id, - server_url = self.server_url, - software_release_url = self.software_release_url, - key_file = self.key_file, - cert_file = self.cert_file, - path = '%s:%s' % (self.options['accords_bin_directory'], - os.environ.get('PATH', '')), - ) - # Generate os-config.xml - os_config_file = self.createFile(self.options['os-config'], - self.substituteTemplate(self.getTemplateFilename('os_config.xml.in'), - parameter_dict)) - path_list.append(os_config_file) - - # Put modified accords configuration file - accords_configuration_parameter_dict = dict( - listen_ip = self.options['listen-ip'] - ) - accords_configuration_file_location = self.createFile( - self.options['accords-configuration-file'], - self.substituteTemplate(self.getTemplateFilename('accords.ini.in'), - accords_configuration_parameter_dict)) - path_list.append(accords_configuration_file_location) - - # XXX is it dangerous? 
- security_path = os.path.join(accords_location, 'security') - if os.path.exists(security_path): - shutil.rmtree(security_path) - - # Initiate configuration - subprocess.check_call('./accords-config', - cwd=accords_location - ) - - # Generate manifest - manifest_origin_location = self.options['manifest-source'] - manifest_location = self.options['manifest-destination'] - - shutil.copy(manifest_origin_location, manifest_location) - path_list.append(manifest_location) - - # Generate wrapper - wrapper_location = self.createPythonScript(self.options['accords-wrapper'], - __name__ + '.accords.runAccords', - (parameter_dict,)) - path_list.append(wrapper_location) - - # Generate helper for debug - self.createExecutable( - self.options['testos-wrapper'], - self.substituteTemplate(self.getTemplateFilename('testos.in'), - parameter_dict) - ) - - return path_list diff --git a/slapos/recipe/accords/accords.py b/slapos/recipe/accords/accords.py deleted file mode 100644 index 88760135df217ae9ef1caa30d0d1d35187990ae6..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/accords.py +++ /dev/null @@ -1,92 +0,0 @@ -#!%(python_location)s - -############################################################################## -# -# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## -import os -from slapos import slap -import signal -import subprocess -from subprocess import Popen -import sys -import time - -def runAccords(accords_conf): - """Launch ACCORDS, parse manifest, broker manifest, send connection - informations to SlapOS Master. 
Destroy instance and stops ACCORDS at - SIGTERM.""" - computer_id = accords_conf['computer_id'] - computer_partition_id = accords_conf['computer_partition_id'] - server_url = accords_conf['server_url'] - software_release_url = accords_conf['software_release_url'] - key_file = accords_conf['key_file'] - cert_file = accords_conf['cert_file'] - accords_lib_directory = accords_conf['accords_lib_directory'] - accords_location = accords_conf['accords_location'] - manifest_name = accords_conf['manifest_name'] - - environment = dict( - LD_LIBRARY_PATH=accords_lib_directory, - PATH=accords_conf['path'], - HOME=accords_location, - ) - - # Set handler to stop ACCORDS when end of world comes - # XXX use subprocess.check_call and add exception handlers - def sigtermHandler(signum, frame): - Popen(['./co-command', 'stop', '/service/*'], - cwd=accords_location, env=environment).communicate() - Popen(['./co-stop'], - cwd=accords_location, env=environment).communicate() - sys.exit(0) - - signal.signal(signal.SIGTERM, sigtermHandler) - - # Launch ACCORDS, parse & broke manifest to deploy instance - print 'Starting ACCORDS and friends...' - subprocess.check_call(['./co-start'],cwd=accords_location, env=environment) - print 'Parsing manifest...' - subprocess.check_call(['./co-parser', manifest_name], - cwd=accords_location, env=environment) - print 'Brokering manifest...' - subprocess.check_call(['./co-broker', manifest_name], - cwd=accords_location, env=environment) - print 'Done.' - - # Parse answer - # XXX - connection_dict = dict(connection='hardcoded') - - # Send information about published service to SlapOS Master - slap_connection = slap.slap() - slap_connection.initializeConnection(server_url, key_file, cert_file) - computer_partition = slap_connection.registerComputerPartition(computer_id, - computer_partition_id) - computer_partition.setConnectionDict(connection_dict) - - # Go to sleep, wait kill - while(True): - time.sleep(60) diff --git a/slapos/recipe/accords/template/accords.ini.in b/slapos/recipe/accords/template/accords.ini.in deleted file mode 100644 index d18ebb426f542f3280121953c692045784d18fc2..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/template/accords.ini.in +++ /dev/null @@ -1,35 +0,0 @@ -# REST host (default: 127.0.0.1) -resthost=%(listen_ip)s - -# REST port (default: 8086) -#restport=8086 - -# Target (default: ./accords.xml) -#target=accords.xml - -# Acitvate TLS (default: 0) -#tls=0 - -# Activate monitoring (default: 1) -#monitor=1 - -# Trace (default: 1) -#trace=1 - -# Threads (default:1) -#threads=1 - -# Be verbose (default: 1) -#verbose=0 - -# Debug (default: 1) -#debug=1 - -# Domain (default: occi) -#domain=occi - -# Operator (default: accords) -#operator=accords - -# Password (default: co-system) -#password=co-system diff --git a/slapos/recipe/accords/template/coips.xml.in b/slapos/recipe/accords/template/coips.xml.in deleted file mode 100644 index 6613a0fa3544323a75ecd9d0d24f25ed68e4422b..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/template/coips.xml.in +++ /dev/null @@ -1,15 +0,0 @@ -<?xml version="1.0" encoding="UTF8"?> -<manifest name="coips:model" xmlns="http://www.compatibleone.fr/schemes/cords.xsd"> - -<description>Infrastructure profile used by production tool</description> - -<node name="coips:model"> -<infrastructure name="coips:model"> -<compute name="coips:model" cores="1" speed="1GHz" architecture="x686" memory="1GB"/> -<storage name="coips:model" size="10GB" type="SATA"/> -<network name="coips:model" vlan="true" 
label="database"/> -</infrastructure> -</node> - - -</manifest> \ No newline at end of file diff --git a/slapos/recipe/accords/template/os_config.xml.in b/slapos/recipe/accords/template/os_config.xml.in deleted file mode 100644 index e479390f7fea8ed8999dcafce900ba876b9f3bf0..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/template/os_config.xml.in +++ /dev/null @@ -1,17 +0,0 @@ -<os_configs> - <os_config - id="e1f892e3-slap-slap-slap-9354b95d3b17" - name="slaposrecipe" - description="Configuration of Account used by slapos recipe" - user="%(userid)s" - password="%(password)s" - authenticate="" - agent="CompatibleOne/OpenStackClient/1.0a.0.01" - host="%(openstack_url)s" - version="v1.1" - namespace="%(domain)s" - base="" - tls="0" - current="0" - /> -</os_configs> diff --git a/slapos/recipe/accords/template/testos.in b/slapos/recipe/accords/template/testos.in deleted file mode 100644 index 2413a33846e07c1073ce0f0465ef70f60f32f8cd..0000000000000000000000000000000000000000 --- a/slapos/recipe/accords/template/testos.in +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh - -export PATH=%(path)s - -export ENO_HOST=%(openstack_url)s -export ENO_USER=%(userid)s -export ENO_PASS=%(password)s -export ENO_VERSION=v1.1 -export ENO_TENANT=%(tenantname)s - -testos --host $ENO_HOST --password $ENO_PASS --user $ENO_USER --version $ENO_VERSION --tenant $ENO_TENANT $1 $2 $3 $4 $5 $6 $7 diff --git a/slapos/recipe/condor/__init__.py b/slapos/recipe/condor/__init__.py deleted file mode 100644 index e70f257ba70b8a669a5cc125d8ba0e04c25e75b2..0000000000000000000000000000000000000000 --- a/slapos/recipe/condor/__init__.py +++ /dev/null @@ -1,284 +0,0 @@ -############################################################################## -# -# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## -from slapos.recipe.librecipe import GenericBaseRecipe -import os -import subprocess -import zc.buildout -import filecmp -import shutil -import re -import json - -class Recipe(GenericBaseRecipe): - """Deploy a fully operational condor architecture.""" - - def __init__(self, buildout, name, options): - self.environ = {} - self.role = '' - environment_section = options.get('environment-section', '').strip() - if environment_section and environment_section in buildout: - # Use environment variables from the designated config section. 
- self.environ.update(buildout[environment_section]) - for variable in options.get('environment', '').splitlines(): - if variable.strip(): - try: - key, value = variable.split('=', 1) - self.environ[key.strip()] = value - except ValueError: - raise zc.buildout.UserError('Invalid environment variable definition: %s', variable) - # Extrapolate the environment variables using values from the current - # environment. - for key in self.environ: - self.environ[key] = self.environ[key] % os.environ - return GenericBaseRecipe.__init__(self, buildout, name, options) - - def _options(self, options): - #Path of condor compiled package - self.package = options['package'].strip() - self.rootdir = options['rootdirectory'].strip() - #Other condor dependances - self.javabin = options['java-bin'].strip() - self.dash = options['dash'].strip() - #Directory to deploy condor - self.prefix = options['rootdirectory'].strip() - self.localdir = options['local-dir'].strip() - self.wrapperdir = options['wrapper-dir'].strip() - self.wrapper_bin = options['bin'].strip() - self.wrapper_sbin = options['sbin'].strip() - - self.diskspace = options['disk-space'].strip() - self.ipv6 = options['ip'].strip() - self.condor_host = options['condor_host'].strip() - self.collector_name = options['collector_name'].strip() - self.host_list = self.options.get('allowed-write', '*') - self.email = self.options.get('admin-email', "root@$(FULL_HOSTNAME)") - - def install(self): - path_list = [] - #get UID and GID for current slapuser - stat_info = os.stat(self.rootdir) - slapuser = str(stat_info.st_uid)+"."+str(stat_info.st_gid) - domain_name = 'slapos%s.com' % stat_info.st_uid - - #Configure condor - configure_script = os.path.join(self.package, 'condor_configure') - install_args = [configure_script, '--install='+self.package, - '--prefix='+self.prefix, '--overwrite', '--verbose', - '--local-dir='+self.localdir] #--ignore-missing-libs - if self.options['machine-role'].strip() == "manager": - self.role = "manager,submit" - elif self.options['machine-role'].strip() == "worker": - self.role = "execute" - install_args += ['--central-manager='+self.condor_host] - install_args += ['--type='+self.role] - configure = subprocess.Popen(install_args, env=self.environ, - stdout=subprocess.PIPE) - configure.communicate()[0] - if configure.returncode is None or configure.returncode != 0: - return path_list - - #Generate condor_configure file - condor_config = os.path.join(self.rootdir, 'etc/condor_config') - config_local = os.path.join(self.localdir, 'condor_config.local') - condor_configure = dict(condor_host=self.condor_host, releasedir=self.prefix, - localdir=self.localdir, config_local=config_local, - slapuser=slapuser, ipv6=self.ipv6, - diskspace=self.diskspace, javabin=self.javabin, - host_list=self.host_list, collector_name=self.collector_name, - email=self.email, domain_name=domain_name) - destination = os.path.join(condor_config) - config = self.createFile(destination, - self.substituteTemplate(self.getTemplateFilename('condor_config.generic'), - condor_configure)) - path_list.append(config) - - #Search if is needed to update condor_config.local file - find = re.search('NETWORK_INTERFACE[\s]*=[\s]*(%s)' % self.ipv6, - open(config_local, 'r').read()) - if not find: - #update condor_config.local - with open(config_local, 'a') as f: - if self.role == "execute": - f.write("\nSTART = TRUE") - f.write("\nCOLLECTOR_NAME = %s\n \nNETWORK_INTERFACE=%s" % - (self.collector_name, self.ipv6)) - - #create condor binary launcher for slapos - if not 
os.path.exists(self.wrapper_bin): - os.makedirs(self.wrapper_bin, int('0o744', 8)) - if not os.path.exists(self.wrapper_sbin): - os.makedirs(self.wrapper_sbin, int('0o744', 8)) - #generate script for each file in prefix/bin - for binary in os.listdir(self.prefix+'/bin'): - wrapper_location = os.path.join(self.wrapper_bin, binary) - current_exe = os.path.join(self.prefix, 'bin', binary) - wrapper = open(wrapper_location, 'w') - content = """#!%s - export LD_LIBRARY_PATH=%s - export PATH=%s - export CONDOR_CONFIG=%s - export CONDOR_LOCATION=%s - export CONDOR_IDS=%s - export HOME=%s - export HOSTNAME=%s - exec %s $*""" % (self.dash, - self.environ['LD_LIBRARY_PATH'], self.environ['PATH'], - condor_config, self.prefix, slapuser, self.environ['HOME'], - self.environ['HOSTNAME'], current_exe) - wrapper.write(content) - wrapper.close() - path_list.append(wrapper_location) - os.chmod(wrapper_location, 0o744) - - #generate script for each file in prefix/sbin - for binary in os.listdir(self.prefix+'/sbin'): - wrapper_location = os.path.join(self.wrapper_sbin, binary) - current_exe = os.path.join(self.prefix, 'sbin', binary) - wrapper = open(wrapper_location, 'w') - content = """#!%s - export LD_LIBRARY_PATH=%s - export PATH=%s - export CONDOR_CONFIG=%s - export CONDOR_LOCATION=%s - export CONDOR_IDS=%s - export HOME=%s - export HOSTNAME=%s - exec %s $*""" % (self.dash, - self.environ['LD_LIBRARY_PATH'], self.environ['PATH'], - condor_config, self.prefix, slapuser, self.environ['HOME'], - self.environ['HOSTNAME'], current_exe) - wrapper.write(content) - wrapper.close() - path_list.append(wrapper_location) - os.chmod(wrapper_location, 0o744) - - #generate script for start condor - wrapper = self.createPythonScript( - os.path.join(self.wrapperdir, 'start_condor'), - __name__ + '.configure.condorStart', - (os.path.join(self.wrapper_sbin, 'condor_reconfig'), - os.path.join(self.wrapper_sbin, 'condor_master')) - ) - path_list.append(wrapper) - return path_list - -class AppSubmit(GenericBaseRecipe): - """Submit a condor job into an existing Condor master instance""" - - def download(self, url, filename=None, md5sum=None): - cache = os.path.join(self.options['rootdirectory'].strip(), 'tmp') - if not os.path.exists(cache): - os.mkdir(cache) - downloader = zc.buildout.download.Download(self.buildout['buildout'], - hash_name=True, cache=cache) - path, _ = downloader(url, md5sum) - if filename: - name = os.path.join(cache, filename) - os.rename(path, name) - return name - return path - - def copy_file(self, source, dest): - """"Copy file with source to dest with auto replace - return True if file has been copied and dest ha been replaced - """ - result = False - if source and os.path.exists(source): - if os.path.exists(dest): - if filecmp.cmp(dest, source): - return False - os.unlink(dest) - result = True - shutil.copy(source, dest) - return result - - def getFiles(self): - """This is used to download app files if necessary and update options values""" - app_list = json.loads(self.options['condor-app-list']) - if not app_list: - return None - for app in app_list: - if app_list[app].get('files', None): - file_list = app_list[app]['files'] - for file in file_list: - if file and (file.startswith('http') or file.startswith('ftp')): - file_list[file] = self.download(file_list[file]) - os.chmod(file_list[file], 0o600) - else: - app_list[app]['files'] = {} - - executable = app_list[app].get('executable', '') - if executable and (executable.startswith('http') or executable.startswith('ftp')): - 
app_list[app]['executable'] = self.download(executable, - app_list[app]['executable-name']) - os.chmod(app_list[app]['executable-name'], 0o700) - submit_file = app_list[app].get('description-file', '') - if submit_file and (submit_file.startswith('http') or submit_file.startswith('ftp')): - app_list[app]['description-file'] = self.download(submit_file, 'submit') - os.chmod(app_list[app]['description-file'], 0o600) - - return app_list - - def install(self): - path_list = [] - #check if curent condor instance is an condor master - if self.options['machine-role'].strip() != "manager": - raise Exception("Cannot submit a job to Condor worker instance") - - #Setup directory - jobdir = self.options['job-dir'].strip() - if not os.path.exists(jobdir): - os.mkdir(jobdir) - app_list = self.getFiles() - for appname in app_list: - appdir = os.path.join(jobdir, appname) - if not os.path.exists(appdir): - os.mkdir(appdir) - submitfile = os.path.join(appdir, 'submit') - - self.copy_file(app_list[appname]['executable'], - os.path.join(appdir, app_list[appname]['executable-name']) - ) - install = self.copy_file(app_list[appname]['description-file'], submitfile) - sig_install = os.path.join(appdir, '.install') - if install: - with open(sig_install, 'w') as f: - f.write('to_install') - for file in app_list[appname]['files']: - destination = os.path.join(appdir, file) - if os.path.exists(destination): - os.unlink(destination) - os.symlink(app_list[appname]['files'][file], destination) - #generate wrapper for submitting job - submit_job = self.createPythonScript( - os.path.join(self.options['wrapper-dir'].strip(), appname), - __name__ + '.configure.submitJob', - (os.path.join(self.options['bin'].strip(), 'condor_submit'), - 'submit', appdir, appname, sig_install) - ) - path_list.append(submit_job) - return path_list \ No newline at end of file diff --git a/slapos/recipe/condor/configure.py b/slapos/recipe/condor/configure.py deleted file mode 100644 index f0590ebe4b970a3015a2eabf4fc99a3ffc980590..0000000000000000000000000000000000000000 --- a/slapos/recipe/condor/configure.py +++ /dev/null @@ -1,53 +0,0 @@ -############################################################################## -# -# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -############################################################################## - -import os -import subprocess -import time - -def submitJob(submit, submit_file, appdir, appname, sig_install): - """Run condor_submit (if needed) for job deployment""" - time.sleep(10) - print "Check if needed to submit %s job's" % appname - if not os.path.exists(sig_install): - print "Nothing for install or update...Exited" - return - # '-a', "log = out.log", '-a', "error = error.log", - launch_args = submit, '-verbose', submit_file - process = subprocess.Popen(launch_args, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, cwd=appdir) - result = process.communicate()[0] - if process.returncode is None or process.returncode != 0: - print "Failed to execute condor_submit.\nThe error was: %s" % result - else: - os.unlink(sig_install) - -def condorStart(condor_reconfig, start_bin): - """Start Condor if deamons is currently stopped""" - if subprocess.call(condor_reconfig): - #process failled to reconfig condor that mean that condor deamons is not curently started - subprocess.call(start_bin) diff --git a/slapos/recipe/condor/template/condor_config.generic b/slapos/recipe/condor/template/condor_config.generic deleted file mode 100644 index 9fc59e238540b41d081ce2b77bb1eb538d383263..0000000000000000000000000000000000000000 --- a/slapos/recipe/condor/template/condor_config.generic +++ /dev/null @@ -1,2569 +0,0 @@ -###################################################################### -## -## condor_config -## -## This is the global configuration file for condor. Any settings -## made here may potentially be overridden in the local configuration -## file. KEEP THAT IN MIND! To double-check that a variable is -## getting set from the configuration file that you expect, use -## condor_config_val -v <variable name> -## -## The file is divided into four main parts: -## Part 1: Settings you likely want to customize -## Part 2: Settings you may want to customize -## Part 3: Settings that control the policy of when condor will -## start and stop jobs on your machines -## Part 4: Settings you should probably leave alone (unless you -## know what you're doing) -## -## Please read the INSTALL file (or the Install chapter in the -## Condor Administrator's Manual) for detailed explanations of the -## various settings in here and possible ways to configure your -## pool. -## -## Unless otherwise specified, settings that are commented out show -## the defaults that are used if you don't define a value. Settings -## that are defined here MUST BE DEFINED since they have no default -## value. -## -## Unless otherwise indicated, all settings which specify a time are -## defined in seconds. -## -###################################################################### - -###################################################################### -###################################################################### -## -## ###### # -## # # ## ##### ##### ## -## # # # # # # # # # -## ###### # # # # # # -## # ###### ##### # # -## # # # # # # # -## # # # # # # ##### -## -## Part 1: Settings you likely want to customize: -###################################################################### -###################################################################### - -NO_DNS = False -#DEFAULT_DOMAIN_NAME = %(domain_name)s -ENABLE_IPV6 = TRUE -## What machine is your central manager? 
-CONDOR_HOST = %(condor_host)s -##-------------------------------------------------------------------- -## Pathnames: -##-------------------------------------------------------------------- -## Where have you installed the bin, sbin and lib condor directories? -RELEASE_DIR = %(releasedir)s - -## Where is the local condor directory for each host? -## This is where the local config file(s), logs and -## spool/execute directories are located -LOCAL_DIR = %(localdir)s -#LOCAL_DIR = $(RELEASE_DIR)/hosts/$(HOSTNAME) - -## Where is the machine-specific local config file for each host? -LOCAL_CONFIG_FILE = %(config_local)s - -## Where are optional machine-specific local config files located? -## Config files are included in lexicographic order. -LOCAL_CONFIG_DIR = $(LOCAL_DIR)/config -#LOCAL_CONFIG_DIR = $(LOCAL_DIR)/config - -## Blacklist for file processing in the LOCAL_CONFIG_DIR -## LOCAL_CONFIG_DIR_EXCLUDE_REGEXP = ^((\..*)|(.*~)|(#.*)|(.*\.rpmsave)|(.*\.rpmnew))$ - -## If the local config file is not present, is it an error? -## WARNING: This is a potential security issue. -## If not specificed, the default is True -#REQUIRE_LOCAL_CONFIG_FILE = TRUE - -##-------------------------------------------------------------------- -## Mail parameters: -##-------------------------------------------------------------------- -## When something goes wrong with condor at your site, who should get -## the email? -CONDOR_ADMIN = %(email)s - -## Full path to a mail delivery program that understands that "-s" -## means you want to specify a subject: -MAIL = /bin/mail - -##-------------------------------------------------------------------- -## Network domain parameters: -##-------------------------------------------------------------------- -## Internet domain of machines sharing a common UID space. If your -## machines don't share a common UID space, set it to -## UID_DOMAIN = $(FULL_HOSTNAME) -## to specify that each machine has its own UID space. -UID_DOMAIN = $(FULL_HOSTNAME) - -## Internet domain of machines sharing a common file system. -## If your machines don't use a network file system, set it to -## FILESYSTEM_DOMAIN = $(FULL_HOSTNAME) -## to specify that each machine has its own file system. -FILESYSTEM_DOMAIN = $(FULL_HOSTNAME) - -## This macro is used to specify a short description of your pool. -## It should be about 20 characters long. For example, the name of -## the UW-Madison Computer Science Condor Pool is ``UW-Madison CS''. -COLLECTOR_NAME = %(collector_name)s - -###################################################################### -###################################################################### -## -## ###### ##### -## # # ## ##### ##### # # -## # # # # # # # # -## ###### # # # # # ##### -## # ###### ##### # # -## # # # # # # # -## # # # # # # ####### -## -## Part 2: Settings you may want to customize: -## (it is generally safe to leave these untouched) -###################################################################### -###################################################################### - -## -## The user/group ID <uid>.<gid> of the "Condor" user. 
-## (this can also be specified in the environment) -## Note: the CONDOR_IDS setting is ignored on Win32 platforms -CONDOR_IDS=%(slapuser)s - -##-------------------------------------------------------------------- -## Flocking: Submitting jobs to more than one pool -##-------------------------------------------------------------------- -## Flocking allows you to run your jobs in other pools, or lets -## others run jobs in your pool. -## -## To let others flock to you, define FLOCK_FROM. -## -## To flock to others, define FLOCK_TO. - -## FLOCK_FROM defines the machines where you would like to grant -## people access to your pool via flocking. (i.e. you are granting -## access to these machines to join your pool). -FLOCK_FROM = -## An example of this is: -#FLOCK_FROM = somehost.friendly.domain, anotherhost.friendly.domain - -## FLOCK_TO defines the central managers of the pools that you want -## to flock to. (i.e. you are specifying the machines that you -## want your jobs to be negotiated at -- thereby specifying the -## pools they will run in.) -FLOCK_TO = -## An example of this is: -#FLOCK_TO = central_manager.friendly.domain, condor.cs.wisc.edu - -## FLOCK_COLLECTOR_HOSTS should almost always be the same as -## FLOCK_NEGOTIATOR_HOSTS (as shown below). The only reason it would be -## different is if the collector and negotiator in the pool that you are -## flocking too are running on different machines (not recommended). -## The collectors must be specified in the same corresponding order as -## the FLOCK_NEGOTIATOR_HOSTS list. -FLOCK_NEGOTIATOR_HOSTS = $(FLOCK_TO) -FLOCK_COLLECTOR_HOSTS = $(FLOCK_TO) -## An example of having the negotiator and the collector on different -## machines is: -#FLOCK_NEGOTIATOR_HOSTS = condor.cs.wisc.edu, condor-negotiator.friendly.domain -#FLOCK_COLLECTOR_HOSTS = condor.cs.wisc.edu, condor-collector.friendly.domain - -##-------------------------------------------------------------------- -## Host/IP access levels -##-------------------------------------------------------------------- -## Please see the administrator's manual for details on these -## settings, what they're for, and how to use them. - -## What machines have administrative rights for your pool? This -## defaults to your central manager. You should set it to the -## machine(s) where whoever is the condor administrator(s) works -## (assuming you trust all the users who log into that/those -## machine(s), since this is machine-wide access you're granting). -ALLOW_ADMINISTRATOR = $(CONDOR_HOST), $(IP_ADDRESS), %(host_list)s - -## If there are no machines that should have administrative access -## to your pool (for example, there's no machine where only trusted -## users have accounts), you can uncomment this setting. -## Unfortunately, this will mean that administering your pool will -## be more difficult. -#DENY_ADMINISTRATOR = * - -## What machines should have "owner" access to your machines, meaning -## they can issue commands that a machine owner should be able to -## issue to their own machine (like condor_vacate). This defaults to -## machines with administrator access, and the local machine. This -## is probably what you want. -ALLOW_OWNER = $(FULL_HOSTNAME), $(ALLOW_ADMINISTRATOR) - -## Read access. Machines listed as allow (and/or not listed as deny) -## can view the status of your pool, but cannot join your pool -## or run jobs. -## NOTE: By default, without these entries customized, you -## are granting read access to the whole world. 
You may want to -## restrict that to hosts in your domain. If possible, please also -## grant read access to "*.cs.wisc.edu", so the Condor developers -## will be able to view the status of your pool and more easily help -## you install, configure or debug your Condor installation. -## It is important to have this defined. -ALLOW_READ = * -#ALLOW_READ = *.your.domain, *.cs.wisc.edu -#DENY_READ = *.bad.subnet, bad-machine.your.domain, 144.77.88.* - -## Write access. Machines listed here can join your pool, submit -## jobs, etc. Note: Any machine which has WRITE access must -## also be granted READ access. Granting WRITE access below does -## not also automatically grant READ access; you must change -## ALLOW_READ above as well. -## -## You must set this to something else before Condor will run. -## This most simple option is: -## ALLOW_WRITE = * -## but note that this will allow anyone to submit jobs or add -## machines to your pool and is a serious security risk. - -ALLOW_WRITE = $(FULL_HOSTNAME), $(IP_ADDRESS), %(host_list)s -#ALLOW_WRITE = *.your.domain, your-friend's-machine.other.domain -#DENY_WRITE = bad-machine.your.domain - -## Are you upgrading to a new version of Condor and confused about -## why the above ALLOW_WRITE setting is causing Condor to refuse to -## start up? If you are upgrading from a configuration that uses -## HOSTALLOW/HOSTDENY instead of ALLOW/DENY we recommend that you -## convert all uses of the former to the latter. The syntax of the -## authorization settings is identical. They both support -## unauthenticated IP-based authorization as well as authenticated -## user-based authorization. To avoid confusion, the use of -## HOSTALLOW/HOSTDENY is discouraged. Support for it may be removed -## in the future. - -## Negotiator access. Machines listed here are trusted central -## managers. You should normally not have to change this. -ALLOW_NEGOTIATOR = $(CONDOR_HOST), $(IP_ADDRESS) -## Now, with flocking we need to let the SCHEDD trust the other -## negotiators we are flocking with as well. You should normally -## not have to change this either. -ALLOW_NEGOTIATOR_SCHEDD = $(CONDOR_HOST), $(FLOCK_NEGOTIATOR_HOSTS), $(IP_ADDRESS) - -## Config access. Machines listed here can use the condor_config_val -## tool to modify all daemon configurations. This level of host-wide -## access should only be granted with extreme caution. By default, -## config access is denied from all hosts. -#ALLOW_CONFIG = trusted-host.your.domain - -## Flocking Configs. These are the real things that Condor looks at, -## but we set them from the FLOCK_FROM/TO macros above. It is safe -## to leave these unchanged. -ALLOW_WRITE_COLLECTOR = $(ALLOW_WRITE), $(FLOCK_FROM) -ALLOW_WRITE_STARTD = $(ALLOW_WRITE), $(FLOCK_FROM) -ALLOW_READ_COLLECTOR = $(ALLOW_READ), $(FLOCK_FROM) -ALLOW_READ_STARTD = $(ALLOW_READ), $(FLOCK_FROM) - - -##-------------------------------------------------------------------- -## Security parameters for setting configuration values remotely: -##-------------------------------------------------------------------- -## These parameters define the list of attributes that can be set -## remotely with condor_config_val for the security access levels -## defined above (for example, WRITE, ADMINISTRATOR, CONFIG, etc). -## Please see the administrator's manual for futher details on these -## settings, what they're for, and how to use them. There are no -## default values for any of these settings. If they are not -## defined, no attributes can be set with condor_config_val. 
- -## Do you want to allow condor_config_val -rset to work at all? -## This feature is disabled by default, so to enable, you must -## uncomment the following setting and change the value to "True". -## Note: changing this requires a restart not just a reconfig. -#ENABLE_RUNTIME_CONFIG = False - -## Do you want to allow condor_config_val -set to work at all? -## This feature is disabled by default, so to enable, you must -## uncomment the following setting and change the value to "True". -## Note: changing this requires a restart not just a reconfig. -#ENABLE_PERSISTENT_CONFIG = False - -## Directory where daemons should write persistent config files (used -## to support condor_config_val -set). This directory should *ONLY* -## be writable by root (or the user the Condor daemons are running as -## if non-root). There is no default, administrators must define this. -## Note: changing this requires a restart not just a reconfig. -#PERSISTENT_CONFIG_DIR = /full/path/to/root-only/local/directory - -## Attributes that can be set by hosts with "CONFIG" permission (as -## defined with ALLOW_CONFIG and DENY_CONFIG above). -## The commented-out value here was the default behavior of Condor -## prior to version 6.3.3. If you don't need this behavior, you -## should leave this commented out. -#SETTABLE_ATTRS_CONFIG = * - -## Attributes that can be set by hosts with "ADMINISTRATOR" -## permission (as defined above) -#SETTABLE_ATTRS_ADMINISTRATOR = *_DEBUG, MAX_*_LOG - -## Attributes that can be set by hosts with "OWNER" permission (as -## defined above) NOTE: any Condor job running on a given host will -## have OWNER permission on that host by default. If you grant this -## kind of access, Condor jobs will be able to modify any attributes -## you list below on the machine where they are running. This has -## obvious security implications, so only grant this kind of -## permission for custom attributes that you define for your own use -## at your pool (custom attributes about your machines that are -## published with the STARTD_ATTRS setting, for example). -#SETTABLE_ATTRS_OWNER = your_custom_attribute, another_custom_attr - -## You can also define daemon-specific versions of each of these -## settings. For example, to define settings that can only be -## changed in the condor_startd's configuration by hosts with OWNER -## permission, you would use: -#STARTD_SETTABLE_ATTRS_OWNER = your_custom_attribute_name - - -##-------------------------------------------------------------------- -## Network filesystem parameters: -##-------------------------------------------------------------------- -## Do you want to use NFS for file access instead of remote system -## calls? -#USE_NFS = False - -## Do you want to use AFS for file access instead of remote system -## calls? -#USE_AFS = False - -##-------------------------------------------------------------------- -## Checkpoint server: -##-------------------------------------------------------------------- -## Do you want to use a checkpoint server if one is available? If a -## checkpoint server isn't available or USE_CKPT_SERVER is set to -## False, checkpoints will be written to the local SPOOL directory on -## the submission machine. -#USE_CKPT_SERVER = True - -## What's the hostname of this machine's nearest checkpoint server? -#CKPT_SERVER_HOST = checkpoint-server-hostname.your.domain - -## Do you want the starter on the execute machine to choose the -## checkpoint server? If False, the CKPT_SERVER_HOST set on -## the submit machine is used. 
Otherwise, the CKPT_SERVER_HOST set -## on the execute machine is used. The default is true. -#STARTER_CHOOSES_CKPT_SERVER = True - -##-------------------------------------------------------------------- -## Miscellaneous: -##-------------------------------------------------------------------- -## Try to save this much swap space by not starting new shadows. -## Specified in megabytes. -#RESERVED_SWAP = 0 - -## What's the maximum number of jobs you want a single submit machine -## to spawn shadows for? The default is a function of $(DETECTED_MEMORY) -## and a guess at the number of ephemeral ports available. - -## Example 1: -#MAX_JOBS_RUNNING = 10000 - -## Example 2: -## This is more complicated, but it produces the same limit as the default. -## First define some expressions to use in our calculation. -## Assume we can use up to 80%% of memory and estimate shadow private data -## size of 800k. -#MAX_SHADOWS_MEM = ceiling($(DETECTED_MEMORY)*0.8*1024/800) -## Assume we can use ~21,000 ephemeral ports (avg ~2.1 per shadow). -## Under Linux, the range is set in /proc/sys/net/ipv4/ip_local_port_range. -#MAX_SHADOWS_PORTS = 10000 -## Under windows, things are much less scalable, currently. -## Note that this can probably be safely increased a bit under 64-bit windows. -#MAX_SHADOWS_OPSYS = ifThenElse(regexp("WIN.*","$(OPSYS)"),200,100000) -## Now build up the expression for MAX_JOBS_RUNNING. This is complicated -## due to lack of a min() function. -#MAX_JOBS_RUNNING = $(MAX_SHADOWS_MEM) -#MAX_JOBS_RUNNING = \ -# ifThenElse( $(MAX_SHADOWS_PORTS) < $(MAX_JOBS_RUNNING), \ -# $(MAX_SHADOWS_PORTS), \ -# $(MAX_JOBS_RUNNING) ) -#MAX_JOBS_RUNNING = \ -# ifThenElse( $(MAX_SHADOWS_OPSYS) < $(MAX_JOBS_RUNNING), \ -# $(MAX_SHADOWS_OPSYS), \ -# $(MAX_JOBS_RUNNING) ) - - -## Maximum number of simultaneous downloads of output files from -## execute machines to the submit machine (limit applied per schedd). -## The value 0 means unlimited. -#MAX_CONCURRENT_DOWNLOADS = 10 - -## Maximum number of simultaneous uploads of input files from the -## submit machine to execute machines (limit applied per schedd). -## The value 0 means unlimited. -#MAX_CONCURRENT_UPLOADS = 10 - -## Condor needs to create a few lock files to synchronize access to -## various log files. Because of problems we've had with network -## filesystems and file locking over the years, we HIGHLY recommend -## that you put these lock files on a local partition on each -## machine. If you don't have your LOCAL_DIR on a local partition, -## be sure to change this entry. Whatever user (or group) condor is -## running as needs to have write access to this directory. If -## you're not running as root, this is whatever user you started up -## the condor_master as. If you are running as root, and there's a -## condor account, it's probably condor. Otherwise, it's whatever -## you've set in the CONDOR_IDS environment variable. See the Admin -## manual for details on this. -LOCK = $(LOG) - -## If you don't use a fully qualified name in your /etc/hosts file -## (or NIS, etc.) for either your official hostname or as an alias, -## Condor wouldn't normally be able to use fully qualified names in -## places that it'd like to. You can set this parameter to the -## domain you'd like appended to your hostname, if changing your host -## information isn't a good option. This parameter must be set in -## the global config file (not the LOCAL_CONFIG_FILE from above). 
-#DEFAULT_DOMAIN_NAME = your.domain.name - -## If you don't have DNS set up, Condor will normally fail in many -## places because it can't resolve hostnames to IP addresses and -## vice-versa. If you enable this option, Condor will use -## pseudo-hostnames constructed from a machine's IP address and the -## DEFAULT_DOMAIN_NAME. Both NO_DNS and DEFAULT_DOMAIN must be set in -## your top-level config file for this mode of operation to work -## properly. -#NO_DNS = True - -## Condor can be told whether or not you want the Condor daemons to -## create a core file if something really bad happens. This just -## sets the resource limit for the size of a core file. By default, -## we don't do anything, and leave in place whatever limit was in -## effect when you started the Condor daemons. If this parameter is -## set and "True", we increase the limit to as large as it gets. If -## it's set to "False", we set the limit at 0 (which means that no -## core files are even created). Core files greatly help the Condor -## developers debug any problems you might be having. -#CREATE_CORE_FILES = True - -## When Condor daemons detect a fatal internal exception, they -## normally log an error message and exit. If you have turned on -## CREATE_CORE_FILES, in some cases you may also want to turn on -## ABORT_ON_EXCEPTION so that core files are generated when an -## exception occurs. Set the following to True if that is what you -## want. -#ABORT_ON_EXCEPTION = False - -## Condor Glidein downloads binaries from a remote server for the -## machines into which you're gliding. This saves you from manually -## downloading and installing binaries for every architecture you -## might want to glidein to. The default server is one maintained at -## The University of Wisconsin. If you don't want to use the UW -## server, you can set up your own and change the following to -## point to it, instead. -GLIDEIN_SERVER_URLS = \ - http://www.cs.wisc.edu/condor/glidein/binaries - -## List the sites you want to GlideIn to on the GLIDEIN_SITES. For example, -## if you'd like to GlideIn to some Alliance GiB resources, -## uncomment the line below. -## Make sure that $(GLIDEIN_SITES) is included in ALLOW_READ and -## ALLOW_WRITE, or else your GlideIns won't be able to join your pool. -## This is _NOT_ done for you by default, because it is an even better -## idea to use a strong security method (such as GSI) rather than -## host-based security for authorizing glideins. -#GLIDEIN_SITES = *.ncsa.uiuc.edu, *.cs.wisc.edu, *.mcs.anl.gov -#GLIDEIN_SITES = - -## If your site needs to use UID_DOMAIN settings (defined above) that -## are not real Internet domains that match the hostnames, you can -## tell Condor to trust whatever UID_DOMAIN a submit machine gives to -## the execute machine and just make sure the two strings match. The -## default for this setting is False, since it is more secure this -## way. -#TRUST_UID_DOMAIN = False - -## If you would like to be informed in near real-time via condor_q when -## a vanilla/standard/java job is in a suspension state, set this attribute to -## TRUE. However, this real-time update of the condor_schedd by the shadows -## could cause performance issues if there are thousands of concurrently -## running vanilla/standard/java jobs under a single condor_schedd and they -## are allowed to suspend and resume. -#REAL_TIME_JOB_SUSPEND_UPDATES = False - -## A standard universe job can perform arbitrary shell calls via the -## libc 'system()' function. 
This function call is routed back to the shadow -## which performs the actual system() invocation in the initialdir of the -## running program and as the user who submitted the job. However, since the -## user job can request ARBITRARY shell commands to be run by the shadow, this -## is a generally unsafe practice. This should only be made available if it is -## actually needed. If this attribute is not defined, then it is the same as -## it being defined to False. Set it to True to allow the shadow to execute -## arbitrary shell code from the user job. -#SHADOW_ALLOW_UNSAFE_REMOTE_EXEC = False - -## KEEP_OUTPUT_SANDBOX is an optional feature to tell Condor-G to not -## remove the job spool when the job leaves the queue. To use, just -## set to TRUE. Since you will be operating Condor-G in this manner, -## you may want to put leave_in_queue = false in your job submit -## description files, to tell Condor-G to simply remove the job from -## the queue immediately when the job completes (since the output files -## will stick around no matter what). -#KEEP_OUTPUT_SANDBOX = False - -## This setting tells the negotiator to ignore user priorities. This -## avoids problems where jobs from different users won't run when using -## condor_advertise instead of a full-blown startd (some of the user -## priority system in Condor relies on information from the startd -- -## we will remove this reliance when we support the user priority -## system for grid sites in the negotiator; for now, this setting will -## just disable it). -#NEGOTIATOR_IGNORE_USER_PRIORITIES = False - -## This is a list of libraries containing ClassAd plug-in functions. -#CLASSAD_USER_LIBS = - -## This setting tells Condor whether to delegate or copy GSI X509 -## credentials when sending them over the wire between daemons. -## Delegation can take up to a second, which is very slow when -## submitting a large number of jobs. Copying exposes the credential -## to third parties if Condor isn't set to encrypt communications. -## By default, Condor will delegate rather than copy. -#DELEGATE_JOB_GSI_CREDENTIALS = True - -## This setting controls whether Condor delegates a full or limited -## X509 credential for jobs. Currently, this only affects grid-type -## gt2 grid universe jobs. The default is False. -#DELEGATE_FULL_JOB_GSI_CREDENTIALS = False - -## This setting controls the default behaviour for the spooling of files -## into, or out of, the Condor system by such tools as condor_submit -## and condor_transfer_data. Here is the list of valid settings for this -## parameter and what they mean: -## -## stm_use_schedd_only -## Ask the condor_schedd to solely store/retreive the sandbox -## -## stm_use_transferd -## Ask the condor_schedd for a location of a condor_transferd, then -## store/retreive the sandbox from the transferd itself. -## -## The allowed values are case insensitive. -## The default of this parameter if not specified is: stm_use_schedd_only -#SANDBOX_TRANSFER_METHOD = stm_use_schedd_only - -## This setting specifies an IP address that depends on the setting of -## BIND_ALL_INTERFACES. If BIND_ALL_INTERFACES is True (the default), then -## this variable controls what IP address will be advertised as the public -## address of the daemon. If BIND_ALL_INTERFACES is False, then this variable -## specifies which IP address to bind network sockets to. If -## BIND_ALL_INTERFACES is False and NETWORK_INTERFACE is not defined, Condor -## chooses a network interface automatically. 
It tries to choose a public -## interface if one is available. If it cannot decide which of two interfaces -## to choose from, it will pick the first one. -NETWORK_INTERFACE = %(ipv6)s -BIND_ALL_INTERFACES = FALSE - -##-------------------------------------------------------------------- -## Settings that control the daemon's debugging output: -##-------------------------------------------------------------------- - -## -## The flags given in ALL_DEBUG are shared between all daemons. -## - -ALL_DEBUG = - -MAX_COLLECTOR_LOG = 1000000 -COLLECTOR_DEBUG = - -MAX_KBDD_LOG = 1000000 -KBDD_DEBUG = - -MAX_NEGOTIATOR_LOG = 1000000 -NEGOTIATOR_DEBUG = D_MATCH -MAX_NEGOTIATOR_MATCH_LOG = 1000000 - -MAX_SCHEDD_LOG = 1000000 -SCHEDD_DEBUG = D_PID - -MAX_SHADOW_LOG = 1000000 -SHADOW_DEBUG = - -MAX_STARTD_LOG = 1000000 -STARTD_DEBUG = - -MAX_STARTER_LOG = 1000000 - -MAX_MASTER_LOG = 1000000 -MASTER_DEBUG = -## When the master starts up, should it truncate it's log file? -#TRUNC_MASTER_LOG_ON_OPEN = False - -MAX_JOB_ROUTER_LOG = 1000000 -JOB_ROUTER_DEBUG = - -MAX_ROOSTER_LOG = 1000000 -ROOSTER_DEBUG = - -MAX_SHARED_PORT_LOG = 1000000 -SHARED_PORT_DEBUG = - -MAX_HDFS_LOG = 1000000 -HDFS_DEBUG = - -# High Availability Logs -MAX_HAD_LOG = 1000000 -HAD_DEBUG = -MAX_REPLICATION_LOG = 1000000 -REPLICATION_DEBUG = -MAX_TRANSFERER_LOG = 1000000 -TRANSFERER_DEBUG = - - -## The daemons touch their log file periodically, even when they have -## nothing to write. When a daemon starts up, it prints the last time -## the log file was modified. This lets you estimate when a previous -## instance of a daemon stopped running. This paramete controls how often -## the daemons touch the file (in seconds). -#TOUCH_LOG_INTERVAL = 60 - -###################################################################### -###################################################################### -## -## ###### ##### -## # # ## ##### ##### # # -## # # # # # # # # -## ###### # # # # # ##### -## # ###### ##### # # -## # # # # # # # # -## # # # # # # ##### -## -## Part 3: Settings control the policy for running, stopping, and -## periodically checkpointing condor jobs: -###################################################################### -###################################################################### - -## This section contains macros are here to help write legible -## expressions: -MINUTE = 60 -HOUR = (60 * $(MINUTE)) -StateTimer = (time() - EnteredCurrentState) -ActivityTimer = (time() - EnteredCurrentActivity) -ActivationTimer = ifThenElse(JobStart =!= UNDEFINED, (time() - JobStart), 0) -LastCkpt = (time() - LastPeriodicCheckpoint) - -## The JobUniverse attribute is just an int. 
These macros can be -## used to specify the universe in a human-readable way: -STANDARD = 1 -VANILLA = 5 -MPI = 8 -VM = 13 -IsMPI = (TARGET.JobUniverse == $(MPI)) -IsVanilla = (TARGET.JobUniverse == $(VANILLA)) -IsStandard = (TARGET.JobUniverse == $(STANDARD)) -IsVM = (TARGET.JobUniverse == $(VM)) - -NonCondorLoadAvg = (LoadAvg - CondorLoadAvg) -BackgroundLoad = 0.3 -HighLoad = 0.5 -StartIdleTime = 15 * $(MINUTE) -ContinueIdleTime = 5 * $(MINUTE) -MaxSuspendTime = 10 * $(MINUTE) -MaxVacateTime = 10 * $(MINUTE) - -KeyboardBusy = (KeyboardIdle < $(MINUTE)) -ConsoleBusy = (ConsoleIdle < $(MINUTE)) -CPUIdle = ($(NonCondorLoadAvg) <= $(BackgroundLoad)) -CPUBusy = ($(NonCondorLoadAvg) >= $(HighLoad)) -KeyboardNotBusy = ($(KeyboardBusy) == False) - -BigJob = (TARGET.ImageSize >= (50 * 1024)) -MediumJob = (TARGET.ImageSize >= (15 * 1024) && TARGET.ImageSize < (50 * 1024)) -SmallJob = (TARGET.ImageSize < (15 * 1024)) - -JustCPU = ($(CPUBusy) && ($(KeyboardBusy) == False)) -MachineBusy = ($(CPUBusy) || $(KeyboardBusy)) - -## The RANK expression controls which jobs this machine prefers to -## run over others. Some examples from the manual include: -## RANK = TARGET.ImageSize -## RANK = (Owner == "coltrane") + (Owner == "tyner") \ -## + ((Owner == "garrison") * 10) + (Owner == "jones") -## By default, RANK is always 0, meaning that all jobs have an equal -## ranking. -#RANK = 0 - - -##################################################################### -## This where you choose the configuration that you would like to -## use. It has no defaults so it must be defined. We start this -## file off with the UWCS_* policy. -###################################################################### - -## Also here is what is referred to as the TESTINGMODE_*, which is -## a quick hardwired way to test Condor with a simple no-preemption policy. -## Replace UWCS_* with TESTINGMODE_* if you wish to do testing mode. -## For example: -## WANT_SUSPEND = $(UWCS_WANT_SUSPEND) -## becomes -## WANT_SUSPEND = $(TESTINGMODE_WANT_SUSPEND) - -# When should we only consider SUSPEND instead of PREEMPT? -WANT_SUSPEND = $(UWCS_WANT_SUSPEND) - -# When should we preempt gracefully instead of hard-killing? -WANT_VACATE = $(UWCS_WANT_VACATE) - -## When is this machine willing to start a job? -START = $(UWCS_START) - -## When should a local universe job be allowed to start? -#START_LOCAL_UNIVERSE = TotalLocalJobsRunning < 200 - -## When should a scheduler universe job be allowed to start? -#START_SCHEDULER_UNIVERSE = TotalSchedulerJobsRunning < 200 - -## When to suspend a job? -SUSPEND = $(UWCS_SUSPEND) - -## When to resume a suspended job? -CONTINUE = $(UWCS_CONTINUE) - -## When to nicely stop a job? -## (as opposed to killing it instantaneously) -PREEMPT = $(UWCS_PREEMPT) - -## When to instantaneously kill a preempting job -## (e.g. if a job is in the pre-empting stage for too long) -KILL = $(UWCS_KILL) - -PERIODIC_CHECKPOINT = $(UWCS_PERIODIC_CHECKPOINT) -PREEMPTION_REQUIREMENTS = $(UWCS_PREEMPTION_REQUIREMENTS) -PREEMPTION_RANK = $(UWCS_PREEMPTION_RANK) -NEGOTIATOR_PRE_JOB_RANK = $(UWCS_NEGOTIATOR_PRE_JOB_RANK) -NEGOTIATOR_POST_JOB_RANK = $(UWCS_NEGOTIATOR_POST_JOB_RANK) -MaxJobRetirementTime = $(UWCS_MaxJobRetirementTime) -CLAIM_WORKLIFE = $(UWCS_CLAIM_WORKLIFE) - -##################################################################### -## This is the UWisc - CS Department Configuration. -##################################################################### - -# When should we only consider SUSPEND instead of PREEMPT? 
-# Only when SUSPEND is True and one of the following is also true: -# - the job is small -# - the keyboard is idle -# - it is a vanilla universe job -UWCS_WANT_SUSPEND = ( $(SmallJob) || $(KeyboardNotBusy) || $(IsVanilla) ) && \ - ( $(SUSPEND) ) - -# When should we preempt gracefully instead of hard-killing? -UWCS_WANT_VACATE = ( $(ActivationTimer) > 10 * $(MINUTE) || $(IsVanilla) ) - -# Only start jobs if: -# 1) the keyboard has been idle long enough, AND -# 2) the load average is low enough OR the machine is currently -# running a Condor job -# (NOTE: Condor will only run 1 job at a time on a given resource. -# The reasons Condor might consider running a different job while -# already running one are machine Rank (defined above), and user -# priorities.) -UWCS_START = ( (KeyboardIdle > $(StartIdleTime)) \ - && ( $(CPUIdle) || \ - (State != "Unclaimed" && State != "Owner")) ) - -# Suspend jobs if: -# 1) the keyboard has been touched, OR -# 2a) The cpu has been busy for more than 2 minutes, AND -# 2b) the job has been running for more than 90 seconds -UWCS_SUSPEND = ( $(KeyboardBusy) || \ - ( (CpuBusyTime > 2 * $(MINUTE)) \ - && $(ActivationTimer) > 90 ) ) - -# Continue jobs if: -# 1) the cpu is idle, AND -# 2) we've been suspended more than 10 seconds, AND -# 3) the keyboard hasn't been touched in a while -UWCS_CONTINUE = ( $(CPUIdle) && ($(ActivityTimer) > 10) \ - && (KeyboardIdle > $(ContinueIdleTime)) ) - -# Preempt jobs if: -# 1) The job is suspended and has been suspended longer than we want -# 2) OR, we don't want to suspend this job, but the conditions to -# suspend jobs have been met (someone is using the machine) -UWCS_PREEMPT = ( ((Activity == "Suspended") && \ - ($(ActivityTimer) > $(MaxSuspendTime))) \ - || (SUSPEND && (WANT_SUSPEND == False)) ) - -# Maximum time (in seconds) to wait for a job to finish before kicking -# it off (due to PREEMPT, a higher priority claim, or the startd -# gracefully shutting down). This is computed from the time the job -# was started, minus any suspension time. Once the retirement time runs -# out, the usual preemption process will take place. The job may -# self-limit the retirement time to _less_ than what is given here. -# By default, nice user jobs and standard universe jobs set their -# MaxJobRetirementTime to 0, so they will not wait in retirement. - -UWCS_MaxJobRetirementTime = 0 - -## If you completely disable preemption of claims to machines, you -## should consider limiting the timespan over which new jobs will be -## accepted on the same claim. See the manual section on disabling -## preemption for a comprehensive discussion. Since this example -## configuration does not disable preemption of claims, we leave -## CLAIM_WORKLIFE undefined (infinite). -#UWCS_CLAIM_WORKLIFE = 1200 - -# How long to allow a job to vacate gracefully. After this time, -# the job is killed. -MachineMaxVacateTime = $(MaxVacateTime) - -# Abort graceful eviction of a job, even though it has not -# yet used all the time allotted by MachineMaxVacateTime. -UWCS_KILL = false - -## Only define vanilla versions of these if you want to make them -## different from the above settings. 
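Before those vanilla-specific overrides, here is a minimal sketch of the testing-mode substitution described above: pointing the policy macros at the TESTINGMODE_* variants (defined further down in this file) instead of the UWCS_* ones. This would normally go in a local configuration file rather than here.

    WANT_SUSPEND = $(TESTINGMODE_WANT_SUSPEND)
    WANT_VACATE  = $(TESTINGMODE_WANT_VACATE)
    START        = $(TESTINGMODE_START)
    SUSPEND      = $(TESTINGMODE_SUSPEND)
    CONTINUE     = $(TESTINGMODE_CONTINUE)
    PREEMPT      = $(TESTINGMODE_PREEMPT)
    KILL         = $(TESTINGMODE_KILL)
    PERIODIC_CHECKPOINT     = $(TESTINGMODE_PERIODIC_CHECKPOINT)
    PREEMPTION_REQUIREMENTS = $(TESTINGMODE_PREEMPTION_REQUIREMENTS)
    PREEMPTION_RANK         = $(TESTINGMODE_PREEMPTION_RANK)
    CLAIM_WORKLIFE          = $(TESTINGMODE_CLAIM_WORKLIFE)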
-#SUSPEND_VANILLA = ( $(KeyboardBusy) || \ -# ((CpuBusyTime > 2 * $(MINUTE)) && $(ActivationTimer) > 90) ) -#CONTINUE_VANILLA = ( $(CPUIdle) && ($(ActivityTimer) > 10) \ -# && (KeyboardIdle > $(ContinueIdleTime)) ) -#PREEMPT_VANILLA = ( ((Activity == "Suspended") && \ -# ($(ActivityTimer) > $(MaxSuspendTime))) \ -# || (SUSPEND_VANILLA && (WANT_SUSPEND == False)) ) -#KILL_VANILLA = false - -## Checkpoint every 3 hours on average, with a +-30 minute random -## factor to avoid having many jobs hit the checkpoint server at -## the same time. -UWCS_PERIODIC_CHECKPOINT = $(LastCkpt) > (3 * $(HOUR) + \ - $RANDOM_INTEGER(-30,30,1) * $(MINUTE) ) - -## You might want to checkpoint a little less often. A good -## example of this is below. For jobs smaller than 60 megabytes, we -## periodic checkpoint every 6 hours. For larger jobs, we only -## checkpoint every 12 hours. -#UWCS_PERIODIC_CHECKPOINT = \ -# ( (TARGET.ImageSize < 60000) && \ -# ($(LastCkpt) > (6 * $(HOUR) + $RANDOM_INTEGER(-30,30,1))) ) || \ -# ( $(LastCkpt) > (12 * $(HOUR) + $RANDOM_INTEGER(-30,30,1)) ) - -## The rank expressions used by the negotiator are configured below. -## This is the order in which ranks are applied by the negotiator: -## 1. NEGOTIATOR_PRE_JOB_RANK -## 2. rank in job ClassAd -## 3. NEGOTIATOR_POST_JOB_RANK -## 4. cause of preemption (0=user priority,1=startd rank,2=no preemption) -## 5. PREEMPTION_RANK - -## The NEGOTIATOR_PRE_JOB_RANK expression overrides all other ranks -## that are used to pick a match from the set of possibilities. -## The following expression matches jobs to unclaimed resources -## whenever possible, regardless of the job-supplied rank. -UWCS_NEGOTIATOR_PRE_JOB_RANK = RemoteOwner =?= UNDEFINED - -## The NEGOTIATOR_POST_JOB_RANK expression chooses between -## resources that are equally preferred by the job. -## The following example expression steers jobs toward -## faster machines and tends to fill a cluster of multi-processors -## breadth-first instead of depth-first. It also prefers online -## machines over offline (hibernating) ones. In this example, -## the expression is chosen to have no effect when preemption -## would take place, allowing control to pass on to -## PREEMPTION_RANK. -UWCS_NEGOTIATOR_POST_JOB_RANK = \ - (RemoteOwner =?= UNDEFINED) * (KFlops - SlotID - 1.0e10*(Offline=?=True)) - -## The negotiator will not preempt a job running on a given machine -## unless the PREEMPTION_REQUIREMENTS expression evaluates to true -## and the owner of the idle job has a better priority than the owner -## of the running job. This expression defaults to true. -UWCS_PREEMPTION_REQUIREMENTS = ((SubmitterGroup =?= RemoteGroup) \ - && ($(StateTimer) > (1 * $(HOUR))) \ - && (RemoteUserPrio > TARGET.SubmitterUserPrio * 1.2)) \ - || (MY.NiceUser == True) - -## The PREEMPTION_RANK expression is used in a case where preemption -## is the only option and all other negotiation ranks are equal. For -## example, if the job has no preference, it is usually preferable to -## preempt a job with a small ImageSize instead of a job with a large -## ImageSize. The default is to rank all preemptable matches the -## same. However, the negotiator will always prefer to match the job -## with an idle machine over a preemptable machine, if all other -## negotiation ranks are equal. -UWCS_PREEMPTION_RANK = (RemoteUserPrio * 1000000) - TARGET.ImageSize - - -##################################################################### -## This is a Configuration that will cause your Condor jobs to -## always run. 
This is intended for testing only. -###################################################################### - -## This mode will cause your jobs to start on a machine an will let -## them run to completion. Condor will ignore all of what is going -## on in the machine (load average, keyboard activity, etc.) - -TESTINGMODE_WANT_SUSPEND = False -TESTINGMODE_WANT_VACATE = False -TESTINGMODE_START = True -TESTINGMODE_SUSPEND = False -TESTINGMODE_CONTINUE = True -TESTINGMODE_PREEMPT = False -TESTINGMODE_KILL = False -TESTINGMODE_PERIODIC_CHECKPOINT = False -TESTINGMODE_PREEMPTION_REQUIREMENTS = False -TESTINGMODE_PREEMPTION_RANK = 0 - -# Prevent machine claims from being reused indefinitely, since -# preemption of claims is disabled in the TESTINGMODE configuration. -TESTINGMODE_CLAIM_WORKLIFE = 1200 - - -###################################################################### -###################################################################### -## -## ###### # -## # # ## ##### ##### # # -## # # # # # # # # # -## ###### # # # # # # # -## # ###### ##### # ####### -## # # # # # # # -## # # # # # # # -## -## Part 4: Settings you should probably leave alone: -## (unless you know what you're doing) -###################################################################### -###################################################################### - -###################################################################### -## Daemon-wide settings: -###################################################################### - -## Pathnames -LOG = $(LOCAL_DIR)/log -SPOOL = $(LOCAL_DIR)/spool -EXECUTE = $(LOCAL_DIR)/execute -BIN = $(RELEASE_DIR)/bin -LIB = $(RELEASE_DIR)/lib -INCLUDE = $(RELEASE_DIR)/include -SBIN = $(RELEASE_DIR)/sbin -LIBEXEC = $(RELEASE_DIR)/libexec - -## If you leave HISTORY undefined (comment it out), no history file -## will be created. -HISTORY = $(SPOOL)/history - -## Log files -COLLECTOR_LOG = $(LOG)/CollectorLog -KBDD_LOG = $(LOG)/KbdLog -MASTER_LOG = $(LOG)/MasterLog -NEGOTIATOR_LOG = $(LOG)/NegotiatorLog -NEGOTIATOR_MATCH_LOG = $(LOG)/MatchLog -SCHEDD_LOG = $(LOG)/SchedLog -SHADOW_LOG = $(LOG)/ShadowLog -STARTD_LOG = $(LOG)/StartLog -STARTER_LOG = $(LOG)/StarterLog -JOB_ROUTER_LOG = $(LOG)/JobRouterLog -ROOSTER_LOG = $(LOG)/RoosterLog -SHARED_PORT_LOG = $(LOG)/SharedPortLog -# High Availability Logs -HAD_LOG = $(LOG)/HADLog -REPLICATION_LOG = $(LOG)/ReplicationLog -TRANSFERER_LOG = $(LOG)/TransfererLog -HDFS_LOG = $(LOG)/HDFSLog - -## Lock files -SHADOW_LOCK = $(LOCK)/ShadowLock - -## This setting controls how often any lock files currently in use have their -## timestamp updated. Updating the timestamp prevents administrative programs -## like 'tmpwatch' from deleting long lived lock files. The parameter is -## an integer in seconds with a minimum of 60 seconds. The default if not -## specified is 28800 seconds, or 8 hours. -## This attribute only takes effect on restart of the daemons or at the next -## update time. -# LOCK_FILE_UPDATE_INTERVAL = 28800 - -## This setting primarily allows you to change the port that the -## collector is listening on. By default, the collector uses port -## 9618, but you can set the port with a ":port", such as: -## COLLECTOR_HOST = $(CONDOR_HOST):1234 -COLLECTOR_HOST = $(CONDOR_HOST) - -## The NEGOTIATOR_HOST parameter has been deprecated. The port where -## the negotiator is listening is now dynamically allocated and the IP -## and port are now obtained from the collector, just like all the -## other daemons. 
However, if your pool contains any machines that -## are running version 6.7.3 or earlier, you can uncomment this -## setting to go back to the old fixed-port (9614) for the negotiator. -#NEGOTIATOR_HOST = $(CONDOR_HOST) - -## How long are you willing to let daemons try their graceful -## shutdown methods before they do a hard shutdown? (30 minutes) -#SHUTDOWN_GRACEFUL_TIMEOUT = 1800 - -## How much disk space would you like reserved from Condor? In -## places where Condor is computing the free disk space on various -## partitions, it subtracts the amount it really finds by this -## many megabytes. (If undefined, defaults to 0). -RESERVED_DISK = %(diskspace)s - -## If your machine is running AFS and the AFS cache lives on the same -## partition as the other Condor directories, and you want Condor to -## reserve the space that your AFS cache is configured to use, set -## this to true. -#RESERVE_AFS_CACHE = False - -## By default, if a user does not specify "notify_user" in the submit -## description file, any email Condor sends about that job will go to -## "username@UID_DOMAIN". If your machines all share a common UID -## domain (so that you would set UID_DOMAIN to be the same across all -## machines in your pool), *BUT* email to user@UID_DOMAIN is *NOT* -## the right place for Condor to send email for your site, you can -## define the default domain to use for email. A common example -## would be to set EMAIL_DOMAIN to the fully qualified hostname of -## each machine in your pool, so users submitting jobs from a -## specific machine would get email sent to user@machine.your.domain, -## instead of user@your.domain. In general, you should leave this -## setting commented out unless two things are true: 1) UID_DOMAIN is -## set to your domain, not $(FULL_HOSTNAME), and 2) email to -## user@UID_DOMAIN won't work. -#EMAIL_DOMAIN = $(FULL_HOSTNAME) - -## Should Condor daemons create a UDP command socket (for incomming -## UDP-based commands) in addition to the TCP command socket? By -## default, classified ad updates sent to the collector use UDP, in -## addition to some keep alive messages and other non-essential -## communication. However, in certain situations, it might be -## desirable to disable the UDP command port (for example, to reduce -## the number of ports represented by a CCB broker, etc). If not -## defined, the UDP command socket is enabled by default, and to -## modify this, you must restart your Condor daemons. Also, this -## setting must be defined machine-wide. For example, setting -## "STARTD.WANT_UDP_COMMAND_SOCKET = False" while the global setting -## is "True" will still result in the startd creating a UDP socket. -#WANT_UDP_COMMAND_SOCKET = True - -## If your site needs to use TCP updates to the collector, instead of -## UDP, you can enable this feature. HOWEVER, WE DO NOT RECOMMEND -## THIS FOR MOST SITES! In general, the only sites that might want -## this feature are pools made up of machines connected via a -## wide-area network where UDP packets are frequently or always -## dropped. If you enable this feature, you *MUST* turn on the -## COLLECTOR_SOCKET_CACHE_SIZE setting at your collector, and each -## entry in the socket cache uses another file descriptor. If not -## defined, this feature is disabled by default. -#UPDATE_COLLECTOR_WITH_TCP = True - -## HIGHPORT and LOWPORT let you set the range of ports that Condor -## will use. This may be useful if you are behind a firewall. 
By -## default, Condor uses port 9618 for the collector, 9614 for the -## negotiator, and system-assigned (apparently random) ports for -## everything else. HIGHPORT and LOWPORT only affect these -## system-assigned ports, but will restrict them to the range you -## specify here. If you want to change the well-known ports for the -## collector or negotiator, see COLLECTOR_HOST or NEGOTIATOR_HOST. -## Note that both LOWPORT and HIGHPORT must be at least 1024 if you -## are not starting your daemons as root. You may also specify -## different port ranges for incoming and outgoing connections by -## using IN_HIGHPORT/IN_LOWPORT and OUT_HIGHPORT/OUT_LOWPORT. -#HIGHPORT = 9700 -#LOWPORT = 9600 - -## If a daemon doens't respond for too long, do you want go generate -## a core file? This bascially controls the type of the signal -## sent to the child process, and mostly affects the Condor Master -#NOT_RESPONDING_WANT_CORE = False - - -###################################################################### -## Daemon-specific settings: -###################################################################### - -##-------------------------------------------------------------------- -## condor_master -##-------------------------------------------------------------------- -## Daemons you want the master to keep running for you: -DAEMON_LIST = MASTER, STARTD, SCHEDD - -## Which daemons use the Condor DaemonCore library (i.e., not the -## checkpoint server or custom user daemons)? -#DC_DAEMON_LIST = \ -#MASTER, STARTD, SCHEDD, KBDD, COLLECTOR, NEGOTIATOR, EVENTD, \ -#VIEW_SERVER, CONDOR_VIEW, VIEW_COLLECTOR, HAWKEYE, CREDD, HAD, \ -#DBMSD, QUILL, JOB_ROUTER, ROOSTER, LEASEMANAGER, HDFS, SHARED_PORT, \ -#DEFRAG - - -## Where are the binaries for these daemons? -MASTER = $(SBIN)/condor_master -STARTD = $(SBIN)/condor_startd -SCHEDD = $(SBIN)/condor_schedd -KBDD = $(SBIN)/condor_kbdd -NEGOTIATOR = $(SBIN)/condor_negotiator -COLLECTOR = $(SBIN)/condor_collector -CKPT_SERVER = $(SBIN)/condor_ckpt_server -STARTER_LOCAL = $(SBIN)/condor_starter -JOB_ROUTER = $(LIBEXEC)/condor_job_router -ROOSTER = $(LIBEXEC)/condor_rooster -HDFS = $(SBIN)/condor_hdfs -SHARED_PORT = $(LIBEXEC)/condor_shared_port -TRANSFERER = $(LIBEXEC)/condor_transferer -DEFRAG = $(LIBEXEC)/condor_defrag - -## When the master starts up, it can place it's address (IP and port) -## into a file. This way, tools running on the local machine don't -## need to query the central manager to find the master. This -## feature can be turned off by commenting out this setting. -MASTER_ADDRESS_FILE = $(LOG)/.master_address - -## Where should the master find the condor_preen binary? If you don't -## want preen to run at all, set it to nothing. -PREEN = $(SBIN)/condor_preen - -## How do you want preen to behave? The "-m" means you want email -## about files preen finds that it thinks it should remove. The "-r" -## means you want preen to actually remove these files. If you don't -## want either of those things to happen, just remove the appropriate -## one from this setting. -PREEN_ARGS = -m -r - -## How often should the master start up condor_preen? (once a day) -#PREEN_INTERVAL = 86400 - -## If a daemon dies an unnatural death, do you want email about it? -#PUBLISH_OBITUARIES = True - -## If you're getting obituaries, how many lines of the end of that -## daemon's log file do you want included in the obituary? -#OBITUARY_LOG_LENGTH = 20 - -## Should the master run? -#START_MASTER = True - -## Should the master start up the daemons you want it to? 
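As an aside on the DAEMON_LIST entry defined above: the list in this template (MASTER, STARTD, SCHEDD) suits an execute/submit node; on the pool's central manager the same knob would typically also name the collector and negotiator whose binaries are declared above. A hedged sketch:

    #DAEMON_LIST = MASTER, COLLECTOR, NEGOTIATOR, STARTD, SCHEDD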
-#START_DAEMONS = True - -## How often do you want the master to send an update to the central -## manager? -#MASTER_UPDATE_INTERVAL = 300 - -## How often do you want the master to check the timestamps of the -## daemons it's running? If any daemons have been modified, the -## master restarts them. -#MASTER_CHECK_NEW_EXEC_INTERVAL = 300 - -## Once you notice new binaries, how long should you wait before you -## try to execute them? -#MASTER_NEW_BINARY_DELAY = 120 - -## What's the maximum amount of time you're willing to give the -## daemons to quickly shutdown before you just kill them outright? -#SHUTDOWN_FAST_TIMEOUT = 120 - -###### -## Exponential backoff settings: -###### -## When a daemon keeps crashing, we use "exponential backoff" so we -## wait longer and longer before restarting it. This is the base of -## the exponent used to determine how long to wait before starting -## the daemon again: -#MASTER_BACKOFF_FACTOR = 2.0 - -## What's the maximum amount of time you want the master to wait -## between attempts to start a given daemon? (With 2.0 as the -## MASTER_BACKOFF_FACTOR, you'd hit 1 hour in 12 restarts...) -#MASTER_BACKOFF_CEILING = 3600 - -## How long should a daemon run without crashing before we consider -## it "recovered". Once a daemon has recovered, we reset the number -## of restarts so the exponential backoff stuff goes back to normal. -#MASTER_RECOVER_FACTOR = 300 - - -##-------------------------------------------------------------------- -## condor_collector -##-------------------------------------------------------------------- -## Address to which Condor will send a weekly e-mail with output of -## condor_status. -#CONDOR_DEVELOPERS = condor-admin@cs.wisc.edu - -## Global Collector to periodically advertise basic information about -## your pool. -#CONDOR_DEVELOPERS_COLLECTOR = condor.cs.wisc.edu - -## When the collector starts up, it can place it's address (IP and port) -## into a file. This way, tools running on the local machine don't -## need to query the central manager to find the collector. This -## feature can be turned off by commenting out this setting. -## This is essential when using a port of "0" (automatic) for the -## COLLECTOR_HOST, a useful technique for personal Condor installs. -COLLECTOR_ADDRESS_FILE = $(LOG)/.collector_address - - -##-------------------------------------------------------------------- -## condor_negotiator -##-------------------------------------------------------------------- -## Determine if the Negotiator will honor SlotWeight attributes, which -## may be used to give a slot greater weight when calculating usage. -#NEGOTIATOR_USE_SLOT_WEIGHTS = True - - -## How often the Negotaitor starts a negotiation cycle, defined in -## seconds. -#NEGOTIATOR_INTERVAL = 60 - -## Should the Negotiator publish an update to the Collector after -## every negotiation cycle. It is useful to have this set to True -## to get immediate updates on LastNegotiationCycle statistics. -#NEGOTIATOR_UPDATE_AFTER_CYCLE = False - - -##-------------------------------------------------------------------- -## condor_startd -##-------------------------------------------------------------------- -## Where are the various condor_starter binaries installed? -STARTER_LIST = STARTER, STARTER_STANDARD -STARTER = $(SBIN)/condor_starter -STARTER_STANDARD = $(SBIN)/condor_starter.std -STARTER_LOCAL = $(SBIN)/condor_starter - -## When the startd starts up, it can place it's address (IP and port) -## into a file. 
This way, tools running on the local machine don't -## need to query the central manager to find the startd. This -## feature can be turned off by commenting out this setting. -STARTD_ADDRESS_FILE = $(LOG)/.startd_address - -## When a machine is claimed, how often should we poll the state of -## the machine to see if we need to evict/suspend the job, etc? -#POLLING_INTERVAL = 5 - -## How often should the startd send updates to the central manager? -#UPDATE_INTERVAL = 300 - -## How long is the startd willing to stay in the "matched" state? -#MATCH_TIMEOUT = 300 - -## How long is the startd willing to stay in the preempting/killing -## state before it just kills the starter directly? -#KILLING_TIMEOUT = 30 - -## When a machine unclaimed, when should it run benchmarks? -## LastBenchmark is initialized to 0, so this expression says as soon -## as we're unclaimed, run the benchmarks. Thereafter, if we're -## unclaimed and it's been at least 4 hours since we ran the last -## benchmarks, run them again. The startd keeps a weighted average -## of the benchmark results to provide more accurate values. -## Note, if you don't want any benchmarks run at all, either comment -## RunBenchmarks out, or set it to "False". -BenchmarkTimer = (time() - LastBenchmark) -RunBenchmarks : (LastBenchmark == 0 ) || ($(BenchmarkTimer) >= (4 * $(HOUR))) -#RunBenchmarks : False - -## When the startd does benchmarks, which set of benchmarks should we -## run? The default is the same as pre-7.5.6: MIPS and KFLOPS. -benchmarks_joblist = mips kflops - -## What's the max "load" of all running benchmarks? With the default -## (1.01), the startd will run the benchmarks serially. -benchmarks_max_job_load = 1.0 - -# MIPS (Dhrystone 2.1) benchmark: load 1.0 -benchmarks_mips_executable = $(LIBEXEC)/condor_mips -benchmarks_mips_job_load = 1.0 - -# KFLOPS (clinpack) benchmark: load 1.0 -benchmarks_kflops_executable = $(LIBEXEC)/condor_kflops -benchmarks_kflops_job_load = 1.0 - - -## Normally, when the startd is computing the idle time of all the -## users of the machine (both local and remote), it checks the utmp -## file to find all the currently active ttys, and only checks access -## time of the devices associated with active logins. Unfortunately, -## on some systems, utmp is unreliable, and the startd might miss -## keyboard activity by doing this. So, if your utmp is unreliable, -## set this setting to True and the startd will check the access time -## on all tty and pty devices. -#STARTD_HAS_BAD_UTMP = False - -## This entry allows the startd to monitor console (keyboard and -## mouse) activity by checking the access times on special files in -## /dev. Activity on these files shows up as "ConsoleIdle" time in -## the startd's ClassAd. Just give a comma-separated list of the -## names of devices you want considered the console, without the -## "/dev/" portion of the pathname. -#CONSOLE_DEVICES = mouse, console - - -## The STARTD_ATTRS (and legacy STARTD_EXPRS) entry allows you to -## have the startd advertise arbitrary attributes from the config -## file in its ClassAd. Give the comma-separated list of entries -## from the config file you want in the startd ClassAd. -## NOTE: because of the different syntax of the config file and -## ClassAds, you might have to do a little extra work to get a given -## entry into the ClassAd. In particular, ClassAds require double -## quotes (") around your strings. Numeric values can go in -## directly, as can boolean expressions. 
For example, if you wanted -## the startd to advertise its list of console devices, when it's -## configured to run benchmarks, and how often it sends updates to -## the central manager, you'd have to define the following helper -## macro: -#MY_CONSOLE_DEVICES = "$(CONSOLE_DEVICES)" -## Note: this must come before you define STARTD_ATTRS because macros -## must be defined before you use them in other macros or -## expressions. -## Then, you'd set the STARTD_ATTRS setting to this: -#STARTD_ATTRS = MY_CONSOLE_DEVICES, RunBenchmarks, UPDATE_INTERVAL -## -## STARTD_ATTRS can also be defined on a per-slot basis. The startd -## builds the list of attributes to advertise by combining the lists -## in this order: STARTD_ATTRS, SLOTx_STARTD_ATTRS. In the below -## example, the startd ad for slot1 will have the value for -## favorite_color, favorite_season, and favorite_movie, and slot2 -## will have favorite_color, favorite_season, and favorite_song. -## -#STARTD_ATTRS = favorite_color, favorite_season -#SLOT1_STARTD_ATTRS = favorite_movie -#SLOT2_STARTD_ATTRS = favorite_song -## -## Attributes in the STARTD_ATTRS list can also be on a per-slot basis. -## For example, the following configuration: -## -#favorite_color = "blue" -#favorite_season = "spring" -#SLOT2_favorite_color = "green" -#SLOT3_favorite_season = "summer" -#STARTD_ATTRS = favorite_color, favorite_season -## -## will result in the following attributes in the slot classified -## ads: -## -## slot1 - favorite_color = "blue"; favorite_season = "spring" -## slot2 - favorite_color = "green"; favorite_season = "spring" -## slot3 - favorite_color = "blue"; favorite_season = "summer" -## -## Finally, the recommended default value for this setting, is to -## publish the COLLECTOR_HOST setting as a string. This can be -## useful using the "$$(COLLECTOR_HOST)" syntax in the submit file -## for jobs to know (for example, via their environment) what pool -## they're running in. -COLLECTOR_HOST_STRING = "$(COLLECTOR_HOST)" -STARTD_ATTRS = COLLECTOR_HOST_STRING - -## When the startd is claimed by a remote user, it can also advertise -## arbitrary attributes from the ClassAd of the job its working on. -## Just list the attribute names you want advertised. -## Note: since this is already a ClassAd, you don't have to do -## anything funny with strings, etc. This feature can be turned off -## by commenting out this setting (there is no default). -STARTD_JOB_EXPRS = ImageSize, ExecutableSize, JobUniverse, NiceUser - -## If you want to "lie" to Condor about how many CPUs your machine -## has, you can use this setting to override Condor's automatic -## computation. If you modify this, you must restart the startd for -## the change to take effect (a simple condor_reconfig will not do). -## Please read the section on "condor_startd Configuration File -## Macros" in the Condor Administrators Manual for a further -## discussion of this setting. Its use is not recommended. This -## must be an integer ("N" isn't a valid setting, that's just used to -## represent the default). -#NUM_CPUS = N - -## If you never want Condor to detect more the "N" CPUs, uncomment this -## line out. You must restart the startd for this setting to take -## effect. If set to 0 or a negative number, it is ignored. -## By default, it is ignored. Otherwise, it must be a positive -## integer ("N" isn't a valid setting, that's just used to -## represent the default). 
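For instance, to make the CPU-count cap just described concrete (the value is hypothetical, and as noted above the startd must be restarted for it to take effect):

    ## Never advertise more than 8 CPUs, even on larger machines:
    #MAX_NUM_CPUS = 8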
-#MAX_NUM_CPUS = N - -## Normally, Condor will automatically detect the amount of physical -## memory available on your machine. Define MEMORY to tell Condor -## how much physical memory (in MB) your machine has, overriding the -## value Condor computes automatically. For example: -#MEMORY = 128 - -## How much memory would you like reserved from Condor? By default, -## Condor considers all the physical memory of your machine as -## available to be used by Condor jobs. If RESERVED_MEMORY is -## defined, Condor subtracts it from the amount of memory it -## advertises as available. -#RESERVED_MEMORY = 0 - -###### -## SMP startd settings -## -## By default, Condor will evenly divide the resources in an SMP -## machine (such as RAM, swap space and disk space) among all the -## CPUs, and advertise each CPU as its own slot with an even share of -## the system resources. If you want something other than this, -## there are a few options available to you. Please read the section -## on "Configuring The Startd for SMP Machines" in the Condor -## Administrator's Manual for full details. The various settings are -## only briefly listed and described here. -###### - -## The maximum number of different slot types. -#MAX_SLOT_TYPES = 10 - -## Use this setting to define your own slot types. This -## allows you to divide system resources unevenly among your CPUs. -## You must use a different setting for each different type you -## define. The "<N>" in the name of the macro listed below must be -## an integer from 1 to MAX_SLOT_TYPES (defined above), -## and you use this number to refer to your type. There are many -## different formats these settings can take, so be sure to refer to -## the section on "Configuring The Startd for SMP Machines" in the -## Condor Administrator's Manual for full details. In particular, -## read the section titled "Defining Slot Types" to help -## understand this setting. If you modify any of these settings, you -## must restart the condor_start for the change to take effect. -#SLOT_TYPE_<N> = 1/4 -#SLOT_TYPE_<N> = cpus=1, ram=25%%, swap=1/4, disk=1/4 -# For example: -#SLOT_TYPE_1 = 1/8 -#SLOT_TYPE_2 = 1/4 - -## If you define your own slot types, you must specify how -## many slots of each type you wish to advertise. You do -## this with the setting below, replacing the "<N>" with the -## corresponding integer you used to define the type above. You can -## change the number of a given type being advertised at run-time, -## with a simple condor_reconfig. -#NUM_SLOTS_TYPE_<N> = M -# For example: -#NUM_SLOTS_TYPE_1 = 6 -#NUM_SLOTS_TYPE_2 = 1 - -## The number of evenly-divided slots you want Condor to -## report to your pool (if less than the total number of CPUs). This -## setting is only considered if the "type" settings described above -## are not in use. By default, all CPUs are reported. This setting -## must be an integer ("N" isn't a valid setting, that's just used to -## represent the default). -#NUM_SLOTS = N - -## How many of the slots the startd is representing should -## be "connected" to the console (in other words, notice when there's -## console activity)? This defaults to all slots (N in a -## machine with N CPUs). This must be an integer ("N" isn't a valid -## setting, that's just used to represent the default). -#SLOTS_CONNECTED_TO_CONSOLE = N - -## How many of the slots the startd is representing should -## be "connected" to the keyboard (for remote tty activity, as well -## as console activity). Defaults to 1. 
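Stepping back to the SMP slot-type settings above, here is a hedged sketch of an uneven split for a hypothetical 8-CPU machine: one half-machine slot plus four 1/8 slots. As the comments above note, changing the slot type definitions requires restarting the startd, while the NUM_SLOTS_TYPE_<N> counts can be changed with a simple condor_reconfig.

    #SLOT_TYPE_1 = cpus=4, ram=1/2, swap=1/2, disk=1/2
    #NUM_SLOTS_TYPE_1 = 1
    #SLOT_TYPE_2 = 1/8
    #NUM_SLOTS_TYPE_2 = 4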
-#SLOTS_CONNECTED_TO_KEYBOARD = 1 - -## If there are slots that aren't connected to the -## keyboard or the console (see the above two settings), the -## corresponding idle time reported will be the time since the startd -## was spawned, plus the value of this parameter. It defaults to 20 -## minutes. We do this because, if the slot is configured -## not to care about keyboard activity, we want it to be available to -## Condor jobs as soon as the startd starts up, instead of having to -## wait for 15 minutes or more (which is the default time a machine -## must be idle before Condor will start a job). If you don't want -## this boost, just set the value to 0. If you change your START -## expression to require more than 15 minutes before a job starts, -## but you still want jobs to start right away on some of your SMP -## nodes, just increase this parameter. -#DISCONNECTED_KEYBOARD_IDLE_BOOST = 1200 - -###### -## Settings for computing optional resource availability statistics: -###### -## If STARTD_COMPUTE_AVAIL_STATS = True, the startd will compute -## statistics about resource availability to be included in the -## classad(s) sent to the collector describing the resource(s) the -## startd manages. The following attributes will always be included -## in the resource classad(s) if STARTD_COMPUTE_AVAIL_STATS = True: -## AvailTime = What proportion of the time (between 0.0 and 1.0) -## has this resource been in a state other than "Owner"? -## LastAvailInterval = What was the duration (in seconds) of the -## last period between "Owner" states? -## The following attributes will also be included if the resource is -## not in the "Owner" state: -## AvailSince = At what time did the resource last leave the -## "Owner" state? Measured in the number of seconds since the -## epoch (00:00:00 UTC, Jan 1, 1970). -## AvailTimeEstimate = Based on past history, this is an estimate -## of how long the current period between "Owner" states will -## last. -#STARTD_COMPUTE_AVAIL_STATS = False - -## If STARTD_COMPUTE_AVAIL_STATS = True, STARTD_AVAIL_CONFIDENCE sets -## the confidence level of the AvailTimeEstimate. By default, the -## estimate is based on the 80th percentile of past values. -#STARTD_AVAIL_CONFIDENCE = 0.8 - -## STARTD_MAX_AVAIL_PERIOD_SAMPLES limits the number of samples of -## past available intervals stored by the startd to limit memory and -## disk consumption. Each sample requires 4 bytes of memory and -## approximately 10 bytes of disk space. -#STARTD_MAX_AVAIL_PERIOD_SAMPLES = 100 - -## CKPT_PROBE is the location of a program which computes aspects of the -## CheckpointPlatform classad attribute. By default the location of this -## executable will be here: $(LIBEXEC)/condor_ckpt_probe -CKPT_PROBE = $(LIBEXEC)/condor_ckpt_probe - -##-------------------------------------------------------------------- -## condor_schedd -##-------------------------------------------------------------------- -## Where are the various shadow binaries installed? -SHADOW_LIST = SHADOW, SHADOW_STANDARD -SHADOW = $(SBIN)/condor_shadow -SHADOW_STANDARD = $(SBIN)/condor_shadow.std - -## When the schedd starts up, it can place it's address (IP and port) -## into a file. This way, tools running on the local machine don't -## need to query the central manager to find the schedd. This -## feature can be turned off by commenting out this setting. -SCHEDD_ADDRESS_FILE = $(SPOOL)/.schedd_address - -## Additionally, a daemon may store its ClassAd on the local filesystem -## as well as sending it to the collector. 
This way, tools that need -## information about a daemon do not have to contact the central manager -## to get information about a daemon on the same machine. -## This feature is necessary for Quill to work. -SCHEDD_DAEMON_AD_FILE = $(SPOOL)/.schedd_classad - -## How often should the schedd send an update to the central manager? -#SCHEDD_INTERVAL = 300 - -## How long should the schedd wait between spawning each shadow? -#JOB_START_DELAY = 2 - -## How many concurrent sub-processes should the schedd spawn to handle -## queries? (Unix only) -#SCHEDD_QUERY_WORKERS = 3 - -## How often should the schedd send a keep alive message to any -## startds it has claimed? (5 minutes) -#ALIVE_INTERVAL = 300 - -## This setting controls the maximum number of times that a -## condor_shadow processes can have a fatal error (exception) before -## the condor_schedd will simply relinquish the match associated with -## the dying shadow. -#MAX_SHADOW_EXCEPTIONS = 5 - -## Estimated virtual memory size of each condor_shadow process. -## Specified in kilobytes. -# SHADOW_SIZE_ESTIMATE = 800 - -## The condor_schedd can renice the condor_shadow processes on your -## submit machines. How "nice" do you want the shadows? (1-19). -## The higher the number, the lower priority the shadows have. -# SHADOW_RENICE_INCREMENT = 0 - -## The condor_schedd can renice scheduler universe processes -## (e.g. DAGMan) on your submit machines. How "nice" do you want the -## scheduler universe processes? (1-19). The higher the number, the -## lower priority the processes have. -# SCHED_UNIV_RENICE_INCREMENT = 0 - -## By default, when the schedd fails to start an idle job, it will -## not try to start any other idle jobs in the same cluster during -## that negotiation cycle. This makes negotiation much more -## efficient for large job clusters. However, in some cases other -## jobs in the cluster can be started even though an earlier job -## can't. For example, the jobs' requirements may differ, because of -## different disk space, memory, or operating system requirements. -## Or, machines may be willing to run only some jobs in the cluster, -## because their requirements reference the jobs' virtual memory size -## or other attribute. Setting NEGOTIATE_ALL_JOBS_IN_CLUSTER to True -## will force the schedd to try to start all idle jobs in each -## negotiation cycle. This will make negotiation cycles last longer, -## but it will ensure that all jobs that can be started will be -## started. -#NEGOTIATE_ALL_JOBS_IN_CLUSTER = False - -## This setting controls how often, in seconds, the schedd considers -## periodic job actions given by the user in the submit file. -## (Currently, these are periodic_hold, periodic_release, and periodic_remove.) -#PERIODIC_EXPR_INTERVAL = 60 - -###### -## Queue management settings: -###### -## How often should the schedd truncate it's job queue transaction -## log? (Specified in seconds, once a day is the default.) -#QUEUE_CLEAN_INTERVAL = 86400 - -## How often should the schedd commit "wall clock" run time for jobs -## to the queue, so run time statistics remain accurate when the -## schedd crashes? (Specified in seconds, once per hour is the -## default. Set to 0 to disable.) -#WALL_CLOCK_CKPT_INTERVAL = 3600 - -## What users do you want to grant super user access to this job -## queue? (These users will be able to remove other user's jobs). -## By default, this only includes root. 
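As an illustration ahead of the actual value below, a site could also grant a dedicated operations account super-user access to the job queue (the extra user name here is hypothetical):

    #QUEUE_SUPER_USERS = root, condor, hpcadmin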
-QUEUE_SUPER_USERS = root, condor - - -##-------------------------------------------------------------------- -## condor_shadow -##-------------------------------------------------------------------- -## If the shadow is unable to read a checkpoint file from the -## checkpoint server, it keeps trying only if the job has accumulated -## more than MAX_DISCARDED_RUN_TIME seconds of CPU usage. Otherwise, -## the job is started from scratch. Defaults to 1 hour. This -## setting is only used if USE_CKPT_SERVER (from above) is True. -#MAX_DISCARDED_RUN_TIME = 3600 - -## Should periodic checkpoints be compressed? -#COMPRESS_PERIODIC_CKPT = False - -## Should vacate checkpoints be compressed? -#COMPRESS_VACATE_CKPT = False - -## Should we commit the application's dirty memory pages to swap -## space during a periodic checkpoint? -#PERIODIC_MEMORY_SYNC = False - -## Should we write vacate checkpoints slowly? If nonzero, this -## parameter specifies the speed at which vacate checkpoints should -## be written, in kilobytes per second. -#SLOW_CKPT_SPEED = 0 - -## How often should the shadow update the job queue with job -## attributes that periodically change? Specified in seconds. -#SHADOW_QUEUE_UPDATE_INTERVAL = 15 * 60 - -## Should the shadow wait to update certain job attributes for the -## next periodic update, or should it immediately these update -## attributes as they change? Due to performance concerns of -## aggressive updates to a busy condor_schedd, the default is True. -#SHADOW_LAZY_QUEUE_UPDATE = TRUE - - -##-------------------------------------------------------------------- -## condor_starter -##-------------------------------------------------------------------- -## The condor_starter can renice the processes of Condor -## jobs on your execute machines. If you want this, uncomment the -## following entry and set it to how "nice" you want the user -## jobs. (1-19) The larger the number, the lower priority the -## process gets on your machines. -## Note on Win32 platforms, this number needs to be greater than -## zero (i.e. the job must be reniced) or the mechanism that -## monitors CPU load on Win32 systems will give erratic results. -#JOB_RENICE_INCREMENT = 10 - -## Should the starter do local logging to its own log file, or send -## debug information back to the condor_shadow where it will end up -## in the ShadowLog? -#STARTER_LOCAL_LOGGING = TRUE - -## If the UID_DOMAIN settings match on both the execute and submit -## machines, but the UID of the user who submitted the job isn't in -## the passwd file of the execute machine, the starter will normally -## exit with an error. Do you want the starter to just start up the -## job with the specified UID, even if it's not in the passwd file? -#SOFT_UID_DOMAIN = FALSE - -## honor the run_as_owner option from the condor submit file. -## -#STARTER_ALLOW_RUNAS_OWNER = TRUE - -## Tell the Starter/Startd what program to use to remove a directory -## condor_rmdir.exe is a windows-only command that does a better job -## than the built-in rmdir command when it is run with elevated privileges -## Such as when when Condor is running as a service. 
-## /s is delete subdirectories -## /c is continue on error -WINDOWS_RMDIR = $(SBIN)\condor_rmdir.exe -#WINDOWS_RMDIR_OPTIONS = /s /c - -##-------------------------------------------------------------------- -## condor_procd -##-------------------------------------------------------------------- -## -# the path to the procd binary -# -PROCD = $(SBIN)/condor_procd - -# the path to the procd "address" -# - on UNIX this will be a named pipe; we'll put it in the -# $(LOCK) directory by default (note that multiple named pipes -# will be created in this directory for when the procd responds -# to its clients) -# - on Windows, this will be a named pipe as well (but named pipes on -# Windows are not even close to the same thing as named pipes on -# UNIX); the name will be something like: -# \\.\pipe\condor_procd -# -PROCD_ADDRESS = $(LOCK)/procd_pipe - -# Note that in other Condor daemons, turning on D_PROCFAMILY will -# result in that daemon logging all of its interactions with the -# ProcD. -# -PROCD_LOG = $(LOG)/ProcLog - -# This is the maximum period that the procd will use for taking -# snapshots (the actual period may be lower if a condor daemon registers -# a family for which it wants more frequent snapshots) -# -PROCD_MAX_SNAPSHOT_INTERVAL = 60 - -# On Windows, we send a process a "soft kill" via a WM_CLOSE message. -# This binary is used by the ProcD (and other Condor daemons if PRIVSEP -# is not enabled) to help when sending soft kills. -WINDOWS_SOFTKILL = $(SBIN)/condor_softkill - -##-------------------------------------------------------------------- -## condor_submit -##-------------------------------------------------------------------- -## If you want condor_submit to automatically append an expression to -## the Requirements expression or Rank expression of jobs at your -## site, uncomment these entries. -#APPEND_REQUIREMENTS = (expression to append job requirements) -#APPEND_RANK = (expression to append job rank) - -## If you want expressions only appended for either standard or -## vanilla universe jobs, you can uncomment these entries. If any of -## them are defined, they are used for the given universe, instead of -## the generic entries above. -#APPEND_REQ_VANILLA = (expression to append to vanilla job requirements) -#APPEND_REQ_STANDARD = (expression to append to standard job requirements) -#APPEND_RANK_STANDARD = (expression to append to vanilla job rank) -#APPEND_RANK_VANILLA = (expression to append to standard job rank) - -## This can be used to define a default value for the rank expression -## if one is not specified in the submit file. -#DEFAULT_RANK = (default rank expression for all jobs) - -## If you want universe-specific defaults, you can use the following -## entries: -#DEFAULT_RANK_VANILLA = (default rank expression for vanilla jobs) -#DEFAULT_RANK_STANDARD = (default rank expression for standard jobs) - -## If you want condor_submit to automatically append expressions to -## the job ClassAds it creates, you can uncomment and define the -## SUBMIT_EXPRS setting. It works just like the STARTD_EXPRS -## described above with respect to ClassAd vs. config file syntax, -## strings, etc. One common use would be to have the full hostname -## of the machine where a job was submitted placed in the job -## ClassAd. You would do this by uncommenting the following lines: -#MACHINE = "$(FULL_HOSTNAME)" -#SUBMIT_EXPRS = MACHINE - -## Condor keeps a buffer of recently-used data for each file an -## application opens. 
This macro specifies the default maximum number -## of bytes to be buffered for each open file at the executing -## machine. -#DEFAULT_IO_BUFFER_SIZE = 524288 - -## Condor will attempt to consolidate small read and write operations -## into large blocks. This macro specifies the default block size -## Condor will use. -#DEFAULT_IO_BUFFER_BLOCK_SIZE = 32768 - -##-------------------------------------------------------------------- -## condor_preen -##-------------------------------------------------------------------- -## Who should condor_preen send email to? -#PREEN_ADMIN = $(CONDOR_ADMIN) - -## What files should condor_preen leave in the spool directory? -VALID_SPOOL_FILES = job_queue.log, job_queue.log.tmp, history, \ - Accountant.log, Accountantnew.log, \ - local_univ_execute, .quillwritepassword, \ - .pgpass, \ - .schedd_address, .schedd_classad - -## What files should condor_preen remove from the log directory? -INVALID_LOG_FILES = core - -##-------------------------------------------------------------------- -## Java parameters: -##-------------------------------------------------------------------- -## If you would like this machine to be able to run Java jobs, -## then set JAVA to the path of your JVM binary. If you are not -## interested in Java, there is no harm in leaving this entry -## empty or incorrect. - -JAVA = %(javabin)s - -## JAVA_CLASSPATH_DEFAULT gives the default set of paths in which -## Java classes are to be found. Each path is separated by spaces. -## If your JVM needs to be informed of additional directories, add -## them here. However, do not remove the existing entries, as Condor -## needs them. - -JAVA_CLASSPATH_DEFAULT = $(LIB) $(LIB)/scimark2lib.jar . - -## JAVA_CLASSPATH_ARGUMENT describes the command-line parameter -## used to introduce a new classpath: - -JAVA_CLASSPATH_ARGUMENT = -classpath - -## JAVA_CLASSPATH_SEPARATOR describes the character used to mark -## one path element from another: - -JAVA_CLASSPATH_SEPARATOR = : - -## JAVA_BENCHMARK_TIME describes the number of seconds for which -## to run Java benchmarks. A longer time yields a more accurate -## benchmark, but consumes more otherwise useful CPU time. -## If this time is zero or undefined, no Java benchmarks will be run. - -JAVA_BENCHMARK_TIME = 2 - -## If your JVM requires any special arguments not mentioned in -## the options above, then give them here. - -JAVA_EXTRA_ARGUMENTS = - -## -##-------------------------------------------------------------------- -## Condor-G settings -##-------------------------------------------------------------------- -## Where is the GridManager binary installed? - -GRIDMANAGER = $(SBIN)/condor_gridmanager -GT2_GAHP = $(SBIN)/gahp_server -GRID_MONITOR = $(SBIN)/grid_monitor - -##-------------------------------------------------------------------- -## Settings that control the daemon's debugging output: -##-------------------------------------------------------------------- -## -## Note that the Gridmanager runs as the User, not a Condor daemon, so -## all users must have write permssion to the directory that the -## Gridmanager will use for it's logfile. Our suggestion is to create a -## directory called GridLogs in $(LOG) with UNIX permissions 1777 -## (just like /tmp ) -## Another option is to use /tmp as the location of the GridManager log. 
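Looking back at the Java settings above: the starter assembles the JVM command line by joining JAVA_CLASSPATH_DEFAULT with JAVA_CLASSPATH_SEPARATOR and passing the result through JAVA_CLASSPATH_ARGUMENT, roughly as sketched below; additional JVM options go in JAVA_EXTRA_ARGUMENTS (the heap size shown is only a hypothetical example).

    ## Roughly the resulting invocation (illustrative only):
    ##   $(JAVA) -classpath $(LIB):$(LIB)/scimark2lib.jar:. <job's main class> <args>
    ## A (hypothetical) larger JVM heap via the extra-arguments knob:
    #JAVA_EXTRA_ARGUMENTS = -Xmx512m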
-## - -MAX_GRIDMANAGER_LOG = 1000000 -GRIDMANAGER_DEBUG = - -GRIDMANAGER_LOG = $(LOG)/GridmanagerLog.$(USERNAME) -GRIDMANAGER_LOCK = $(LOCK)/GridmanagerLock.$(USERNAME) - -##-------------------------------------------------------------------- -## Various other settings that the Condor-G can use. -##-------------------------------------------------------------------- - -## The number of seconds between status update requests. You can make -## this short (5 seconds) if you want Condor to respond quickly to -## instances as they terminate, or you can make it long (300 seconds = 5 -## minutes) if you know your instances will run for a while and don't -## mind a delay between when they stop and when Condor responds to them -## stopping. -GRIDMANAGER_JOB_PROBE_INTERVAL = 300 - -## For grid-type gt2 jobs (pre-WS GRAM), limit the number of jobmanager -## processes the gridmanager will let run on the headnode. Letting too -## many jobmanagers run causes severe load on the headnode. -GRIDMANAGER_MAX_JOBMANAGERS_PER_RESOURCE = 10 - -## If we're talking to a Globus 2.0 resource, Condor-G will use the new -## version of the GRAM protocol. The first option is how often to check the -## proxy on the submit side of things. If the GridManager discovers a new -## proxy, it will restart itself and use the new proxy for all future -## jobs launched. In seconds, and defaults to 10 minutes -#GRIDMANAGER_CHECKPROXY_INTERVAL = 600 - -## The GridManager will shut things down 3 minutes before losing contact -## because of an expired proxy. -## In seconds, and defaults to 3 minutes -#GRIDMANAGER_MINIMUM_PROXY_TIME = 180 - -## Condor requires that each submitted job be designated to run under a -## particular "universe". -## -## If no universe is specified in the submit file, Condor must pick one -## for the job to use. By default, it chooses the "vanilla" universe. -## The default can be overridden in the config file with the DEFAULT_UNIVERSE -## setting, which is a string to insert into a job submit description if the -## job does not try to define its own universe -## -#DEFAULT_UNIVERSE = vanilla - -# -# The Cred_min_time_left is the first-pass at making sure that Condor-G -# does not submit your job without it having enough time left for the -# job to finish. For example, if you have a job that runs for 20 minutes, and -# you might spend 40 minutes in the queue, it's a bad idea to submit with less -# than an hour left before your proxy expires. -# 2 hours seemed like a reasonable default. -# -CRED_MIN_TIME_LEFT = 120 - - -## -## The GridMonitor allows you to submit many more jobs to a GT2 GRAM server -## than is normally possible. -#ENABLE_GRID_MONITOR = TRUE - -## -## When an error occurs with the GridMonitor, how long should the -## gridmanager wait before trying to submit a new GridMonitor job? -## The default is 1 hour (3600 seconds). -#GRID_MONITOR_DISABLE_TIME = 3600 - -## -## The location of the wrapper for invoking -## Condor GAHP server -## -CONDOR_GAHP = $(SBIN)/condor_c-gahp -CONDOR_GAHP_WORKER = $(SBIN)/condor_c-gahp_worker_thread - -## -## The Condor GAHP server has its own log. Like the Gridmanager, the -## GAHP server is run as the User, not a Condor daemon, so all users must -## have write permission to the directory used for the logfile. Our -## suggestion is to create a directory called GridLogs in $(LOG) with -## UNIX permissions 1777 (just like /tmp ) -## Another option is to use /tmp as the location of the CGAHP log.
-## -MAX_C_GAHP_LOG = 1000000 - -#C_GAHP_LOG = $(LOG)/GridLogs/CGAHPLog.$(USERNAME) -C_GAHP_LOG = /tmp/CGAHPLog.$(USERNAME) -C_GAHP_LOCK = /tmp/CGAHPLock.$(USERNAME) -C_GAHP_WORKER_THREAD_LOG = /tmp/CGAHPWorkerLog.$(USERNAME) -C_GAHP_WORKER_THREAD_LOCK = /tmp/CGAHPWorkerLock.$(USERNAME) - -## -## Location of the PBS/LSF gahp and its associated binaries -## -GLITE_LOCATION = $(LIBEXEC)/glite -BATCH_GAHP = $(GLITE_LOCATION)/bin/batch_gahp - -## -## The location of the wrapper for invoking the Unicore GAHP server -## -UNICORE_GAHP = $(SBIN)/unicore_gahp - -## -## The location of the wrapper for invoking the NorduGrid GAHP server -## -NORDUGRID_GAHP = $(SBIN)/nordugrid_gahp - -## The location of the CREAM GAHP server -CREAM_GAHP = $(SBIN)/cream_gahp - -## Condor-G and CredD can use MyProxy to refresh GSI proxies which are -## about to expire. -#MYPROXY_GET_DELEGATION = /path/to/myproxy-get-delegation - -## The location of the Deltacloud GAHP server -DELTACLOUD_GAHP = $(SBIN)/deltacloud_gahp - -## -## EC2 (REST): Universe = Grid, Grid_Resource = ec2 -## - -## The location of the ec2_gahp program, required -EC2_GAHP = $(SBIN)/ec2_gahp - -## Location of log files, useful for debugging, must be in -## a directory writable by any user, such as /tmp -#EC2_GAHP_DEBUG = D_FULLDEBUG -EC2_GAHP_LOG = /tmp/EC2GahpLog.$(USERNAME) - -## As of this writing EC2 has a hard limit of 20 concurrently -## running instances, so a limit of 20 is imposed so the GridManager -## does not waste its time sending requests that will be rejected. -GRIDMANAGER_MAX_SUBMITTED_JOBS_PER_RESOURCE_EC2 = 20 - -## -##-------------------------------------------------------------------- -## condor_credd credential management daemon -##-------------------------------------------------------------------- -## Where is the CredD binary installed? -CREDD = $(SBIN)/condor_credd - -## When the credd starts up, it can place its address (IP and port) -## into a file. This way, tools running on the local machine don't -## need an additional "-n host:port" command line option. This -## feature can be turned off by commenting out this setting. -CREDD_ADDRESS_FILE = $(LOG)/.credd_address - -## Specify a remote credd server here, -#CREDD_HOST = $(CONDOR_HOST):$(CREDD_PORT) - -## CredD startup arguments -## Start the CredD on a well-known port. Uncomment to simplify -## connecting to a remote CredD. Note that this interface may change -## in a future release. -CREDD_PORT = 9620 -CREDD_ARGS = -p $(CREDD_PORT) -f - -## CredD daemon debugging log -CREDD_LOG = $(LOG)/CredLog -CREDD_DEBUG = D_FULLDEBUG -MAX_CREDD_LOG = 4000000 - -## The credential owner submits the credential. This list specifies -## other users who are also permitted to see all credentials. Defaults -## to root on Unix systems, and Administrator on Windows systems. -#CRED_SUPER_USERS = - -## Credential storage location. This directory must exist -## prior to starting condor_credd. It is highly recommended to -## restrict access permissions to _only_ the directory owner. -CRED_STORE_DIR = $(LOCAL_DIR)/cred_dir - -## Index file path of saved credentials. -## This file will be automatically created if it does not exist. -#CRED_INDEX_FILE = $(CRED_STORE_DIR)/cred-index - -## condor_credd will attempt to refresh credentials when their -## remaining lifespan is less than this value. Units = seconds. -#DEFAULT_CRED_EXPIRE_THRESHOLD = 3600 - -## condor_credd periodically checks remaining lifespan of stored -## credentials, at this interval.
-#CRED_CHECK_INTERVAL = 60 - -## -##-------------------------------------------------------------------- -## Stork data placement server -##-------------------------------------------------------------------- -## Where is the Stork binary installed? -STORK = $(SBIN)/stork_server - -## When Stork starts up, it can place its address (IP and port) -## into a file. This way, tools running on the local machine don't -## need an additional "-n host:port" command line option. This -## feature can be turned off by commenting out this setting. -STORK_ADDRESS_FILE = $(LOG)/.stork_address - -## Specify a remote Stork server here, -#STORK_HOST = $(CONDOR_HOST):$(STORK_PORT) - -## STORK_LOG_BASE specifies the basename for heritage Stork log files. -## Stork uses this macro to create the following output log files: -## $(STORK_LOG_BASE): Stork server job queue classad collection -## journal file. -## $(STORK_LOG_BASE).history: Used to track completed jobs. -## $(STORK_LOG_BASE).user_log: User level log, also used by DAGMan. -STORK_LOG_BASE = $(LOG)/Stork - -## Modern Condor DaemonCore logging feature. -STORK_LOG = $(LOG)/StorkLog -STORK_DEBUG = D_FULLDEBUG -MAX_STORK_LOG = 4000000 - -## Stork startup arguments -## Start Stork on a well-known port. Uncomment to simplify -## connecting to a remote Stork. Note that this interface may change -## in a future release. -#STORK_PORT = 34048 -STORK_PORT = 9621 -STORK_ARGS = -p $(STORK_PORT) -f -Serverlog $(STORK_LOG_BASE) - -## Stork environment. Stork modules may require external programs and -## shared object libraries. These are located using the PATH and -## LD_LIBRARY_PATH environments. Further, some modules may require -## further specific environments. By default, Stork inherits a full -## environment when invoked from condor_master or the shell. If the -## default environment is not adequate for all Stork modules, specify -## a replacement environment here. This environment will be set by -## condor_master before starting Stork, but does not apply if Stork is -## started directly from the command line. -#STORK_ENVIRONMENT = TMP=/tmp;CONDOR_CONFIG=/special/config;PATH=/lib - -## Limits the number of concurrent data placements handled by Stork. -#STORK_MAX_NUM_JOBS = 5 - -## Limits the number of retries for a failed data placement. -#STORK_MAX_RETRY = 5 - -## Limits the run time for a data placement job, after which the -## placement is considered failed. -#STORK_MAXDELAY_INMINUTES = 10 - -## Temporary credential storage directory used by Stork. -#STORK_TMP_CRED_DIR = /tmp - -## Directory containing Stork modules. -#STORK_MODULE_DIR = $(LIBEXEC) - -## -##-------------------------------------------------------------------- -## Quill Job Queue Mirroring Server -##-------------------------------------------------------------------- -## Where is the Quill binary installed and what arguments should be passed? -QUILL = $(SBIN)/condor_quill -#QUILL_ARGS = - -# Where is the log file for the quill daemon? -QUILL_LOG = $(LOG)/QuillLog - -# The identification and location of the quill daemon for local clients. -QUILL_ADDRESS_FILE = $(LOG)/.quill_address - -# If this is set to true, then the rest of the QUILL arguments must be defined -# for quill to function. If it is False or left undefined, then quill will not -# be consulted by either the scheduler or the tools, but in the case of a -# remote quill query where the local client has quill turned off, but the -# remote client has quill turned on, things will still function normally.
-#QUILL_ENABLED = TRUE - -# -# If Quill is enabled, by default it will only mirror the current job -# queue into the database. For historical jobs, and classads from other -# sources, the SQL Log must be enabled. -#QUILL_USE_SQL_LOG=FALSE - -# -# The SQL Log can be enabled on a per-daemon basis. For example, to collect -# historical job information, but store no information about execute machines, -# uncomment these two lines -#QUILL_USE_SQL_LOG = FALSE -#SCHEDD.QUILL_USE_SQL_LOG = TRUE - -# This will be the name of a quill daemon using this config file. This name -# should not conflict with any other quill name--or schedd name. -#QUILL_NAME = quill@postgresql-server.machine.com - -# The PostgreSQL server requires usernames that can manipulate tables. This will -# be the username associated with this instance of the quill daemon mirroring -# a schedd's job queue. Each quill daemon must have a unique username -# associated with it otherwise multiple quill daemons will corrupt the data -# held under an identical user name. -#QUILL_DB_NAME = name_of_db - -# The required password for the DB user which quill will use to read -# information from the database about the queue. -#QUILL_DB_QUERY_PASSWORD = foobar - -# What kind of database server is this? -# For now, only PGSQL is supported -#QUILL_DB_TYPE = PGSQL - -# The machine and port of the postgres server. -# Although this says IP Addr, it can be a DNS name. -# It must match whatever format you used for the .pgpass file, however -#QUILL_DB_IP_ADDR = machine.domain.com:5432 - -# The login to use to attach to the database for updating information. -# There should be an entry in file $SPOOL/.pgpass that gives the password -# for this login id. -#QUILL_DB_USER = quillwriter - -# Polling period, in seconds, for when quill reads transactions out of the -# schedd's job queue log file and puts them into the database. -#QUILL_POLLING_PERIOD = 10 - -# Allows or disallows a remote query to the quill daemon and database -# which is reading this log file. Defaults to true. -#QUILL_IS_REMOTELY_QUERYABLE = TRUE - -# Add debugging flags here if you need to debug quill for some reason. -#QUILL_DEBUG = D_FULLDEBUG - -# Number of seconds the master should wait for the Quill daemon to respond -# before killing it. This number might need to be increased for very -# large logfiles. -# The default is 3600 (one hour), but kicking it up to a few hours won't hurt -#QUILL_NOT_RESPONDING_TIMEOUT = 3600 - -# Should Quill hold open a database connection to the DBMSD? -# Each open connection consumes resources at the server, so large pools -# (100 or more machines) should set this variable to FALSE. Note the -# default is TRUE. -#QUILL_MAINTAIN_DB_CONN = TRUE - -## -##-------------------------------------------------------------------- -## Database Management Daemon settings -##-------------------------------------------------------------------- -## Where is the DBMSd binary installed and what arguments should be passed? -DBMSD = $(SBIN)/condor_dbmsd -DBMSD_ARGS = -f - -# Where is the log file for the DBMSd daemon?
-DBMSD_LOG = $(LOG)/DbmsdLog - -# Interval between consecutive purging calls (in seconds) -#DATABASE_PURGE_INTERVAL = 86400 - -# Interval between consecutive database reindexing operations -# This is only used when dbtype = PGSQL -#DATABASE_REINDEX_INTERVAL = 86400 - -# Number of days before purging resource classad history -# This includes things like machine ads, daemon ads, submitters -#QUILL_RESOURCE_HISTORY_DURATION = 7 - -# Number of days before purging job run information -# This includes job events, file transfers, matchmaker matches, etc -# This does NOT include the final job ad. condor_history does not need -# any of this information to work. -#QUILL_RUN_HISTORY_DURATION = 7 - -# Number of days before purging job classad history -# This is the information needed to run condor_history -#QUILL_JOB_HISTORY_DURATION = 3650 - -# DB size threshold for warning the condor administrator. This is checked -# after every purge. The size is given in gigabytes. -#QUILL_DBSIZE_LIMIT = 20 - -# Number of seconds the master should wait for the DBMSD to respond before -# killing it. This number might need to be increased for very large databases -# The default is 3600 (one hour). -#DBMSD_NOT_RESPONDING_TIMEOUT = 3600 - -## -##-------------------------------------------------------------------- -## VM Universe Parameters -##-------------------------------------------------------------------- -## Where is the Condor VM-GAHP installed? (Required) -VM_GAHP_SERVER = $(SBIN)/condor_vm-gahp - -## If the VM-GAHP is to have its own log, define -## the location of log file. -## -## Optionally, if you do NOT define VM_GAHP_LOG, logs of VM-GAHP will -## be stored in the starter's log file. -## However, on Windows machine you must always define VM_GAHP_LOG. -# -VM_GAHP_LOG = $(LOG)/VMGahpLog -MAX_VM_GAHP_LOG = 1000000 -#VM_GAHP_DEBUG = D_FULLDEBUG - -## What kind of virtual machine program will be used for -## the VM universe? -## The two options are vmware and xen. (Required) -#VM_TYPE = vmware - -## How much memory can be used for the VM universe? (Required) -## This value is the maximum amount of memory that can be used by the -## virtual machine program. -#VM_MEMORY = 128 - -## Want to support networking for VM universe? -## Default value is FALSE -#VM_NETWORKING = FALSE - -## What kind of networking types are supported? -## -## If you set VM_NETWORKING to TRUE, you must define this parameter. -## VM_NETWORKING_TYPE = nat -## VM_NETWORKING_TYPE = bridge -## VM_NETWORKING_TYPE = nat, bridge -## -## If multiple networking types are defined, you may define -## VM_NETWORKING_DEFAULT_TYPE for default networking type. -## Otherwise, nat is used for default networking type. -## VM_NETWORKING_DEFAULT_TYPE = nat -#VM_NETWORKING_DEFAULT_TYPE = nat -#VM_NETWORKING_TYPE = nat - -## In default, the number of possible virtual machines is same as -## NUM_CPUS. -## Since too many virtual machines can cause the system to be too slow -## and lead to unexpected problems, limit the number of running -## virtual machines on this machine with -#VM_MAX_NUMBER = 2 - -## When a VM universe job is started, a status command is sent -## to the VM-GAHP to see if the job is finished. -## If the interval between checks is too short, it will consume -## too much of the CPU. If the VM-GAHP fails to get status 5 times in a row, -## an error will be reported to startd, and then startd will check -## the availability of VM universe. 
-## Default value is 60 seconds and minimum value is 30 seconds -#VM_STATUS_INTERVAL = 60 - -## How long will we wait for a request sent to the VM-GAHP to be completed? -## If a request is not completed within the timeout, an error will be reported -## to the startd, and then the startd will check -## the availability of vm universe. Default value is 5 mins. -#VM_GAHP_REQ_TIMEOUT = 300 - -## When VMware or Xen causes an error, the startd will disable the -## VM universe. However, because some errors are just transient, -## we will test once more -## whether vm universe is still unavailable after some time. -## By default, startd will recheck vm universe after 10 minutes. -## If the test also fails, vm universe will be disabled. -#VM_RECHECK_INTERVAL = 600 - -## Usually, when we suspend a VM, the memory being used by the VM -## will be saved into a file and then freed. -## However, when we use soft suspend, neither saving nor memory freeing -## will occur. -## For VMware, we send SIGSTOP to a process for VM in order to -## stop the VM temporarily and send SIGCONT to resume the VM. -## For Xen, we pause CPU. Pausing CPU doesn't save the memory of VM -## into a file. It only stops the execution of a VM temporarily. -#VM_SOFT_SUSPEND = TRUE - -## If Condor runs as root and a job comes from a different UID domain, -## Condor generally uses "nobody", unless SLOTx_USER is defined. -## If "VM_UNIV_NOBODY_USER" is defined, a VM universe job will run -## as the user defined in "VM_UNIV_NOBODY_USER" instead of "nobody". -## -## Notice: In VMware VM universe, "nobody" cannot create a VMware VM. -## So we need to define "VM_UNIV_NOBODY_USER" with a regular user. -## For VMware, the user defined in "VM_UNIV_NOBODY_USER" must have a -## home directory. So SOFT_UID_DOMAIN doesn't work for VMware VM universe job. -## If neither "VM_UNIV_NOBODY_USER" nor "SLOTx_VMUSER"/"SLOTx_USER" is defined, -## VMware VM universe job will run as "condor" instead of "nobody". -## As a result, the preference of local users for a VMware VM universe job -## which comes from the different UID domain is -## "VM_UNIV_NOBODY_USER" -> "SLOTx_VMUSER" -> "SLOTx_USER" -> "condor". -#VM_UNIV_NOBODY_USER = login name of a user who has home directory - -## If Condor runs as root and "ALWAYS_VM_UNIV_USE_NOBODY" is set to TRUE, -## all VM universe jobs will run as a user defined in "VM_UNIV_NOBODY_USER". -#ALWAYS_VM_UNIV_USE_NOBODY = FALSE - -##-------------------------------------------------------------------- -## VM Universe Parameters Specific to VMware -##-------------------------------------------------------------------- - -## Where is the perl program? (Required) -VMWARE_PERL = perl - -## Where is the Condor script program to control VMware? (Required) -VMWARE_SCRIPT = $(SBIN)/condor_vm_vmware - -## Networking parameters for VMware -## -## What kind of VMware networking is used? -## -## If multiple networking types are defined, you may specify different -## parameters for each networking type. -## -## Examples -## (e.g.) VMWARE_NAT_NETWORKING_TYPE = nat -## (e.g.) VMWARE_BRIDGE_NETWORKING_TYPE = bridged -## -## If there is no parameter for a specific networking type, VMWARE_NETWORKING_TYPE is used. -## -#VMWARE_NAT_NETWORKING_TYPE = nat -#VMWARE_BRIDGE_NETWORKING_TYPE = bridged -VMWARE_NETWORKING_TYPE = nat - -## The contents of this file will be inserted into the .vmx file of -## the VMware virtual machine before Condor starts it.
-#VMWARE_LOCAL_SETTINGS_FILE = /path/to/file - -##-------------------------------------------------------------------- -## VM Universe Parameters common to libvirt controlled vm's (xen & kvm) -##-------------------------------------------------------------------- - -## Networking parameters for Xen & KVM -## -## This is the path to the XML helper command; the libvirt_simple_script.awk -## script just reproduces what Condor already does for the kvm/xen VM -## universe -LIBVIRT_XML_SCRIPT = $(LIBEXEC)/libvirt_simple_script.awk - -## This is the optional debugging output file for the xml helper -## script. Scripts that need to output debugging messages should -## write them to the file specified by this argument, which will be -## passed as the second command line argument when the script is -## executed - -#LIBVRT_XML_SCRIPT_ARGS = /dev/stderr - -##-------------------------------------------------------------------- -## VM Universe Parameters Specific to Xen -##-------------------------------------------------------------------- - -## Where is bootloader for Xen domainU? (Required) -## -## The bootloader will be used in the case that a kernel image includes -## a disk image -#XEN_BOOTLOADER = /usr/bin/pygrub - -## -##-------------------------------------------------------------------- -## condor_lease_manager lease manager daemon -##-------------------------------------------------------------------- -## Where is the LeaseManager binary installed? -LeaseManager = $(SBIN)/condor_lease_manager - -# Turn on the lease manager -#DAEMON_LIST = $(DAEMON_LIST), LeaseManager - -# The identification and location of the lease manager for local clients. -LeaseManger_ADDRESS_FILE = $(LOG)/.lease_manager_address - -## LeaseManager startup arguments -#LeaseManager_ARGS = -local-name generic - -## LeaseManager daemon debugging log -LeaseManager_LOG = $(LOG)/LeaseManagerLog -LeaseManager_DEBUG = D_FULLDEBUG -MAX_LeaseManager_LOG = 1000000 - -# Basic parameters -LeaseManager.GETADS_INTERVAL = 60 -LeaseManager.UPDATE_INTERVAL = 300 -LeaseManager.PRUNE_INTERVAL = 60 -LeaseManager.DEBUG_ADS = False - -LeaseManager.CLASSAD_LOG = $(SPOOL)/LeaseManagerState -#LeaseManager.QUERY_ADTYPE = Any -#LeaseManager.QUERY_CONSTRAINTS = MyType == "SomeType" -#LeaseManager.QUERY_CONSTRAINTS = TargetType == "SomeType" - -## -##-------------------------------------------------------------------- -## KBDD - keyboard activity detection daemon -##-------------------------------------------------------------------- -## When the KBDD starts up, it can place it's address (IP and port) -## into a file. This way, tools running on the local machine don't -## need an additional "-n host:port" command line option. This -## feature can be turned off by commenting out this setting. -KBDD_ADDRESS_FILE = $(LOG)/.kbdd_address - -## -##-------------------------------------------------------------------- -## condor_ssh_to_job -##-------------------------------------------------------------------- -# NOTE: condor_ssh_to_job is not supported under Windows. - -# Tell the starter (execute side) whether to allow the job owner or -# queue super user on the schedd from which the job was submitted to -# use condor_ssh_to_job to access the job interactively (e.g. for -# debugging). TARGET is the job; MY is the machine. -#ENABLE_SSH_TO_JOB = true - -# Tell the schedd (submit side) whether to allow the job owner or -# queue super user to use condor_ssh_to_job to access the job -# interactively (e.g. for debugging). MY is the job; TARGET is not -# defined. 
-#SCHEDD_ENABLE_SSH_TO_JOB = true - -# Command condor_ssh_to_job should use to invoke the ssh client. -# %%h --> remote host -# %%i --> ssh key file -# %%k --> known hosts file -# %%u --> remote user -# %%x --> proxy command -# %%%% --> %% -#SSH_TO_JOB_SSH_CMD = "ssh -oUser=%%u -oIdentityFile=%%i -oStrictHostKeyChecking=yes -oUserKnownHostsFile=%%k -oGlobalKnownHostsFile=%%k -oProxyCommand=%%x %%h" - -# Additional ssh clients may be configured. They all have the same -# default as ssh, except for scp, which omits the %%h: -#SSH_TO_JOB_SCP_CMD = "scp -oUser=%%u -oIdentityFile=%%i -oStrictHostKeyChecking=yes -oUserKnownHostsFile=%%k -oGlobalKnownHostsFile=%%k -oProxyCommand=%%x" - -# Path to sshd -#SSH_TO_JOB_SSHD = /usr/sbin/sshd - -# Arguments the starter should use to invoke sshd in inetd mode. -# %%f --> sshd config file -# %%%% --> %% -#SSH_TO_JOB_SSHD_ARGS = "-i -e -f %%f" - -# sshd configuration template used by condor_ssh_to_job_sshd_setup. -#SSH_TO_JOB_SSHD_CONFIG_TEMPLATE = $(LIB)/condor_ssh_to_job_sshd_config_template - -# Path to ssh-keygen -#SSH_TO_JOB_SSH_KEYGEN = /usr/bin/ssh-keygen - -# Arguments to ssh-keygen -# %%f --> key file to generate -# %%%% --> %% -#SSH_TO_JOB_SSH_KEYGEN_ARGS = "-N '' -C '' -q -f %%f -t rsa" - -###################################################################### -## -## Condor HDFS -## -## This is the default local configuration file for configuring Condor -## daemon responsible for running services related to hadoop -## distributed storage system. You should copy this file to the -## appropriate location and customize it for your needs. -## -## Unless otherwise specified, settings that are commented out show -## the defaults that are used if you don't define a value. Settings -## that are defined here MUST BE DEFINED since they have no default -## value. -## -###################################################################### - -###################################################################### -## FOLLOWING MUST BE CHANGED -###################################################################### - -## The location of the hadoop installation directory. The default location -## is under 'libexec' directory. The directory pointed to by HDFS_HOME -## should contain a lib folder that contains all the required Jars necessary -## to run HDFS name and data nodes. -#HDFS_HOME = $(RELEASE_DIR)/libexec/hdfs - -## The host and port for hadoop's name node. If this machine is the -## name node (see HDFS_SERVICES) then the specified port will be used -## to run name node. -HDFS_NAMENODE = hdfs://example.com:9000 -HDFS_NAMENODE_WEB = example.com:8000 - -HDFS_BACKUPNODE = hdfs://example.com:50100 -HDFS_BACKUPNODE_WEB = example.com:50105 - -## You need to pick one machine as name node by setting this parameter -## to HDFS_NAMENODE. The remaining machines in a storage cluster will -## act as data nodes (HDFS_DATANODE). -HDFS_NODETYPE = HDFS_DATANODE - -## If the machine is selected to be a NameNode, then a role should be defined. -## If it is selected to be a DataNode, then this parameter is ignored. -## Available options: -## ACTIVE: Active NameNode role (default value) -## BACKUP: Always synchronized with the active NameNode state, thus -## creating a backup of the namespace. Currently the NameNode -## supports one Backup node at a time. -## CHECKPOINT: Periodically creates checkpoints of the namespace. -HDFS_NAMENODE_ROLE = ACTIVE - -## The two sets of directories that are required by HDFS are for name -## node (HDFS_NAMENODE_DIR) and data node (HDFS_DATANODE_DIR).
The -## directory for name node is only required for a machine running -## name node service and is used to store critical meta data for -## files. The data node needs its directory to store file blocks and -## their replicas. -HDFS_NAMENODE_DIR = /tmp/hadoop_name -HDFS_DATANODE_DIR = /scratch/tmp/hadoop_data - -## Unlike name node address settings (HDFS_NAMENODE), that needs to be -## well known across the storage cluster, data node can run on any -## arbitrary port of given host. -#HDFS_DATANODE_ADDRESS = 0.0.0.0:0 - -#################################################################### -## OPTIONAL -##################################################################### - -## Sets the log4j debug level. All the emitted debug output from HDFS -## will go in 'hdfs.log' under $(LOG) directory. -#HDFS_LOG4J=DEBUG - -## The access to HDFS services both name node and data node can be -## restricted by specifying IP/host based filters. By default settings -## from ALLOW_READ/ALLOW_WRITE and DENY_READ/DENY_WRITE -## are used to specify allow and deny list. The below two parameters can -## be used to override these settings. Read the Condor manual for -## specification of these filters. -## WARN: HDFS doesn't make any distinction between read or write based connection. -#HDFS_ALLOW=* -#HDFS_DENY=* - -#Fully qualified name for Name node and Datanode class. -#HDFS_NAMENODE_CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode -#HDFS_DATANODE_CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode -#HDFS_DFSADMIN_CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin - -## In case an old name for hdfs configuration files is required. -#HDFS_SITE_FILE = hdfs-site.xml - - -## -##-------------------------------------------------------------------- -## file transfer plugin defaults -##-------------------------------------------------------------------- -FILETRANSFER_PLUGINS = $(LIBEXEC)/curl_plugin, $(LIBEXEC)/data_plugin diff --git a/slapos/recipe/condor/template/condor_config.local b/slapos/recipe/condor/template/condor_config.local deleted file mode 100644 index ee4994cfdd182676cfb7f120af417d0742e83e27..0000000000000000000000000000000000000000 --- a/slapos/recipe/condor/template/condor_config.local +++ /dev/null @@ -1,55 +0,0 @@ - -## Pathnames: -## Where have you installed the bin, sbin and lib condor directories? - -RELEASE_DIR = %(releasedir)s - - -## Where is the local condor directory for each host? -## This is where the local config file(s), logs and -## spool/execute directories are located - -LOCAL_DIR = %(localdir)s - - -## Mail parameters: -## When something goes wrong with condor at your site, who should get -## the email? - -CONDOR_ADMIN = - - -## The user/group ID <uid>.<gid> of the "Condor" user. -## (this can also be specified in the environment) -## Note: the CONDOR_IDS setting is ignored on Win32 platforms - -CONDOR_IDS = %(slapuser)s - - -## Condor needs to create a few lock files to synchronize access to -## various log files. Because of problems we've had with network -## filesystems and file locking over the years, we HIGHLY recommend -## that you put these lock files on a local partition on each -## machine. If you don't have your LOCAL_DIR on a local partition, -## be sure to change this entry. Whatever user (or group) condor is -## running as needs to have write access to this directory. If -## you're not running as root, this is whatever user you started up -## the condor_master as. If you are running as root, and there's a -## condor account, it's probably condor. 
Otherwise, it's whatever -## you've set in the CONDOR_IDS environment variable. See the Admin -## manual for details on this. - -LOCK = /tmp/condor-lock.$(HOSTNAME)0.829500835462571 - -DAEMON_LIST = MASTER, SCHEDD, STARTD - - -## Java parameters: -## If you would like this machine to be able to run Java jobs, -## then set JAVA to the path of your JVM binary. If you are not -## interested in Java, there is no harm in leaving this entry -## empty or incorrect. - -JAVA = %(java-bin)s - -JAVA_MAXHEAP_ARGUMENT = -Xmx1024m diff --git a/slapos/recipe/container.py b/slapos/recipe/container.py deleted file mode 100644 index 96dbb2113d646584d018d91cb9bc7202768a2714..0000000000000000000000000000000000000000 --- a/slapos/recipe/container.py +++ /dev/null @@ -1,92 +0,0 @@ -############################################################################## -# -# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## - -import ConfigParser -import uuid -import os -import subprocess - -# XXX : This is in order to get the computer_partition object -# which exposes the state of the current partition. -# -# XXX : We could have modify slapgrid in order to put the -# state of the current partition offline. But this is -# written to have the most minimal impact. 
-from slapos.recipe.librecipe import GenericSlapRecipe -from slapos.recipe.librecipe import GenericBaseRecipe - -class Recipe(GenericSlapRecipe): - - def _options(self, options): - config_filename = self.options['config'] - - container_uuid = None - - if os.path.exists(config_filename): - config = ConfigParser.ConfigParser() - config.read(config_filename) - if config.has_option('requested', 'name'): - container_uuid = uuid.UUID(hex=config.get('requested', 'name')) - - if container_uuid is None: - # uuid wasn't generated at first in order to avoid - # wasting entropy - container_uuid = uuid.uuid4() - - options['slapcontainer-name'] = container_uuid.hex - - return options - - - - def _install(self): - path_list = [] - - self.logger.info("Putting slapcontainer configuration file...") - - config = ConfigParser.ConfigParser() - config.add_section('requested') - config.set('requested', 'status', - self.computer_partition.getState()) - config.set('requested', 'name', self.options['slapcontainer-name']) - config.add_section('rootfs') - config.set('rootfs', 'image', self.options['image']) - config.set('rootfs', 'complete', self.options['image-complete']) - config.add_section('network') - config.set('network', 'interface', self.options['interface']) - config.add_section('config') - config.set('config', 'file', self.options['lxc-config']) - - # Just a touch - open(self.options['lxc-config'], 'a').close() - - config_filename = self.options['config'] - with open(config_filename, 'w') as config_file: - config.write(config_file) - path_list.append(config_filename) - - return path_list diff --git a/slapos/recipe/lampgeneric/__init__.py b/slapos/recipe/lampgeneric/__init__.py deleted file mode 100644 index 695a72c4d2fdbc326ccd4cdf552cc2ae21d767d4..0000000000000000000000000000000000000000 --- a/slapos/recipe/lampgeneric/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -############################################################################## -# -# Copyright (c) 2013 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -############################################################################## - -import shutil -import os -import zc.buildout - -from slapos.recipe.librecipe import GenericBaseRecipe - -class Recipe(GenericBaseRecipe): - # XXX-Cedric: write docstring - - def install(self): - path_list = [] - - # Download and unpack application if not already existing - htdocs_location = self.options['htdocs'] - if not (os.path.exists(htdocs_location) and os.listdir(htdocs_location)): - try: - os.rmdir(htdocs_location) - except: - pass - self.download(htdocs_location) - - # Install php.ini - php_ini = self.createFile(os.path.join(self.options['php-ini-dir'], - 'php.ini'), - self.substituteTemplate(self.getTemplateFilename('php.ini.in'), - dict(tmp_directory=self.options['tmp-dir'])) - ) - path_list.append(php_ini) - - # Install apache - apache_config = dict( - pid_file=self.options['pid-file'], - lock_file=self.options['lock-file'], - ip=self.options['ip'], - port=self.options['port'], - error_log=self.options['error-log'], - access_log=self.options['access-log'], - document_root=self.options['htdocs'], - php_ini_dir=self.options['php-ini-dir'], - ) - httpd_conf = self.createFile(self.options['httpd-conf'], - self.substituteTemplate(self.getTemplateFilename('apache.in'), - apache_config) - ) - path_list.append(httpd_conf) - - wrapper = self.createWrapper(self.options['wrapper'], - (self.options['httpd-binary'], - '-f', - self.options['httpd-conf'], - '-DFOREGROUND' - )) - path_list.append(wrapper) - - - return path_list diff --git a/slapos/recipe/lampgeneric/template/apache.in b/slapos/recipe/lampgeneric/template/apache.in deleted file mode 100644 index a354890425d6f96e946627a659c65299e4c0091c..0000000000000000000000000000000000000000 --- a/slapos/recipe/lampgeneric/template/apache.in +++ /dev/null @@ -1,56 +0,0 @@ -# Apache static configuration -# Automatically generated - -# Basic server configuration -PidFile "%(pid_file)s" -Listen %(ip)s:%(port)s -PHPINIDir %(php_ini_dir)s -ServerAdmin someone@email -DefaultType text/plain -TypesConfig conf/mime.types -AddType application/x-compress .Z -AddType application/x-gzip .gz .tgz -AddType application/x-httpd-php .php .phtml .php5 .php4 -AddType application/x-httpd-php-source .phps - -# Log configuration -ErrorLog "%(error_log)s" -LogLevel warn -LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined -LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common -CustomLog "%(access_log)s" common - -# Directory protection -<Directory /> - Options FollowSymLinks - AllowOverride None - Require all denied -</Directory> - -<Directory %(document_root)s> - Options FollowSymLinks - AllowOverride All - Require all granted -</Directory> -DocumentRoot %(document_root)s -DirectoryIndex index.html index.php - -# List of modules -LoadModule unixd_module modules/mod_unixd.so -LoadModule access_compat_module modules/mod_access_compat.so -LoadModule authz_core_module modules/mod_authz_core.so -LoadModule authz_host_module modules/mod_authz_host.so -LoadModule log_config_module modules/mod_log_config.so -LoadModule setenvif_module modules/mod_setenvif.so -LoadModule version_module modules/mod_version.so -LoadModule proxy_module modules/mod_proxy.so -LoadModule proxy_http_module modules/mod_proxy_http.so -LoadModule mime_module modules/mod_mime.so -LoadModule dav_module modules/mod_dav.so -LoadModule dav_fs_module modules/mod_dav_fs.so -LoadModule negotiation_module modules/mod_negotiation.so -LoadModule 
rewrite_module modules/mod_rewrite.so -LoadModule headers_module modules/mod_headers.so -LoadModule dir_module modules/mod_dir.so -LoadModule php5_module modules/libphp5.so -LoadModule alias_module modules/mod_alias.so diff --git a/slapos/recipe/lampgeneric/template/php.ini.in b/slapos/recipe/lampgeneric/template/php.ini.in deleted file mode 100644 index 955d2af75dc9f0a776930fba6870bc44707e7f53..0000000000000000000000000000000000000000 --- a/slapos/recipe/lampgeneric/template/php.ini.in +++ /dev/null @@ -1,18 +0,0 @@ -[PHP] -engine = On -safe_mode = Off -expose_php = Off -error_reporting = E_ALL & ~(E_DEPRECATED|E_NOTICE|E_WARNING) -display_errors = On -display_startup_errors = Off -log_errors = On -log_errors_max_len = 1024 -ignore_repeated_errors = Off -ignore_repeated_source = Off -session.save_path = "%(tmp_directory)s" -session.auto_start = 0 -date.timezone = Europe/Paris -file_uploads = On -upload_max_filesize = 8M -post_max_size = 8M -magic_quotes_gpc=Off diff --git a/slapos/recipe/librecipe/generic.py b/slapos/recipe/librecipe/generic.py index 181bc80a870ff8ff77a290400e49acee16a6f1fe..fe82d02ad45d9ca1fc9104813173b61d7d9ec950 100644 --- a/slapos/recipe/librecipe/generic.py +++ b/slapos/recipe/librecipe/generic.py @@ -32,7 +32,6 @@ import os import sys import inspect import re -import shutil import stat from six.moves.urllib.parse import quote import itertools @@ -255,31 +254,3 @@ class GenericBaseRecipe(object): url = urlunparse((scheme, netloc, path, params, query, fragment)) return url - - def setLocationOption(self): - if not self.options.get('location'): - self.options['location'] = os.path.join( - self.buildout['buildout']['parts-directory'], self.name) - - def download(self, destination=None): - """ A simple wrapper around h.r.download, downloading to self.location""" - self.setLocationOption() - - import hexagonit.recipe.download - if not destination: - destination = self.location - if os.path.exists(destination): - # leftovers from a previous failed attempt, removing it. - self.logger.warning('Removing already existing directory %s', - destination) - shutil.rmtree(destination) - os.mkdir(destination) - - try: - options = self.options.copy() - options['destination'] = destination - hexagonit.recipe.download.Recipe( - self.buildout, self.name, options).install() - except: - shutil.rmtree(destination) - raise diff --git a/slapos/recipe/nosqltestbed/__init__.py b/slapos/recipe/nosqltestbed/__init__.py deleted file mode 100644 index 2b1eb49f0d57b1ac787c74cd951e11115d9d0185..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -############################################################################## -# -# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## - -from __future__ import print_function - -import sys -import pkg_resources -from logging import Formatter -from slapos.recipe.librecipe import BaseSlapRecipe - -class NoSQLTestBed(BaseSlapRecipe): - - def _install(self): - self.parameter_dict = self.computer_partition.getInstanceParameterDict() - try: - entry_point = pkg_resources.iter_entry_points(group='slapos.recipe.nosqltestbed.plugin', - name=self.parameter_dict.get('plugin', 'kumo')).next() - plugin_class = entry_point.load() - - testbed = plugin_class() - except: - print(Formatter().formatException(sys.exc_info())) - return None - - software_type = self.parameter_dict.get('slap_software_type', 'default') - if software_type is None or software_type == 'RootSoftwareInstance': - software_type = 'default' - if "run_%s" % software_type in dir(testbed) and \ - callable(getattr(testbed, "run_%s" % software_type)): - return getattr(testbed, "run_%s" % software_type)(self) - else: - raise NotImplementedError("Do not support %s" % software_type) - diff --git a/slapos/recipe/nosqltestbed/kumo/__init__.py b/slapos/recipe/nosqltestbed/kumo/__init__.py deleted file mode 100644 index cbc11bf18b1b2632d491927e4d000a2fd9abd265..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/__init__.py +++ /dev/null @@ -1,288 +0,0 @@ -############################################################################## -# -# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -############################################################################## - -import os -import pkg_resources -from logging import Formatter - -class KumoTestBed(object): - - def run_default(self, recipe): - run_kumo_cloud(recipe) - - def run_kumo_cloud(self, recipe): - """ Deploy kumofs system on a cloud. 
""" - - kumo_cloud_config = {} - kumo_cloud_config.update(recipe.options) - kumo_cloud_config.update(recipe.parameter_dict) - - kumo_cloud_config['address'] = recipe.getGlobalIPv6Address() - kumo_cloud_config['report_path'] = recipe.log_directory - - kumo_cloud_config.setdefault('max_server', 4) - kumo_cloud_config.setdefault('max_tester', 5) - kumo_cloud_config.setdefault('nb_thread', 32) - kumo_cloud_config.setdefault('nb_request', 1024000) - kumo_cloud_config.setdefault('erp5_publish_url', '') - kumo_cloud_config.setdefault('erp5_publish_project', '') - - computer_guid_list = [] - computer_guid_list.append("COMP-23") # manager - computer_guid_list.append("COMP-13") # server 1 - computer_guid_list.append("COMP-14") # server 2 - computer_guid_list.append("COMP-20") # server 3 - computer_guid_list.append("COMP-19") # server 4 - computer_guid_list.append("COMP-23") # tester 1 - computer_guid_list.append("COMP-22") # tester 2 - computer_guid_list.append("COMP-14") # tester 3 - computer_guid_list.append("COMP-20") # tester 4 - computer_guid_list.append("COMP-19") # tester 5 - - kumo_cloud_config.setdefault('computer_guid_list', ":".join(computer_guid_list)) - - kumo_cloud_config['software_release_url'] = recipe.software_release_url - kumo_cloud_config['server_url'] = recipe.server_url - kumo_cloud_config['key_file'] = recipe.key_file - kumo_cloud_config['cert_file'] = recipe.cert_file - kumo_cloud_config['computer_id'] = recipe.computer_id - kumo_cloud_config['computer_partition_id'] = recipe.computer_partition_id - kumo_cloud_config['plugin_name'] = 'kumo' - - kumo_cloud_connection = {} - kumo_cloud_connection['url'] = "http://["+kumo_cloud_config['address']+"]:5000/" - kumo_cloud_connection['computer_guid_list'] = kumo_cloud_config['computer_guid_list'] - recipe.computer_partition.setConnectionDict(kumo_cloud_connection) - - nosqltester_manager_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumotester_manager_run.in')) - nosqltester_manager_runner_path = recipe.createRunningWrapper("kumotester_manager", - recipe.substituteTemplate(nosqltester_manager_wrapper_template_location, kumo_cloud_config)) - - return [nosqltester_manager_runner_path] - - def run_all(self, recipe): - """ Run all services on one machine. 
""" - all_config = {} - all_config.update(recipe.options) - - ipaddress = "[%s]" % recipe.getGlobalIPv6Address() - - all_config['manager_address'] = ipaddress - all_config['manager_port'] = 19700 - all_config['server_address'] = ipaddress - all_config['server_port'] = 19800 - all_config['server_listen_port'] = 19900 - all_config['server_storage'] = os.path.join(recipe.data_root_directory, "kumodb.tch") - all_config['gateway_address'] = ipaddress - all_config['gateway_port'] = 11411 - all_config['manager_log'] = os.path.join(recipe.log_directory, "kumo-manager.log") - all_config['server_log'] = os.path.join(recipe.log_directory, "kumo-server.log") - all_config['gateway_log'] = os.path.join(recipe.log_directory, "kumo-gateway.log") - - manager_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_manager_run.in')) - manager_runner_path = recipe.createRunningWrapper("kumo-manager", - recipe.substituteTemplate(manager_wrapper_template_location, all_config)) - server_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_server_run.in')) - server_runner_path = recipe.createRunningWrapper("kumo-server", - recipe.substituteTemplate(server_wrapper_template_location, all_config)) - gateway_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_gateway_run.in')) - gateway_runner_path = recipe.createRunningWrapper("kumo-gateway", - recipe.substituteTemplate(gateway_wrapper_template_location, all_config)) - - return [manager_runner_path, server_runner_path, gateway_runner_path] - - def run_kumo_manager(self, recipe): - """ Run the kumofs manager. """ - manager_config = {} - manager_config.update(recipe.options) - - manager_config['manager_address'] = "[%s]" % recipe.getGlobalIPv6Address() - manager_config['manager_port'] = 19700 - manager_config['manager_log'] = os.path.join(recipe.log_directory, "kumo-manager.log") - - manager_connection = {} - manager_connection['address'] = manager_config['manager_address'] - manager_connection['port'] = manager_config['manager_port'] - recipe.computer_partition.setConnectionDict(manager_connection) - - manager_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_manager_run.in')) - manager_runner_path = recipe.createRunningWrapper("kumo-manager", - recipe.substituteTemplate(manager_wrapper_template_location, manager_config)) - - return [manager_runner_path] - - def run_kumo_server(self, recipe): - """ Run the kumofs server. 
""" - server_config = {} - server_config.update(recipe.options) - server_config.update(recipe.parameter_dict) - - server_config['server_address'] = "[%s]" % recipe.getGlobalIPv6Address() - server_config['server_port'] = 19800 - server_config['server_listen_port'] = 19900 - server_config['server_storage'] = os.path.join(recipe.var_directory,"kumodb.tch") - server_config['server_log'] = os.path.join(recipe.log_directory, "kumo-server.log") - - server_connection = {} - server_connection['address'] = server_config['server_address'] - recipe.computer_partition.setConnectionDict(server_connection) - - server_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_server_run.in')) - server_runner_path = recipe.createRunningWrapper("kumo-server", - recipe.substituteTemplate(server_wrapper_template_location, server_config)) - - return [server_runner_path] - - def run_kumo_gateway(self, recipe): - """ Run the kumofs gateway. """ - gateway_config = {} - gateway_config.update(recipe.options) - gateway_config.update(recipe.parameter_dict) - - gateway_config['gateway_address'] = "[%s]" % recipe.getGlobalIPv6Address() - gateway_config['gateway_port'] = 11411 - gateway_config['gateway_log'] = os.path.join(recipe.log_directory, "kumo-gateway.log") - - gateway_connection = {} - gateway_connection['address'] = gateway_config['gateway_address'] - gateway_connection['port'] = gateway_config['gateway_port'] - recipe.computer_partition.setConnectionDict(gateway_connection) - - gateway_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_gateway_run.in')) - gateway_runner_path = recipe.createRunningWrapper("kumo-gateway", - recipe.substituteTemplate(gateway_wrapper_template_location, gateway_config)) - - return [gateway_runner_path] - - def run_kumo_tester(self, recipe): - """ Run the kumofs tester. """ - tester_config = {} - tester_config.update(recipe.options) - tester_config.update(recipe.parameter_dict) - - tester_config['tester_address'] = recipe.getGlobalIPv6Address() - # tester_config['url'] = "http://%s:5000/" % tester_config['tester_address'] - # tester_config['start_url'] = "http://%s:5000/start" % tester_config['tester_address'] - tester_config['report_path'] = recipe.log_directory - config_dict['binary'] = "%s -g -l %s -p %s -t %s %s" % (config_dict['memstrike_binary'], - config_dict['gateway_address'].strip("[]"), - str(config_dict['gateway_port']), - str(config_dict['nb_thread']), - str(config_dict['nb_request'])) - tester_config['log_directory'] = recipe.log_directory - tester_config['compress_method'] = "bz2" - - tester_connection = {} - tester_connection['url'] = "http://%s:5000/" % tester_config['tester_address'] - recipe.computer_partition.setConnectionDict(tester_connection) - - tester_wrapper_template_location = pkg_resources.resource_filename( - 'slapos.recipe.nosqltestbed', os.path.join( - 'template', 'nosqltester_run.in')) - tester_runner_path = recipe.createRunningWrapper("nosqltester", - recipe.substituteTemplate(tester_wrapper_template_location, tester_config)) - - return [tester_runner_path] - - def run_kumo_tester_and_gateway(self, recipe): - """ Run the kumofs tester and gateway on the same partition. 
""" - address = recipe.getGlobalIPv6Address() - - config_dict = {} - config_dict.update(recipe.options) - config_dict.update(recipe.parameter_dict) - - # Gateway part - config_dict['gateway_address'] = "[%s]" % address - config_dict['gateway_port'] = 11411 - config_dict['gateway_log'] = os.path.join(recipe.log_directory, "kumo-gateway.log") - - # Tester part - config_dict['tester_address'] = address - config_dict['report_path'] = recipe.log_directory - config_dict['binary'] = "%s -g -l %s -p %s -t %s %s" % (config_dict['memstrike_binary'], - config_dict['gateway_address'].strip("[]"), - str(config_dict['gateway_port']), - str(config_dict['nb_thread']), - str(config_dict['nb_request'])) - config_dict['log_directory'] = recipe.log_directory - config_dict['compress_method'] = "bz2" - - connection_dict = {} - # connection_dict['address'] = config_dict['gateway_address'] - # connection_dict['port'] = config_dict['gateway_port'] - connection_dict['url'] = "http://%s:5000/" % config_dict['tester_address'] - recipe.computer_partition.setConnectionDict(connection_dict) - - gateway_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'kumo_gateway_run.in')) - gateway_runner_path = recipe.createRunningWrapper("kumo-gateway", - recipe.substituteTemplate(gateway_wrapper_template_location, config_dict)) - - tester_wrapper_template_location = pkg_resources.resource_filename( - 'slapos.recipe.nosqltestbed', os.path.join( - 'template', 'nosqltester_run.in')) - tester_runner_path = recipe.createRunningWrapper("nosqltester", - recipe.substituteTemplate(tester_wrapper_template_location, config_dict)) - - return [gateway_runner_path, tester_runner_path] - - def run_memstrike_set(self, recipe): - """ Run memstrike in set mode. 
""" - memstrike_config = {} - memstrike_config.update(recipe.options) - memstrike_config.update(recipe.parameter_dict) - - memstrike_config['gateway_address'] = memstrike_config['gateway_address'].strip("[]") - - memstrike_connection = {} - memstrike_connection['status'] = "OK" - recipe.computer_partition.setConnectionDict(memstrike_connection) - - memstrike_wrapper_template_location = pkg_resources.resource_filename( - __name__, os.path.join( - 'template', 'memstrike_run.in')) - memstrike_runner_path = recipe.createRunningWrapper("memstrike_set", - recipe.substituteTemplate(memstrike_wrapper_template_location, memstrike_config)) - - return [memstrike_runner_path] - diff --git a/slapos/recipe/nosqltestbed/kumo/template/kumo_gateway_run.in b/slapos/recipe/nosqltestbed/kumo/template/kumo_gateway_run.in deleted file mode 100644 index 6cf3a5d9956e7192bdf5024b8f6b39825a585f12..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/template/kumo_gateway_run.in +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(gateway_binary)s -F -E -m %(manager_address)s:%(manager_port)s \ - -t %(gateway_address)s:%(gateway_port)s --verbose -o %(gateway_log)s diff --git a/slapos/recipe/nosqltestbed/kumo/template/kumo_manager_run.in b/slapos/recipe/nosqltestbed/kumo/template/kumo_manager_run.in deleted file mode 100644 index ab31a069541336b6b6fac7e57c93e030f7c3e2b8..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/template/kumo_manager_run.in +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(manager_binary)s -a -l %(manager_address)s:%(manager_port)s \ - --verbose -o %(manager_log)s diff --git a/slapos/recipe/nosqltestbed/kumo/template/kumo_server_run.in b/slapos/recipe/nosqltestbed/kumo/template/kumo_server_run.in deleted file mode 100644 index 4a8b4b148137803447cf35afe2176faf16f8e5ca..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/template/kumo_server_run.in +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(server_binary)s -l %(server_address)s:%(server_port)s \ - -L %(server_listen_port)s -m %(manager_address)s:%(manager_port)s \ - -s %(server_storage)s --verbose -o %(server_log)s diff --git a/slapos/recipe/nosqltestbed/kumo/template/kumotester_manager_run.in b/slapos/recipe/nosqltestbed/kumo/template/kumotester_manager_run.in deleted file mode 100644 index 04e89c1029b1818f21a7f9b8447a58541d511c60..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/template/kumotester_manager_run.in +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(nosqltester_manager_binary)s %(plugin_name)s -a %(address)s \ - -r %(report_path)s -s %(max_server)s -t %(max_tester)s \ - --erp5-publish-url "%(erp5_publish_url)s" --erp5-publish-project "%(erp5_publish_project)s" \ - %(software_release_url)s %(server_url)s "%(key_file)s" "%(cert_file)s" %(computer_id)s %(computer_partition_id)s \ - %(nb_thread)s %(nb_request)s diff --git a/slapos/recipe/nosqltestbed/kumo/template/memstrike_run.in b/slapos/recipe/nosqltestbed/kumo/template/memstrike_run.in deleted file mode 100644 index 
4459ba5a8fa0ecf1b1c5cccae1df693d2d7751cd..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/kumo/template/memstrike_run.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(memstrike_binary)s -s -l %(gateway_address)s -p %(gateway_port)s -t %(nb_thread)s %(nb_request)s diff --git a/slapos/recipe/nosqltestbed/template/nosqltester_manager_run.in b/slapos/recipe/nosqltestbed/template/nosqltester_manager_run.in deleted file mode 100644 index a8db610d06a8424d4b97775317d9b24edf5ea961..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/template/nosqltester_manager_run.in +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(nosqltester_manager_binary)s %(plugin_name)s -a %(address)s \ - -r %(report_path)s -s %(max_server)s -t %(max_tester)s \ - --erp5-publish-url "%(erp5_publish_url)s" --erp5-publish-project "%(erp5_publish_project)s" \ - %(software_release_url)s %(server_url)s "%(key_file)s" "%(cert_file)s" %(computer_id)s %(computer_partition_id)s diff --git a/slapos/recipe/nosqltestbed/template/nosqltester_run.in b/slapos/recipe/nosqltestbed/template/nosqltester_run.in deleted file mode 100644 index 4f203cf10786f76e008b009d5b229803921f805d..0000000000000000000000000000000000000000 --- a/slapos/recipe/nosqltestbed/template/nosqltester_run.in +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# BEWARE: This file is operated by slapos node -# BEWARE: It will be overwritten automatically -exec %(nosqltester_binary)s -m %(host_address)s -a %(tester_address)s \ - -r %(report_path)s -b "%(binary)s" -l %(log_directory)s \ - -c "%(compress_method)s" diff --git a/slapos/recipe/postgres.py b/slapos/recipe/postgres.py index 9dc090711b6a017e4fad712988962320f2af9a9d..1d05ac0e5094da6cf0e3ab53045cb33d3ed7c677 100644 --- a/slapos/recipe/postgres.py +++ b/slapos/recipe/postgres.py @@ -147,7 +147,7 @@ class Recipe(GenericBaseRecipe): with open(postgres_conf, 'w') as cfg: cfg.write(textwrap.dedent("""\ listen_addresses = '%s' - port = %s + %s logging_collector = on log_rotation_size = 50MB max_connections = 100 @@ -163,7 +163,7 @@ class Recipe(GenericBaseRecipe): unix_socket_permissions = 0700 """ % ( ','.join(set(ipv4).union(ipv6)), - self.options['port'], + 'port = %s' % self.options['port'] if self.options['port'] else '', pgdata, ))) diff --git a/slapos/recipe/request.py b/slapos/recipe/request.py index 0dccbc24fb7262dea46dca2066ffdd08edb64d1d..2bdb651f9a5f0aa0829b334f041f30408a4e58f0 100644 --- a/slapos/recipe/request.py +++ b/slapos/recipe/request.py @@ -88,7 +88,9 @@ class Recipe(object): Possible names depend on requested partition's software type. state (optional) - Requested state, default value is the state of the requester. + Requested state, default value is "started", except the state of + the requester is "stopped" (which changes the default value to + "stopped"). Output: See "return" input key. 
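In other words, the requester's own state is now propagated to the requested partition, except that a "destroyed" requester falls back to "started", so destroying a parent cannot silently cascade into destroying the whole instance tree. A minimal sketch of that defaulting rule, using plain dicts in place of the real buildout sections (the name derive_requested_state and the dict arguments are illustrative only, not part of the commit):

def derive_requested_state(options, slap_connection):
    # By default, propagate the requester's state ('started' or 'stopped').
    default_state = slap_connection.get('requested', 'started')
    if default_state not in ('started', 'stopped'):
        # e.g. 'destroyed': never propagate destruction implicitly.
        default_state = 'started'
    # An explicit 'state' option always overrides the derived default.
    return options.get('state', default_state)

assert derive_requested_state({}, {'requested': 'stopped'}) == 'stopped'
assert derive_requested_state({}, {'requested': 'destroyed'}) == 'started'
assert derive_requested_state({'state': 'stopped'}, {'requested': 'started'}) == 'stopped'

The hunk below implements exactly this rule inside the request recipe.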
@@ -120,8 +122,13 @@ class Recipe(object): slave = options.get('slave', 'false').lower() in \ librecipe.GenericBaseRecipe.TRUE_VALUES - # By default XXXX Way of doing it is ugly and dangerous - requested_state = options.get('state', buildout['slap-connection'].get('requested','started')) + # By default, propagate the state of the parent instance + # Except if parent is destroyed, as it may lead to the unexpected + # destruction of the full instance tree + default_state = buildout['slap-connection'].get('requested', 'started') + if default_state not in ('started', 'stopped'): + default_state = 'started' + requested_state = options.get('state', default_state) options['requested-state'] = requested_state slap = slapmodule.slap() diff --git a/slapos/recipe/sphinx/__init__.py b/slapos/recipe/sphinx/__init__.py deleted file mode 100644 index bac224d84992530e531cb80adbd50ebb8600e647..0000000000000000000000000000000000000000 --- a/slapos/recipe/sphinx/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -############################################################################## -# -# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved. -# -# WARNING: This program as such is intended to be used by professional -# programmers who take the whole responsibility of assessing all potential -# consequences resulting from its eventual inadequacies and bugs -# End users who are looking for a ready-to-use solution with commercial -# guarantees and support are strongly adviced to contract a Free Software -# Service Company -# -# This program is Free Software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -############################################################################## - -from slapos.recipe.librecipe import GenericBaseRecipe - -class Recipe(GenericBaseRecipe): - - def install(self): - - # Configuration file - config = dict( - data_directory=self.options['data-directory'], - ip_address=self.options['ip'], - port=self.options['sphinx-port'], - sql_port=self.options['sql-port'], - searchd_log=self.options['searchd-log'], - query_log=self.options['query-log'], - pid=self.options['pid'], - ) - sphinx_conf_path = self.createFile( - self.options['configuration-file'], - self.substituteTemplate(self.getTemplateFilename('sphinx.conf.in'), - config) - ) - - # Create init script - wrapper = self.createWrapper( - self.options['wrapper'], - (self.options['sphinx-searchd-binary'].strip(), '-c', - sphinx_conf_path, '--nodetach'), - ) - - return [wrapper, sphinx_conf_path] diff --git a/slapos/recipe/sphinx/template/sphinx.conf.in b/slapos/recipe/sphinx/template/sphinx.conf.in deleted file mode 100644 index 3e83ec492f044382eaf684f4f824ca5a6a6d4869..0000000000000000000000000000000000000000 --- a/slapos/recipe/sphinx/template/sphinx.conf.in +++ /dev/null @@ -1,596 +0,0 @@ -############################################################################# -## index definition -############################################################################# - -# realtime index -# -# you can run INSERT, REPLACE, and DELETE on this index on the fly -# using MySQL protocol (see 'listen' directive below) -index erp5 -{ - # 'rt' index type must be specified to use RT index - type = rt - - # index files path and file name, without extension - # mandatory, path must be writable, extensions will be auto-appended - path = %(data_directory)s/erp5 - - # RAM chunk size limit - # RT index will keep at most this much data in RAM, then flush to disk - # optional, default is 32M - # - # rt_mem_limit = 512M - - # full-text field declaration - # multi-value, mandatory - rt_field = SearchableText - - # unsigned integer attribute declaration - # multi-value (an arbitrary number of attributes is allowed), optional - # declares an unsigned 32-bit attribute - rt_attr_uint = uid - - # RT indexes currently support the following attribute types: - # uint, bigint, float, timestamp, string - # - # rt_attr_bigint = guid - # rt_attr_float = gpa - # rt_attr_timestamp = ts_added - # rt_attr_string = author - - # document attribute values (docinfo) storage mode - # optional, default is 'extern' - # known values are 'none', 'extern' and 'inline' - # docinfo = extern - - # memory locking for cached data (.spa and .spi), to prevent swapping - # optional, default is 0 (do not mlock) - # requires searchd to be run from root - # mlock = 0 - - # a list of morphology preprocessors to apply - # optional, default is empty - # - # builtin preprocessors are 'none', 'stem_en', 'stem_ru', 'stem_enru', - # 'soundex', and 'metaphone'; additional preprocessors available from - # libstemmer are 'libstemmer_XXX', where XXX is algorithm code - # (see libstemmer_c/libstemmer/modules.txt) - # - # morphology = stem_en, stem_ru, soundex - # morphology = libstemmer_german - # morphology = libstemmer_sv - morphology = stem_en - - # minimum word length at which to enable stemming - # optional, default is 1 (stem everything) - # - # min_stemming_len = 1 - - # stopword files list (space separated) - # optional, default is empty - # contents are plain text, charset_table and stemming are both applied - # - # stopwords = %(data_directory)s/erp5/stopwords.txt - - # 
wordforms file, in "mapfrom > mapto" plain text format - # optional, default is empty - # - # wordforms = %(data_directory)s/erp5/wordforms.txt - - # tokenizing exceptions file - # optional, default is empty - # - # plain text, case sensitive, space insensitive in map-from part - # one "Map Several Words => ToASingleOne" entry per line - # - # exceptions = %(data_directory)s/erp5/exceptions.txt - - # minimum indexed word length - # default is 1 (index everything) - min_word_len = 1 - - # charset encoding type - # optional, default is 'sbcs' - # known types are 'sbcs' (Single Byte CharSet) and 'utf-8' - charset_type = utf-8 - - # charset definition and case folding rules "table" - # optional, default value depends on charset_type - # - # defaults are configured to include English and Russian characters only - # you need to change the table to include additional ones - # this behavior MAY change in future versions - # - # 'sbcs' default value is - # charset_table = 0..9, A..Z->a..z, _, a..z, U+A8->U+B8, U+B8, U+C0..U+DF->U+E0..U+FF, U+E0..U+FF - # - # 'utf-8' default value is - # charset_table = 0..9, A..Z->a..z, _, a..z, U+410..U+42F->U+430..U+44F, U+430..U+44F - charset_table = \ - U+00C0->a, U+00C1->a, U+00C2->a, U+00C3->a, U+00C4->a, U+00C5->a, U+00E0->a, U+00E1->a, U+00E2->a, U+00E3->a, U+00E4->a, U+00E5->a, U+0100->a, U+0101->a, U+0102->a, U+0103->a, U+010300->a, U+0104->a, U+0105->a, U+01CD->a, U+01CE->a, U+01DE->a, U+01DF->a, \ - U+01E0->a, U+01E1->a, U+01FA->a, U+01FB->a, U+0200->a, U+0201->a, U+0202->a, U+0203->a, U+0226->a, U+0227->a, U+023A->a, U+0250->a, U+04D0->a, U+04D1->a, U+1D2C->a, U+1D43->a, U+1D44->a, U+1D8F->a, U+1E00->a, U+1E01->a, U+1E9A->a, U+1EA0->a, U+1EA1->a, \ - U+1EA2->a, U+1EA3->a, U+1EA4->a, U+1EA5->a, U+1EA6->a, U+1EA7->a, U+1EA8->a, U+1EA9->a, U+1EAA->a, U+1EAB->a, U+1EAC->a, U+1EAD->a, U+1EAE->a, U+1EAF->a, U+1EB0->a, U+1EB1->a, U+1EB2->a, U+1EB3->a, U+1EB4->a, U+1EB5->a, U+1EB6->a, U+1EB7->a, U+2090->a, \ - U+2C65->a, U+0180->b, U+0181->b, U+0182->b, U+0183->b, U+0243->b, U+0253->b, U+0299->b, U+16D2->b, U+1D03->b, U+1D2E->b, U+1D2F->b, U+1D47->b, U+1D6C->b, U+1D80->b, U+1E02->b, U+1E03->b, U+1E04->b, U+1E05->b, U+1E06->b, U+1E07->b, U+00C7->c, U+00E7->c, \ - U+0106->c, U+0107->c, U+0108->c, U+0109->c, U+010A->c, U+010B->c, U+010C->c, U+010D->c, U+0187->c, U+0188->c, U+023B->c, U+023C->c, U+0255->c, U+0297->c, U+1D9C->c, U+1D9D->c, U+1E08->c, U+1E09->c, U+212D->c, U+2184->c, U+010E->d, U+010F->d, U+0110->d, \ - U+0111->d, U+0189->d, U+018A->d, U+018B->d, U+018C->d, U+01C5->d, U+01F2->d, U+0221->d, U+0256->d, U+0257->d, U+1D05->d, U+1D30->d, U+1D48->d, U+1D6D->d, U+1D81->d, U+1D91->d, U+1E0A->d, U+1E0B->d, U+1E0C->d, U+1E0D->d, U+1E0E->d, U+1E0F->d, U+1E10->d, \ - U+1E11->d, U+1E12->d, U+1E13->d, U+00C8->e, U+00C9->e, U+00CA->e, U+00CB->e, U+00E8->e, U+00E9->e, U+00EA->e, U+00EB->e, U+0112->e, U+0113->e, U+0114->e, U+0115->e, U+0116->e, U+0117->e, U+0118->e, U+0119->e, U+011A->e, U+011B->e, U+018E->e, U+0190->e, \ - U+01DD->e, U+0204->e, U+0205->e, U+0206->e, U+0207->e, U+0228->e, U+0229->e, U+0246->e, U+0247->e, U+0258->e, U+025B->e, U+025C->e, U+025D->e, U+025E->e, U+029A->e, U+1D07->e, U+1D08->e, U+1D31->e, U+1D32->e, U+1D49->e, U+1D4B->e, U+1D4C->e, U+1D92->e, \ - U+1D93->e, U+1D94->e, U+1D9F->e, U+1E14->e, U+1E15->e, U+1E16->e, U+1E17->e, U+1E18->e, U+1E19->e, U+1E1A->e, U+1E1B->e, U+1E1C->e, U+1E1D->e, U+1EB8->e, U+1EB9->e, U+1EBA->e, U+1EBB->e, U+1EBC->e, U+1EBD->e, U+1EBE->e, U+1EBF->e, U+1EC0->e, U+1EC1->e, \ - U+1EC2->e, U+1EC3->e, U+1EC4->e, 
U+1EC5->e, U+1EC6->e, U+1EC7->e, U+2091->e, U+0191->f, U+0192->f, U+1D6E->f, U+1D82->f, U+1DA0->f, U+1E1E->f, U+1E1F->f, U+011C->g, U+011D->g, U+011E->g, U+011F->g, U+0120->g, U+0121->g, U+0122->g, U+0123->g, U+0193->g, \ - U+01E4->g, U+01E5->g, U+01E6->g, U+01E7->g, U+01F4->g, U+01F5->g, U+0260->g, U+0261->g, U+0262->g, U+029B->g, U+1D33->g, U+1D4D->g, U+1D77->g, U+1D79->g, U+1D83->g, U+1DA2->g, U+1E20->g, U+1E21->g, U+0124->h, U+0125->h, U+0126->h, U+0127->h, U+021E->h, \ - U+021F->h, U+0265->h, U+0266->h, U+029C->h, U+02AE->h, U+02AF->h, U+02B0->h, U+02B1->h, U+1D34->h, U+1DA3->h, U+1E22->h, U+1E23->h, U+1E24->h, U+1E25->h, U+1E26->h, U+1E27->h, U+1E28->h, U+1E29->h, U+1E2A->h, U+1E2B->h, U+1E96->h, U+210C->h, U+2C67->h, \ - U+2C68->h, U+2C75->h, U+2C76->h, U+00CC->i, U+00CD->i, U+00CE->i, U+00CF->i, U+00EC->i, U+00ED->i, U+00EE->i, U+00EF->i, U+010309->i, U+0128->i, U+0129->i, U+012A->i, U+012B->i, U+012C->i, U+012D->i, U+012E->i, U+012F->i, U+0130->i, U+0131->i, U+0197->i, \ - U+01CF->i, U+01D0->i, U+0208->i, U+0209->i, U+020A->i, U+020B->i, U+0268->i, U+026A->i, U+040D->i, U+0418->i, U+0419->i, U+0438->i, U+0439->i, U+0456->i, U+1D09->i, U+1D35->i, U+1D4E->i, U+1D62->i, U+1D7B->i, U+1D96->i, U+1DA4->i, U+1DA6->i, U+1DA7->i, \ - U+1E2C->i, U+1E2D->i, U+1E2E->i, U+1E2F->i, U+1EC8->i, U+1EC9->i, U+1ECA->i, U+1ECB->i, U+2071->i, U+2111->i, U+0134->j, U+0135->j, U+01C8->j, U+01CB->j, U+01F0->j, U+0237->j, U+0248->j, U+0249->j, U+025F->j, U+0284->j, U+029D->j, U+02B2->j, U+1D0A->j, \ - U+1D36->j, U+1DA1->j, U+1DA8->j, U+0136->k, U+0137->k, U+0198->k, U+0199->k, U+01E8->k, U+01E9->k, U+029E->k, U+1D0B->k, U+1D37->k, U+1D4F->k, U+1D84->k, U+1E30->k, U+1E31->k, U+1E32->k, U+1E33->k, U+1E34->k, U+1E35->k, U+2C69->k, U+2C6A->k, U+0139->l, \ - U+013A->l, U+013B->l, U+013C->l, U+013D->l, U+013E->l, U+013F->l, U+0140->l, U+0141->l, U+0142->l, U+019A->l, U+01C8->l, U+0234->l, U+023D->l, U+026B->l, U+026C->l, U+026D->l, U+029F->l, U+02E1->l, U+1D0C->l, U+1D38->l, U+1D85->l, U+1DA9->l, U+1DAA->l, \ - U+1DAB->l, U+1E36->l, U+1E37->l, U+1E38->l, U+1E39->l, U+1E3A->l, U+1E3B->l, U+1E3C->l, U+1E3D->l, U+2C60->l, U+2C61->l, U+2C62->l, U+019C->m, U+026F->m, U+0270->m, U+0271->m, U+1D0D->m, U+1D1F->m, U+1D39->m, U+1D50->m, U+1D5A->m, U+1D6F->m, U+1D86->m, \ - U+1DAC->m, U+1DAD->m, U+1E3E->m, U+1E3F->m, U+1E40->m, U+1E41->m, U+1E42->m, U+1E43->m, U+00D1->n, U+00F1->n, U+0143->n, U+0144->n, U+0145->n, U+0146->n, U+0147->n, U+0148->n, U+0149->n, U+019D->n, U+019E->n, U+01CB->n, U+01F8->n, U+01F9->n, U+0220->n, \ - U+0235->n, U+0272->n, U+0273->n, U+0274->n, U+1D0E->n, U+1D3A->n, U+1D3B->n, U+1D70->n, U+1D87->n, U+1DAE->n, U+1DAF->n, U+1DB0->n, U+1E44->n, U+1E45->n, U+1E46->n, U+1E47->n, U+1E48->n, U+1E49->n, U+1E4A->n, U+1E4B->n, U+207F->n, U+00D2->o, U+00D3->o, \ - U+00D4->o, U+00D5->o, U+00D6->o, U+00D8->o, U+00F2->o, U+00F3->o, U+00F4->o, U+00F5->o, U+00F6->o, U+00F8->o, U+01030F->o, U+014C->o, U+014D->o, U+014E->o, U+014F->o, U+0150->o, U+0151->o, U+0186->o, U+019F->o, U+01A0->o, U+01A1->o, U+01D1->o, U+01D2->o, \ - U+01EA->o, U+01EB->o, U+01EC->o, U+01ED->o, U+01FE->o, U+01FF->o, U+020C->o, U+020D->o, U+020E->o, U+020F->o, U+022A->o, U+022B->o, U+022C->o, U+022D->o, U+022E->o, U+022F->o, U+0230->o, U+0231->o, U+0254->o, U+0275->o, U+043E->o, U+04E6->o, U+04E7->o, \ - U+04E8->o, U+04E9->o, U+04EA->o, U+04EB->o, U+1D0F->o, U+1D10->o, U+1D11->o, U+1D12->o, U+1D13->o, U+1D16->o, U+1D17->o, U+1D3C->o, U+1D52->o, U+1D53->o, U+1D54->o, U+1D55->o, U+1D97->o, U+1DB1->o, U+1E4C->o, U+1E4D->o, U+1E4E->o, U+1E4F->o, 
U+1E50->o, \ - U+1E51->o, U+1E52->o, U+1E53->o, U+1ECC->o, U+1ECD->o, U+1ECE->o, U+1ECF->o, U+1ED0->o, U+1ED1->o, U+1ED2->o, U+1ED3->o, U+1ED4->o, U+1ED5->o, U+1ED6->o, U+1ED7->o, U+1ED8->o, U+1ED9->o, U+1EDA->o, U+1EDB->o, U+1EDC->o, U+1EDD->o, U+1EDE->o, U+1EDF->o, \ - U+1EE0->o, U+1EE1->o, U+1EE2->o, U+1EE3->o, U+2092->o, U+2C9E->o, U+2C9F->o, U+01A4->p, U+01A5->p, U+1D18->p, U+1D3E->p, U+1D56->p, U+1D71->p, U+1D7D->p, U+1D88->p, U+1E54->p, U+1E55->p, U+1E56->p, U+1E57->p, U+2C63->p, U+024A->q, U+024B->q, U+02A0->q, \ - U+0154->r, U+0155->r, U+0156->r, U+0157->r, U+0158->r, U+0159->r, U+0210->r, U+0211->r, U+0212->r, U+0213->r, U+024C->r, U+024D->r, U+0279->r, U+027A->r, U+027B->r, U+027C->r, U+027D->r, U+027E->r, U+027F->r, U+0280->r, U+0281->r, U+02B3->r, U+02B4->r, \ - U+02B5->r, U+02B6->r, U+1D19->r, U+1D1A->r, U+1D3F->r, U+1D63->r, U+1D72->r, U+1D73->r, U+1D89->r, U+1DCA->r, U+1E58->r, U+1E59->r, U+1E5A->r, U+1E5B->r, U+1E5C->r, U+1E5D->r, U+1E5E->r, U+1E5F->r, U+211C->r, U+2C64->r, U+00DF->s, U+015A->s, U+015B->s, \ - U+015C->s, U+015D->s, U+015E->s, U+015F->s, U+0160->s, U+0161->s, U+017F->s, U+0218->s, U+0219->s, U+023F->s, U+0282->s, U+02E2->s, U+1D74->s, U+1D8A->s, U+1DB3->s, U+1E60->s, U+1E61->s, U+1E62->s, U+1E63->s, U+1E64->s, U+1E65->s, U+1E66->s, U+1E67->s, \ - U+1E68->s, U+1E69->s, U+1E9B->s, U+0162->t, U+0163->t, U+0164->t, U+0165->t, U+0166->t, U+0167->t, U+01AB->t, U+01AC->t, U+01AD->t, U+01AE->t, U+021A->t, U+021B->t, U+0236->t, U+023E->t, U+0287->t, U+0288->t, U+1D1B->t, U+1D40->t, U+1D57->t, U+1D75->t, \ - U+1DB5->t, U+1E6A->t, U+1E6B->t, U+1E6C->t, U+1E6D->t, U+1E6E->t, U+1E6F->t, U+1E70->t, U+1E71->t, U+1E97->t, U+2C66->t, U+00D9->u, U+00DA->u, U+00DB->u, U+00DC->u, U+00F9->u, U+00FA->u, U+00FB->u, U+00FC->u, U+010316->u, U+0168->u, U+0169->u, U+016A->u, \ - U+016B->u, U+016C->u, U+016D->u, U+016E->u, U+016F->u, U+0170->u, U+0171->u, U+0172->u, U+0173->u, U+01AF->u, U+01B0->u, U+01D3->u, U+01D4->u, U+01D5->u, U+01D6->u, U+01D7->u, U+01D8->u, U+01D9->u, U+01DA->u, U+01DB->u, U+01DC->u, U+0214->u, U+0215->u, \ - U+0216->u, U+0217->u, U+0244->u, U+0289->u, U+1D1C->u, U+1D1D->u, U+1D1E->u, U+1D41->u, U+1D58->u, U+1D59->u, U+1D64->u, U+1D7E->u, U+1D99->u, U+1DB6->u, U+1DB8->u, U+1E72->u, U+1E73->u, U+1E74->u, U+1E75->u, U+1E76->u, U+1E77->u, U+1E78->u, U+1E79->u, \ - U+1E7A->u, U+1E7B->u, U+1EE4->u, U+1EE5->u, U+1EE6->u, U+1EE7->u, U+1EE8->u, U+1EE9->u, U+1EEA->u, U+1EEB->u, U+1EEC->u, U+1EED->u, U+1EEE->u, U+1EEF->u, U+1EF0->u, U+1EF1->u, U+01B2->v, U+0245->v, U+028B->v, U+028C->v, U+1D20->v, U+1D5B->v, U+1D65->v, \ - U+1D8C->v, U+1DB9->v, U+1DBA->v, U+1E7C->v, U+1E7D->v, U+1E7E->v, U+1E7F->v, U+2C74->v, U+0174->w, U+0175->w, U+028D->w, U+02B7->w, U+1D21->w, U+1D42->w, U+1E80->w, U+1E81->w, U+1E82->w, U+1E83->w, U+1E84->w, U+1E85->w, U+1E86->w, U+1E87->w, U+1E88->w, \ - U+1E89->w, U+1E98->w, U+02E3->x, U+1D8D->x, U+1E8A->x, U+1E8B->x, U+1E8C->x, U+1E8D->x, U+2093->x, U+00DD->y, U+00FD->y, U+00FF->y, U+0176->y, U+0177->y, U+0178->y, U+01B3->y, U+01B4->y, U+0232->y, U+0233->y, U+024E->y, U+024F->y, U+028E->y, U+028F->y, \ - U+02B8->y, U+1E8E->y, U+1E8F->y, U+1E99->y, U+1EF2->y, U+1EF3->y, U+1EF4->y, U+1EF5->y, U+1EF6->y, U+1EF7->y, U+1EF8->y, U+1EF9->y, U+0179->z, U+017A->z, U+017B->z, U+017C->z, U+017D->z, U+017E->z, U+01B5->z, U+01B6->z, U+0224->z, U+0225->z, U+0240->z, \ - U+0290->z, U+0291->z, U+1D22->z, U+1D76->z, U+1D8E->z, U+1DBB->z, U+1DBC->z, U+1DBD->z, U+1E90->z, U+1E91->z, U+1E92->z, U+1E93->z, U+1E94->z, U+1E95->z, U+2128->z, U+2C6B->z, U+2C6C->z, 
U+00C6->U+00E6, U+01E2->U+00E6, U+01E3->U+00E6, U+01FC->U+00E6, \ - U+01FD->U+00E6, U+1D01->U+00E6, U+1D02->U+00E6, U+1D2D->U+00E6, U+1D46->U+00E6, U+00E6, U+0622->U+0627, U+0623->U+0627, U+0624->U+0648, U+0625->U+0627, U+0626->U+064A, U+06C0->U+06D5, U+06C2->U+06C1, U+06D3->U+06D2, U+FB50->U+0671, U+FB51->U+0671, U+FB52->U+067B, \ - U+FB53->U+067B, U+FB54->U+067B, U+FB56->U+067E, U+FB57->U+067E, U+FB58->U+067E, U+FB5A->U+0680, U+FB5B->U+0680, U+FB5C->U+0680, U+FB5E->U+067A, U+FB5F->U+067A, U+FB60->U+067A, U+FB62->U+067F, U+FB63->U+067F, U+FB64->U+067F, U+FB66->U+0679, U+FB67->U+0679, \ - U+FB68->U+0679, U+FB6A->U+06A4, U+FB6B->U+06A4, U+FB6C->U+06A4, U+FB6E->U+06A6, U+FB6F->U+06A6, U+FB70->U+06A6, U+FB72->U+0684, U+FB73->U+0684, U+FB74->U+0684, U+FB76->U+0683, U+FB77->U+0683, U+FB78->U+0683, U+FB7A->U+0686, U+FB7B->U+0686, U+FB7C->U+0686, \ - U+FB7E->U+0687, U+FB7F->U+0687, U+FB80->U+0687, U+FB82->U+068D, U+FB83->U+068D, U+FB84->U+068C, U+FB85->U+068C, U+FB86->U+068E, U+FB87->U+068E, U+FB88->U+0688, U+FB89->U+0688, U+FB8A->U+0698, U+FB8B->U+0698, U+FB8C->U+0691, U+FB8D->U+0691, U+FB8E->U+06A9, \ - U+FB8F->U+06A9, U+FB90->U+06A9, U+FB92->U+06AF, U+FB93->U+06AF, U+FB94->U+06AF, U+FB96->U+06B3, U+FB97->U+06B3, U+FB98->U+06B3, U+FB9A->U+06B1, U+FB9B->U+06B1, U+FB9C->U+06B1, U+FB9E->U+06BA, U+FB9F->U+06BA, U+FBA0->U+06BB, U+FBA1->U+06BB, U+FBA2->U+06BB, \ - U+FBA4->U+06C0, U+FBA5->U+06C0, U+FBA6->U+06C1, U+FBA7->U+06C1, U+FBA8->U+06C1, U+FBAA->U+06BE, U+FBAB->U+06BE, U+FBAC->U+06BE, U+FBAE->U+06D2, U+FBAF->U+06D2, U+FBB0->U+06D3, U+FBB1->U+06D3, U+FBD3->U+06AD, U+FBD4->U+06AD, U+FBD5->U+06AD, U+FBD7->U+06C7, \ - U+FBD8->U+06C7, U+FBD9->U+06C6, U+FBDA->U+06C6, U+FBDB->U+06C8, U+FBDC->U+06C8, U+FBDD->U+0677, U+FBDE->U+06CB, U+FBDF->U+06CB, U+FBE0->U+06C5, U+FBE1->U+06C5, U+FBE2->U+06C9, U+FBE3->U+06C9, U+FBE4->U+06D0, U+FBE5->U+06D0, U+FBE6->U+06D0, U+FBE8->U+0649, \ - U+FBFC->U+06CC, U+FBFD->U+06CC, U+FBFE->U+06CC, U+0621, U+0627..U+063A, U+0641..U+064A, U+0660..U+0669, U+066E, U+066F, U+0671..U+06BF, U+06C1, U+06C3..U+06D2, U+06D5, U+06EE..U+06FC, U+06FF, U+0750..U+076D, U+FB55, U+FB59, U+FB5D, U+FB61, U+FB65, U+FB69, \ - U+FB6D, U+FB71, U+FB75, U+FB79, U+FB7D, U+FB81, U+FB91, U+FB95, U+FB99, U+FB9D, U+FBA3, U+FBA9, U+FBAD, U+FBD6, U+FBE7, U+FBE9, U+FBFF, U+0531..U+0556->U+0561..U+0586, U+0561..U+0586, U+0587, U+09DC->U+09A1, U+09DD->U+09A2, U+09DF->U+09AF, U+09F0->U+09AC, \ - U+09F1->U+09AC, U+0985..U+0990, U+0993..U+09B0, U+09B2, U+09B6..U+09B9, U+09CE, U+09E0, U+09E1, U+09E6..U+09EF, U+F900->U+8C48, U+F901->U+66F4, U+F902->U+8ECA, U+F903->U+8CC8, U+F904->U+6ED1, U+F905->U+4E32, U+F906->U+53E5, U+F907->U+9F9C, U+F908->U+9F9C, \ - U+F909->U+5951, U+F90A->U+91D1, U+F90B->U+5587, U+F90C->U+5948, U+F90D->U+61F6, U+F90E->U+7669, U+F90F->U+7F85, U+F910->U+863F, U+F911->U+87BA, U+F912->U+88F8, U+F913->U+908F, U+F914->U+6A02, U+F915->U+6D1B, U+F916->U+70D9, U+F917->U+73DE, U+F918->U+843D, \ - U+F919->U+916A, U+F91A->U+99F1, U+F91B->U+4E82, U+F91C->U+5375, U+F91D->U+6B04, U+F91E->U+721B, U+F91F->U+862D, U+F920->U+9E1E, U+F921->U+5D50, U+F922->U+6FEB, U+F923->U+85CD, U+F924->U+8964, U+F925->U+62C9, U+F926->U+81D8, U+F927->U+881F, U+F928->U+5ECA, \ - U+F929->U+6717, U+F92A->U+6D6A, U+F92B->U+72FC, U+F92C->U+90CE, U+F92D->U+4F86, U+F92E->U+51B7, U+F92F->U+52DE, U+F930->U+64C4, U+F931->U+6AD3, U+F932->U+7210, U+F933->U+76E7, U+F934->U+8001, U+F935->U+8606, U+F936->U+865C, U+F937->U+8DEF, U+F938->U+9732, \ - U+F939->U+9B6F, U+F93A->U+9DFA, U+F93B->U+788C, U+F93C->U+797F, U+F93D->U+7DA0, U+F93E->U+83C9, 
U+F93F->U+9304, U+F940->U+9E7F, U+F941->U+8AD6, U+F942->U+58DF, U+F943->U+5F04, U+F944->U+7C60, U+F945->U+807E, U+F946->U+7262, U+F947->U+78CA, U+F948->U+8CC2, \ - U+F949->U+96F7, U+F94A->U+58D8, U+F94B->U+5C62, U+F94C->U+6A13, U+F94D->U+6DDA, U+F94E->U+6F0F, U+F94F->U+7D2F, U+F950->U+7E37, U+F951->U+964B, U+F952->U+52D2, U+F953->U+808B, U+F954->U+51DC, U+F955->U+51CC, U+F956->U+7A1C, U+F957->U+7DBE, U+F958->U+83F1, \ - U+F959->U+9675, U+F95A->U+8B80, U+F95B->U+62CF, U+F95C->U+6A02, U+F95D->U+8AFE, U+F95E->U+4E39, U+F95F->U+5BE7, U+F960->U+6012, U+F961->U+7387, U+F962->U+7570, U+F963->U+5317, U+F964->U+78FB, U+F965->U+4FBF, U+F966->U+5FA9, U+F967->U+4E0D, U+F968->U+6CCC, \ - U+F969->U+6578, U+F96A->U+7D22, U+F96B->U+53C3, U+F96C->U+585E, U+F96D->U+7701, U+F96E->U+8449, U+F96F->U+8AAA, U+F970->U+6BBA, U+F971->U+8FB0, U+F972->U+6C88, U+F973->U+62FE, U+F974->U+82E5, U+F975->U+63A0, U+F976->U+7565, U+F977->U+4EAE, U+F978->U+5169, \ - U+F979->U+51C9, U+F97A->U+6881, U+F97B->U+7CE7, U+F97C->U+826F, U+F97D->U+8AD2, U+F97E->U+91CF, U+F97F->U+52F5, U+F980->U+5442, U+F981->U+5973, U+F982->U+5EEC, U+F983->U+65C5, U+F984->U+6FFE, U+F985->U+792A, U+F986->U+95AD, U+F987->U+9A6A, U+F988->U+9E97, \ - U+F989->U+9ECE, U+F98A->U+529B, U+F98B->U+66C6, U+F98C->U+6B77, U+F98D->U+8F62, U+F98E->U+5E74, U+F98F->U+6190, U+F990->U+6200, U+F991->U+649A, U+F992->U+6F23, U+F993->U+7149, U+F994->U+7489, U+F995->U+79CA, U+F996->U+7DF4, U+F997->U+806F, U+F998->U+8F26, \ - U+F999->U+84EE, U+F99A->U+9023, U+F99B->U+934A, U+F99C->U+5217, U+F99D->U+52A3, U+F99E->U+54BD, U+F99F->U+70C8, U+F9A0->U+88C2, U+F9A1->U+8AAA, U+F9A2->U+5EC9, U+F9A3->U+5FF5, U+F9A4->U+637B, U+F9A5->U+6BAE, U+F9A6->U+7C3E, U+F9A7->U+7375, U+F9A8->U+4EE4, \ - U+F9A9->U+56F9, U+F9AA->U+5BE7, U+F9AB->U+5DBA, U+F9AC->U+601C, U+F9AD->U+73B2, U+F9AE->U+7469, U+F9AF->U+7F9A, U+F9B0->U+8046, U+F9B1->U+9234, U+F9B2->U+96F6, U+F9B3->U+9748, U+F9B4->U+9818, U+F9B5->U+4F8B, U+F9B6->U+79AE, U+F9B7->U+91B4, U+F9B8->U+96B8, \ - U+F9B9->U+60E1, U+F9BA->U+4E86, U+F9BB->U+50DA, U+F9BC->U+5BEE, U+F9BD->U+5C3F, U+F9BE->U+6599, U+F9BF->U+6A02, U+F9C0->U+71CE, U+F9C1->U+7642, U+F9C2->U+84FC, U+F9C3->U+907C, U+F9C4->U+9F8D, U+F9C5->U+6688, U+F9C6->U+962E, U+F9C7->U+5289, U+F9C8->U+677B, \ - U+F9C9->U+67F3, U+F9CA->U+6D41, U+F9CB->U+6E9C, U+F9CC->U+7409, U+F9CD->U+7559, U+F9CE->U+786B, U+F9CF->U+7D10, U+F9D0->U+985E, U+F9D1->U+516D, U+F9D2->U+622E, U+F9D3->U+9678, U+F9D4->U+502B, U+F9D5->U+5D19, U+F9D6->U+6DEA, U+F9D7->U+8F2A, U+F9D8->U+5F8B, \ - U+F9D9->U+6144, U+F9DA->U+6817, U+F9DB->U+7387, U+F9DC->U+9686, U+F9DD->U+5229, U+F9DE->U+540F, U+F9DF->U+5C65, U+F9E0->U+6613, U+F9E1->U+674E, U+F9E2->U+68A8, U+F9E3->U+6CE5, U+F9E4->U+7406, U+F9E5->U+75E2, U+F9E6->U+7F79, U+F9E7->U+88CF, U+F9E8->U+88E1, \ - U+F9E9->U+91CC, U+F9EA->U+96E2, U+F9EB->U+533F, U+F9EC->U+6EBA, U+F9ED->U+541D, U+F9EE->U+71D0, U+F9EF->U+7498, U+F9F0->U+85FA, U+F9F1->U+96A3, U+F9F2->U+9C57, U+F9F3->U+9E9F, U+F9F4->U+6797, U+F9F5->U+6DCB, U+F9F6->U+81E8, U+F9F7->U+7ACB, U+F9F8->U+7B20, \ - U+F9F9->U+7C92, U+F9FA->U+72C0, U+F9FB->U+7099, U+F9FC->U+8B58, U+F9FD->U+4EC0, U+F9FE->U+8336, U+F9FF->U+523A, U+FA00->U+5207, U+FA01->U+5EA6, U+FA02->U+62D3, U+FA03->U+7CD6, U+FA04->U+5B85, U+FA05->U+6D1E, U+FA06->U+66B4, U+FA07->U+8F3B, U+FA08->U+884C, \ - U+FA09->U+964D, U+FA0A->U+898B, U+FA0B->U+5ED3, U+FA0C->U+5140, U+FA0D->U+55C0, U+FA10->U+585A, U+FA12->U+6674, U+FA15->U+51DE, U+FA16->U+732A, U+FA17->U+76CA, U+FA18->U+793C, U+FA19->U+795E, U+FA1A->U+7965, U+FA1B->U+798F, U+FA1C->U+9756, U+FA1D->U+7CBE, \ - 
U+FA1E->U+7FBD, U+FA20->U+8612, U+FA22->U+8AF8, U+FA25->U+9038, U+FA26->U+90FD, U+FA2A->U+98EF, U+FA2B->U+98FC, U+FA2C->U+9928, U+FA2D->U+9DB4, U+FA30->U+4FAE, U+FA31->U+50E7, U+FA32->U+514D, U+FA33->U+52C9, U+FA34->U+52E4, U+FA35->U+5351, U+FA36->U+559D, \ - U+FA37->U+5606, U+FA38->U+5668, U+FA39->U+5840, U+FA3A->U+58A8, U+FA3B->U+5C64, U+FA3C->U+5C6E, U+FA3D->U+6094, U+FA3E->U+6168, U+FA3F->U+618E, U+FA40->U+61F2, U+FA41->U+654F, U+FA42->U+65E2, U+FA43->U+6691, U+FA44->U+6885, U+FA45->U+6D77, U+FA46->U+6E1A, \ - U+FA47->U+6F22, U+FA48->U+716E, U+FA49->U+722B, U+FA4A->U+7422, U+FA4B->U+7891, U+FA4C->U+793E, U+FA4D->U+7949, U+FA4E->U+7948, U+FA4F->U+7950, U+FA50->U+7956, U+FA51->U+795D, U+FA52->U+798D, U+FA53->U+798E, U+FA54->U+7A40, U+FA55->U+7A81, U+FA56->U+7BC0, \ - U+FA57->U+7DF4, U+FA58->U+7E09, U+FA59->U+7E41, U+FA5A->U+7F72, U+FA5B->U+8005, U+FA5C->U+81ED, U+FA5D->U+8279, U+FA5E->U+8279, U+FA5F->U+8457, U+FA60->U+8910, U+FA61->U+8996, U+FA62->U+8B01, U+FA63->U+8B39, U+FA64->U+8CD3, U+FA65->U+8D08, U+FA66->U+8FB6, \ - U+FA67->U+9038, U+FA68->U+96E3, U+FA69->U+97FF, U+FA6A->U+983B, U+FA70->U+4E26, U+FA71->U+51B5, U+FA72->U+5168, U+FA73->U+4F80, U+FA74->U+5145, U+FA75->U+5180, U+FA76->U+52C7, U+FA77->U+52FA, U+FA78->U+559D, U+FA79->U+5555, U+FA7A->U+5599, U+FA7B->U+55E2, \ - U+FA7C->U+585A, U+FA7D->U+58B3, U+FA7E->U+5944, U+FA7F->U+5954, U+FA80->U+5A62, U+FA81->U+5B28, U+FA82->U+5ED2, U+FA83->U+5ED9, U+FA84->U+5F69, U+FA85->U+5FAD, U+FA86->U+60D8, U+FA87->U+614E, U+FA88->U+6108, U+FA89->U+618E, U+FA8A->U+6160, U+FA8B->U+61F2, \ - U+FA8C->U+6234, U+FA8D->U+63C4, U+FA8E->U+641C, U+FA8F->U+6452, U+FA90->U+6556, U+FA91->U+6674, U+FA92->U+6717, U+FA93->U+671B, U+FA94->U+6756, U+FA95->U+6B79, U+FA96->U+6BBA, U+FA97->U+6D41, U+FA98->U+6EDB, U+FA99->U+6ECB, U+FA9A->U+6F22, U+FA9B->U+701E, \ - U+FA9C->U+716E, U+FA9D->U+77A7, U+FA9E->U+7235, U+FA9F->U+72AF, U+FAA0->U+732A, U+FAA1->U+7471, U+FAA2->U+7506, U+FAA3->U+753B, U+FAA4->U+761D, U+FAA5->U+761F, U+FAA6->U+76CA, U+FAA7->U+76DB, U+FAA8->U+76F4, U+FAA9->U+774A, U+FAAA->U+7740, U+FAAB->U+78CC, \ - U+FAAC->U+7AB1, U+FAAD->U+7BC0, U+FAAE->U+7C7B, U+FAAF->U+7D5B, U+FAB0->U+7DF4, U+FAB1->U+7F3E, U+FAB2->U+8005, U+FAB3->U+8352, U+FAB4->U+83EF, U+FAB5->U+8779, U+FAB6->U+8941, U+FAB7->U+8986, U+FAB8->U+8996, U+FAB9->U+8ABF, U+FABA->U+8AF8, U+FABB->U+8ACB, \ - U+FABC->U+8B01, U+FABD->U+8AFE, U+FABE->U+8AED, U+FABF->U+8B39, U+FAC0->U+8B8A, U+FAC1->U+8D08, U+FAC2->U+8F38, U+FAC3->U+9072, U+FAC4->U+9199, U+FAC5->U+9276, U+FAC6->U+967C, U+FAC7->U+96E3, U+FAC8->U+9756, U+FAC9->U+97DB, U+FACA->U+97FF, U+FACB->U+980B, \ - U+FACC->U+983B, U+FACD->U+9B12, U+FACE->U+9F9C, U+FACF->U+2284A, U+FAD0->U+22844, U+FAD1->U+233D5, U+FAD2->U+3B9D, U+FAD3->U+4018, U+FAD4->U+4039, U+FAD5->U+25249, U+FAD6->U+25CD0, U+FAD7->U+27ED3, U+FAD8->U+9F43, U+FAD9->U+9F8E, U+2F800->U+4E3D, U+2F801->U+4E38, \ - U+2F802->U+4E41, U+2F803->U+20122, U+2F804->U+4F60, U+2F805->U+4FAE, U+2F806->U+4FBB, U+2F807->U+5002, U+2F808->U+507A, U+2F809->U+5099, U+2F80A->U+50E7, U+2F80B->U+50CF, U+2F80C->U+349E, U+2F80D->U+2063A, U+2F80E->U+514D, U+2F80F->U+5154, U+2F810->U+5164, \ - U+2F811->U+5177, U+2F812->U+2051C, U+2F813->U+34B9, U+2F814->U+5167, U+2F815->U+518D, U+2F816->U+2054B, U+2F817->U+5197, U+2F818->U+51A4, U+2F819->U+4ECC, U+2F81A->U+51AC, U+2F81B->U+51B5, U+2F81C->U+291DF, U+2F81D->U+51F5, U+2F81E->U+5203, U+2F81F->U+34DF, \ - U+2F820->U+523B, U+2F821->U+5246, U+2F822->U+5272, U+2F823->U+5277, U+2F824->U+3515, U+2F825->U+52C7, U+2F826->U+52C9, U+2F827->U+52E4, U+2F828->U+52FA, 
U+2F829->U+5305, U+2F82A->U+5306, U+2F82B->U+5317, U+2F82C->U+5349, U+2F82D->U+5351, U+2F82E->U+535A, \ - U+2F82F->U+5373, U+2F830->U+537D, U+2F831->U+537F, U+2F832->U+537F, U+2F833->U+537F, U+2F834->U+20A2C, U+2F835->U+7070, U+2F836->U+53CA, U+2F837->U+53DF, U+2F838->U+20B63, U+2F839->U+53EB, U+2F83A->U+53F1, U+2F83B->U+5406, U+2F83C->U+549E, U+2F83D->U+5438, \ - U+2F83E->U+5448, U+2F83F->U+5468, U+2F840->U+54A2, U+2F841->U+54F6, U+2F842->U+5510, U+2F843->U+5553, U+2F844->U+5563, U+2F845->U+5584, U+2F846->U+5584, U+2F847->U+5599, U+2F848->U+55AB, U+2F849->U+55B3, U+2F84A->U+55C2, U+2F84B->U+5716, U+2F84C->U+5606, \ - U+2F84D->U+5717, U+2F84E->U+5651, U+2F84F->U+5674, U+2F850->U+5207, U+2F851->U+58EE, U+2F852->U+57CE, U+2F853->U+57F4, U+2F854->U+580D, U+2F855->U+578B, U+2F856->U+5832, U+2F857->U+5831, U+2F858->U+58AC, U+2F859->U+214E4, U+2F85A->U+58F2, U+2F85B->U+58F7, \ - U+2F85C->U+5906, U+2F85D->U+591A, U+2F85E->U+5922, U+2F85F->U+5962, U+2F860->U+216A8, U+2F861->U+216EA, U+2F862->U+59EC, U+2F863->U+5A1B, U+2F864->U+5A27, U+2F865->U+59D8, U+2F866->U+5A66, U+2F867->U+36EE, U+2F868->U+36FC, U+2F869->U+5B08, U+2F86A->U+5B3E, \ - U+2F86B->U+5B3E, U+2F86C->U+219C8, U+2F86D->U+5BC3, U+2F86E->U+5BD8, U+2F86F->U+5BE7, U+2F870->U+5BF3, U+2F871->U+21B18, U+2F872->U+5BFF, U+2F873->U+5C06, U+2F874->U+5F53, U+2F875->U+5C22, U+2F876->U+3781, U+2F877->U+5C60, U+2F878->U+5C6E, U+2F879->U+5CC0, \ - U+2F87A->U+5C8D, U+2F87B->U+21DE4, U+2F87C->U+5D43, U+2F87D->U+21DE6, U+2F87E->U+5D6E, U+2F87F->U+5D6B, U+2F880->U+5D7C, U+2F881->U+5DE1, U+2F882->U+5DE2, U+2F883->U+382F, U+2F884->U+5DFD, U+2F885->U+5E28, U+2F886->U+5E3D, U+2F887->U+5E69, U+2F888->U+3862, \ - U+2F889->U+22183, U+2F88A->U+387C, U+2F88B->U+5EB0, U+2F88C->U+5EB3, U+2F88D->U+5EB6, U+2F88E->U+5ECA, U+2F88F->U+2A392, U+2F890->U+5EFE, U+2F891->U+22331, U+2F892->U+22331, U+2F893->U+8201, U+2F894->U+5F22, U+2F895->U+5F22, U+2F896->U+38C7, U+2F897->U+232B8, \ - U+2F898->U+261DA, U+2F899->U+5F62, U+2F89A->U+5F6B, U+2F89B->U+38E3, U+2F89C->U+5F9A, U+2F89D->U+5FCD, U+2F89E->U+5FD7, U+2F89F->U+5FF9, U+2F8A0->U+6081, U+2F8A1->U+393A, U+2F8A2->U+391C, U+2F8A3->U+6094, U+2F8A4->U+226D4, U+2F8A5->U+60C7, U+2F8A6->U+6148, \ - U+2F8A7->U+614C, U+2F8A8->U+614E, U+2F8A9->U+614C, U+2F8AA->U+617A, U+2F8AB->U+618E, U+2F8AC->U+61B2, U+2F8AD->U+61A4, U+2F8AE->U+61AF, U+2F8AF->U+61DE, U+2F8B0->U+61F2, U+2F8B1->U+61F6, U+2F8B2->U+6210, U+2F8B3->U+621B, U+2F8B4->U+625D, U+2F8B5->U+62B1, \ - U+2F8B6->U+62D4, U+2F8B7->U+6350, U+2F8B8->U+22B0C, U+2F8B9->U+633D, U+2F8BA->U+62FC, U+2F8BB->U+6368, U+2F8BC->U+6383, U+2F8BD->U+63E4, U+2F8BE->U+22BF1, U+2F8BF->U+6422, U+2F8C0->U+63C5, U+2F8C1->U+63A9, U+2F8C2->U+3A2E, U+2F8C3->U+6469, U+2F8C4->U+647E, \ - U+2F8C5->U+649D, U+2F8C6->U+6477, U+2F8C7->U+3A6C, U+2F8C8->U+654F, U+2F8C9->U+656C, U+2F8CA->U+2300A, U+2F8CB->U+65E3, U+2F8CC->U+66F8, U+2F8CD->U+6649, U+2F8CE->U+3B19, U+2F8CF->U+6691, U+2F8D0->U+3B08, U+2F8D1->U+3AE4, U+2F8D2->U+5192, U+2F8D3->U+5195, \ - U+2F8D4->U+6700, U+2F8D5->U+669C, U+2F8D6->U+80AD, U+2F8D7->U+43D9, U+2F8D8->U+6717, U+2F8D9->U+671B, U+2F8DA->U+6721, U+2F8DB->U+675E, U+2F8DC->U+6753, U+2F8DD->U+233C3, U+2F8DE->U+3B49, U+2F8DF->U+67FA, U+2F8E0->U+6785, U+2F8E1->U+6852, U+2F8E2->U+6885, \ - U+2F8E3->U+2346D, U+2F8E4->U+688E, U+2F8E5->U+681F, U+2F8E6->U+6914, U+2F8E7->U+3B9D, U+2F8E8->U+6942, U+2F8E9->U+69A3, U+2F8EA->U+69EA, U+2F8EB->U+6AA8, U+2F8EC->U+236A3, U+2F8ED->U+6ADB, U+2F8EE->U+3C18, U+2F8EF->U+6B21, U+2F8F0->U+238A7, U+2F8F1->U+6B54, \ - U+2F8F2->U+3C4E, U+2F8F3->U+6B72, U+2F8F4->U+6B9F, 
U+2F8F5->U+6BBA, U+2F8F6->U+6BBB, U+2F8F7->U+23A8D, U+2F8F8->U+21D0B, U+2F8F9->U+23AFA, U+2F8FA->U+6C4E, U+2F8FB->U+23CBC, U+2F8FC->U+6CBF, U+2F8FD->U+6CCD, U+2F8FE->U+6C67, U+2F8FF->U+6D16, U+2F900->U+6D3E, \ - U+2F901->U+6D77, U+2F902->U+6D41, U+2F903->U+6D69, U+2F904->U+6D78, U+2F905->U+6D85, U+2F906->U+23D1E, U+2F907->U+6D34, U+2F908->U+6E2F, U+2F909->U+6E6E, U+2F90A->U+3D33, U+2F90B->U+6ECB, U+2F90C->U+6EC7, U+2F90D->U+23ED1, U+2F90E->U+6DF9, U+2F90F->U+6F6E, \ - U+2F910->U+23F5E, U+2F911->U+23F8E, U+2F912->U+6FC6, U+2F913->U+7039, U+2F914->U+701E, U+2F915->U+701B, U+2F916->U+3D96, U+2F917->U+704A, U+2F918->U+707D, U+2F919->U+7077, U+2F91A->U+70AD, U+2F91B->U+20525, U+2F91C->U+7145, U+2F91D->U+24263, U+2F91E->U+719C, \ - U+2F91F->U+243AB, U+2F920->U+7228, U+2F921->U+7235, U+2F922->U+7250, U+2F923->U+24608, U+2F924->U+7280, U+2F925->U+7295, U+2F926->U+24735, U+2F927->U+24814, U+2F928->U+737A, U+2F929->U+738B, U+2F92A->U+3EAC, U+2F92B->U+73A5, U+2F92C->U+3EB8, U+2F92D->U+3EB8, \ - U+2F92E->U+7447, U+2F92F->U+745C, U+2F930->U+7471, U+2F931->U+7485, U+2F932->U+74CA, U+2F933->U+3F1B, U+2F934->U+7524, U+2F935->U+24C36, U+2F936->U+753E, U+2F937->U+24C92, U+2F938->U+7570, U+2F939->U+2219F, U+2F93A->U+7610, U+2F93B->U+24FA1, U+2F93C->U+24FB8, \ - U+2F93D->U+25044, U+2F93E->U+3FFC, U+2F93F->U+4008, U+2F940->U+76F4, U+2F941->U+250F3, U+2F942->U+250F2, U+2F943->U+25119, U+2F944->U+25133, U+2F945->U+771E, U+2F946->U+771F, U+2F947->U+771F, U+2F948->U+774A, U+2F949->U+4039, U+2F94A->U+778B, U+2F94B->U+4046, \ - U+2F94C->U+4096, U+2F94D->U+2541D, U+2F94E->U+784E, U+2F94F->U+788C, U+2F950->U+78CC, U+2F951->U+40E3, U+2F952->U+25626, U+2F953->U+7956, U+2F954->U+2569A, U+2F955->U+256C5, U+2F956->U+798F, U+2F957->U+79EB, U+2F958->U+412F, U+2F959->U+7A40, U+2F95A->U+7A4A, \ - U+2F95B->U+7A4F, U+2F95C->U+2597C, U+2F95D->U+25AA7, U+2F95E->U+25AA7, U+2F95F->U+7AEE, U+2F960->U+4202, U+2F961->U+25BAB, U+2F962->U+7BC6, U+2F963->U+7BC9, U+2F964->U+4227, U+2F965->U+25C80, U+2F966->U+7CD2, U+2F967->U+42A0, U+2F968->U+7CE8, U+2F969->U+7CE3, \ - U+2F96A->U+7D00, U+2F96B->U+25F86, U+2F96C->U+7D63, U+2F96D->U+4301, U+2F96E->U+7DC7, U+2F96F->U+7E02, U+2F970->U+7E45, U+2F971->U+4334, U+2F972->U+26228, U+2F973->U+26247, U+2F974->U+4359, U+2F975->U+262D9, U+2F976->U+7F7A, U+2F977->U+2633E, U+2F978->U+7F95, \ - U+2F979->U+7FFA, U+2F97A->U+8005, U+2F97B->U+264DA, U+2F97C->U+26523, U+2F97D->U+8060, U+2F97E->U+265A8, U+2F97F->U+8070, U+2F980->U+2335F, U+2F981->U+43D5, U+2F982->U+80B2, U+2F983->U+8103, U+2F984->U+440B, U+2F985->U+813E, U+2F986->U+5AB5, U+2F987->U+267A7, \ - U+2F988->U+267B5, U+2F989->U+23393, U+2F98A->U+2339C, U+2F98B->U+8201, U+2F98C->U+8204, U+2F98D->U+8F9E, U+2F98E->U+446B, U+2F98F->U+8291, U+2F990->U+828B, U+2F991->U+829D, U+2F992->U+52B3, U+2F993->U+82B1, U+2F994->U+82B3, U+2F995->U+82BD, U+2F996->U+82E6, \ - U+2F997->U+26B3C, U+2F998->U+82E5, U+2F999->U+831D, U+2F99A->U+8363, U+2F99B->U+83AD, U+2F99C->U+8323, U+2F99D->U+83BD, U+2F99E->U+83E7, U+2F99F->U+8457, U+2F9A0->U+8353, U+2F9A1->U+83CA, U+2F9A2->U+83CC, U+2F9A3->U+83DC, U+2F9A4->U+26C36, U+2F9A5->U+26D6B, \ - U+2F9A6->U+26CD5, U+2F9A7->U+452B, U+2F9A8->U+84F1, U+2F9A9->U+84F3, U+2F9AA->U+8516, U+2F9AB->U+273CA, U+2F9AC->U+8564, U+2F9AD->U+26F2C, U+2F9AE->U+455D, U+2F9AF->U+4561, U+2F9B0->U+26FB1, U+2F9B1->U+270D2, U+2F9B2->U+456B, U+2F9B3->U+8650, U+2F9B4->U+865C, \ - U+2F9B5->U+8667, U+2F9B6->U+8669, U+2F9B7->U+86A9, U+2F9B8->U+8688, U+2F9B9->U+870E, U+2F9BA->U+86E2, U+2F9BB->U+8779, U+2F9BC->U+8728, U+2F9BD->U+876B, U+2F9BE->U+8786, 
U+2F9BF->U+45D7, U+2F9C0->U+87E1, U+2F9C1->U+8801, U+2F9C2->U+45F9, U+2F9C3->U+8860, \ - U+2F9C4->U+8863, U+2F9C5->U+27667, U+2F9C6->U+88D7, U+2F9C7->U+88DE, U+2F9C8->U+4635, U+2F9C9->U+88FA, U+2F9CA->U+34BB, U+2F9CB->U+278AE, U+2F9CC->U+27966, U+2F9CD->U+46BE, U+2F9CE->U+46C7, U+2F9CF->U+8AA0, U+2F9D0->U+8AED, U+2F9D1->U+8B8A, U+2F9D2->U+8C55, \ - U+2F9D3->U+27CA8, U+2F9D4->U+8CAB, U+2F9D5->U+8CC1, U+2F9D6->U+8D1B, U+2F9D7->U+8D77, U+2F9D8->U+27F2F, U+2F9D9->U+20804, U+2F9DA->U+8DCB, U+2F9DB->U+8DBC, U+2F9DC->U+8DF0, U+2F9DD->U+208DE, U+2F9DE->U+8ED4, U+2F9DF->U+8F38, U+2F9E0->U+285D2, U+2F9E1->U+285ED, \ - U+2F9E2->U+9094, U+2F9E3->U+90F1, U+2F9E4->U+9111, U+2F9E5->U+2872E, U+2F9E6->U+911B, U+2F9E7->U+9238, U+2F9E8->U+92D7, U+2F9E9->U+92D8, U+2F9EA->U+927C, U+2F9EB->U+93F9, U+2F9EC->U+9415, U+2F9ED->U+28BFA, U+2F9EE->U+958B, U+2F9EF->U+4995, U+2F9F0->U+95B7, \ - U+2F9F1->U+28D77, U+2F9F2->U+49E6, U+2F9F3->U+96C3, U+2F9F4->U+5DB2, U+2F9F5->U+9723, U+2F9F6->U+29145, U+2F9F7->U+2921A, U+2F9F8->U+4A6E, U+2F9F9->U+4A76, U+2F9FA->U+97E0, U+2F9FB->U+2940A, U+2F9FC->U+4AB2, U+2F9FD->U+29496, U+2F9FE->U+980B, U+2F9FF->U+980B, \ - U+2FA00->U+9829, U+2FA01->U+295B6, U+2FA02->U+98E2, U+2FA03->U+4B33, U+2FA04->U+9929, U+2FA05->U+99A7, U+2FA06->U+99C2, U+2FA07->U+99FE, U+2FA08->U+4BCE, U+2FA09->U+29B30, U+2FA0A->U+9B12, U+2FA0B->U+9C40, U+2FA0C->U+9CFD, U+2FA0D->U+4CCE, U+2FA0E->U+4CED, \ - U+2FA0F->U+9D67, U+2FA10->U+2A0CE, U+2FA11->U+4CF8, U+2FA12->U+2A105, U+2FA13->U+2A20E, U+2FA14->U+2A291, U+2FA15->U+9EBB, U+2FA16->U+4D56, U+2FA17->U+9EF9, U+2FA18->U+9EFE, U+2FA19->U+9F05, U+2FA1A->U+9F0F, U+2FA1B->U+9F16, U+2FA1C->U+9F3B, U+2FA1D->U+2A600, \ - U+2F00->U+4E00, U+2F01->U+4E28, U+2F02->U+4E36, U+2F03->U+4E3F, U+2F04->U+4E59, U+2F05->U+4E85, U+2F06->U+4E8C, U+2F07->U+4EA0, U+2F08->U+4EBA, U+2F09->U+513F, U+2F0A->U+5165, U+2F0B->U+516B, U+2F0C->U+5182, U+2F0D->U+5196, U+2F0E->U+51AB, U+2F0F->U+51E0, \ - U+2F10->U+51F5, U+2F11->U+5200, U+2F12->U+529B, U+2F13->U+52F9, U+2F14->U+5315, U+2F15->U+531A, U+2F16->U+5338, U+2F17->U+5341, U+2F18->U+535C, U+2F19->U+5369, U+2F1A->U+5382, U+2F1B->U+53B6, U+2F1C->U+53C8, U+2F1D->U+53E3, U+2F1E->U+56D7, U+2F1F->U+571F, \ - U+2F20->U+58EB, U+2F21->U+5902, U+2F22->U+590A, U+2F23->U+5915, U+2F24->U+5927, U+2F25->U+5973, U+2F26->U+5B50, U+2F27->U+5B80, U+2F28->U+5BF8, U+2F29->U+5C0F, U+2F2A->U+5C22, U+2F2B->U+5C38, U+2F2C->U+5C6E, U+2F2D->U+5C71, U+2F2E->U+5DDB, U+2F2F->U+5DE5, \ - U+2F30->U+5DF1, U+2F31->U+5DFE, U+2F32->U+5E72, U+2F33->U+5E7A, U+2F34->U+5E7F, U+2F35->U+5EF4, U+2F36->U+5EFE, U+2F37->U+5F0B, U+2F38->U+5F13, U+2F39->U+5F50, U+2F3A->U+5F61, U+2F3B->U+5F73, U+2F3C->U+5FC3, U+2F3D->U+6208, U+2F3E->U+6236, U+2F3F->U+624B, \ - U+2F40->U+652F, U+2F41->U+6534, U+2F42->U+6587, U+2F43->U+6597, U+2F44->U+65A4, U+2F45->U+65B9, U+2F46->U+65E0, U+2F47->U+65E5, U+2F48->U+66F0, U+2F49->U+6708, U+2F4A->U+6728, U+2F4B->U+6B20, U+2F4C->U+6B62, U+2F4D->U+6B79, U+2F4E->U+6BB3, U+2F4F->U+6BCB, \ - U+2F50->U+6BD4, U+2F51->U+6BDB, U+2F52->U+6C0F, U+2F53->U+6C14, U+2F54->U+6C34, U+2F55->U+706B, U+2F56->U+722A, U+2F57->U+7236, U+2F58->U+723B, U+2F59->U+723F, U+2F5A->U+7247, U+2F5B->U+7259, U+2F5C->U+725B, U+2F5D->U+72AC, U+2F5E->U+7384, U+2F5F->U+7389, \ - U+2F60->U+74DC, U+2F61->U+74E6, U+2F62->U+7518, U+2F63->U+751F, U+2F64->U+7528, U+2F65->U+7530, U+2F66->U+758B, U+2F67->U+7592, U+2F68->U+7676, U+2F69->U+767D, U+2F6A->U+76AE, U+2F6B->U+76BF, U+2F6C->U+76EE, U+2F6D->U+77DB, U+2F6E->U+77E2, U+2F6F->U+77F3, \ - U+2F70->U+793A, U+2F71->U+79B8, U+2F72->U+79BE, U+2F73->U+7A74, 
U+2F74->U+7ACB, U+2F75->U+7AF9, U+2F76->U+7C73, U+2F77->U+7CF8, U+2F78->U+7F36, U+2F79->U+7F51, U+2F7A->U+7F8A, U+2F7B->U+7FBD, U+2F7C->U+8001, U+2F7D->U+800C, U+2F7E->U+8012, U+2F7F->U+8033, \ - U+2F80->U+807F, U+2F81->U+8089, U+2F82->U+81E3, U+2F83->U+81EA, U+2F84->U+81F3, U+2F85->U+81FC, U+2F86->U+820C, U+2F87->U+821B, U+2F88->U+821F, U+2F89->U+826E, U+2F8A->U+8272, U+2F8B->U+8278, U+2F8C->U+864D, U+2F8D->U+866B, U+2F8E->U+8840, U+2F8F->U+884C, \ - U+2F90->U+8863, U+2F91->U+897E, U+2F92->U+898B, U+2F93->U+89D2, U+2F94->U+8A00, U+2F95->U+8C37, U+2F96->U+8C46, U+2F97->U+8C55, U+2F98->U+8C78, U+2F99->U+8C9D, U+2F9A->U+8D64, U+2F9B->U+8D70, U+2F9C->U+8DB3, U+2F9D->U+8EAB, U+2F9E->U+8ECA, U+2F9F->U+8F9B, \ - U+2FA0->U+8FB0, U+2FA1->U+8FB5, U+2FA2->U+9091, U+2FA3->U+9149, U+2FA4->U+91C6, U+2FA5->U+91CC, U+2FA6->U+91D1, U+2FA7->U+9577, U+2FA8->U+9580, U+2FA9->U+961C, U+2FAA->U+96B6, U+2FAB->U+96B9, U+2FAC->U+96E8, U+2FAD->U+9751, U+2FAE->U+975E, U+2FAF->U+9762, \ - U+2FB0->U+9769, U+2FB1->U+97CB, U+2FB2->U+97ED, U+2FB3->U+97F3, U+2FB4->U+9801, U+2FB5->U+98A8, U+2FB6->U+98DB, U+2FB7->U+98DF, U+2FB8->U+9996, U+2FB9->U+9999, U+2FBA->U+99AC, U+2FBB->U+9AA8, U+2FBC->U+9AD8, U+2FBD->U+9ADF, U+2FBE->U+9B25, U+2FBF->U+9B2F, \ - U+2FC0->U+9B32, U+2FC1->U+9B3C, U+2FC2->U+9B5A, U+2FC3->U+9CE5, U+2FC4->U+9E75, U+2FC5->U+9E7F, U+2FC6->U+9EA5, U+2FC7->U+9EBB, U+2FC8->U+9EC3, U+2FC9->U+9ECD, U+2FCA->U+9ED1, U+2FCB->U+9EF9, U+2FCC->U+9EFD, U+2FCD->U+9F0E, U+2FCE->U+9F13, U+2FCF->U+9F20, \ - U+2FD0->U+9F3B, U+2FD1->U+9F4A, U+2FD2->U+9F52, U+2FD3->U+9F8D, U+2FD4->U+9F9C, U+2FD5->U+9FA0, U+3042->U+3041, U+3044->U+3043, U+3046->U+3045, U+3048->U+3047, U+304A->U+3049, U+304C->U+304B, U+304E->U+304D, U+3050->U+304F, U+3052->U+3051, U+3054->U+3053, \ - U+3056->U+3055, U+3058->U+3057, U+305A->U+3059, U+305C->U+305B, U+305E->U+305D, U+3060->U+305F, U+3062->U+3061, U+3064->U+3063, U+3065->U+3063, U+3067->U+3066, U+3069->U+3068, U+3070->U+306F, U+3071->U+306F, U+3073->U+3072, U+3074->U+3072, U+3076->U+3075, \ - U+3077->U+3075, U+3079->U+3078, U+307A->U+3078, U+307C->U+307B, U+307D->U+307B, U+3084->U+3083, U+3086->U+3085, U+3088->U+3087, U+308F->U+308E, U+3094->U+3046, U+3095->U+304B, U+3096->U+3051, U+30A2->U+30A1, U+30A4->U+30A3, U+30A6->U+30A5, U+30A8->U+30A7, \ - U+30AA->U+30A9, U+30AC->U+30AB, U+30AE->U+30AD, U+30B0->U+30AF, U+30B2->U+30B1, U+30B4->U+30B3, U+30B6->U+30B5, U+30B8->U+30B7, U+30BA->U+30B9, U+30BC->U+30BB, U+30BE->U+30BD, U+30C0->U+30BF, U+30C2->U+30C1, U+30C5->U+30C4, U+30C7->U+30C6, U+30C9->U+30C8, \ - U+30D0->U+30CF, U+30D1->U+30CF, U+30D3->U+30D2, U+30D4->U+30D2, U+30D6->U+30D5, U+30D7->U+30D5, U+30D9->U+30D8, U+30DA->U+30D8, U+30DC->U+30DB, U+30DD->U+30DB, U+30E4->U+30E3, U+30E6->U+30E5, U+30E8->U+30E7, U+30EF->U+30EE, U+30F4->U+30A6, U+30AB->U+30F5, \ - U+30B1->U+30F6, U+30F7->U+30EF, U+30F8->U+30F0, U+30F9->U+30F1, U+30FA->U+30F2, U+30AF->U+31F0, U+30B7->U+31F1, U+30B9->U+31F2, U+30C8->U+31F3, U+30CC->U+31F4, U+30CF->U+31F5, U+30D2->U+31F6, U+30D5->U+31F7, U+30D8->U+31F8, U+30DB->U+31F9, U+30E0->U+31FA, \ - U+30E9->U+31FB, U+30EA->U+31FC, U+30EB->U+31FD, U+30EC->U+31FE, U+30ED->U+31FF, U+FF66->U+30F2, U+FF67->U+30A1, U+FF68->U+30A3, U+FF69->U+30A5, U+FF6A->U+30A7, U+FF6B->U+30A9, U+FF6C->U+30E3, U+FF6D->U+30E5, U+FF6E->U+30E7, U+FF6F->U+30C3, U+FF71->U+30A1, \ - U+FF72->U+30A3, U+FF73->U+30A5, U+FF74->U+30A7, U+FF75->U+30A9, U+FF76->U+30AB, U+FF77->U+30AD, U+FF78->U+30AF, U+FF79->U+30B1, U+FF7A->U+30B3, U+FF7B->U+30B5, U+FF7C->U+30B7, U+FF7D->U+30B9, U+FF7E->U+30BB, U+FF7F->U+30BD, 
U+FF80->U+30BF, U+FF81->U+30C1, \ - U+FF82->U+30C3, U+FF83->U+30C6, U+FF84->U+30C8, U+FF85->U+30CA, U+FF86->U+30CB, U+FF87->U+30CC, U+FF88->U+30CD, U+FF89->U+30CE, U+FF8A->U+30CF, U+FF8B->U+30D2, U+FF8C->U+30D5, U+FF8D->U+30D8, U+FF8E->U+30DB, U+FF8F->U+30DE, U+FF90->U+30DF, U+FF91->U+30E0, \ - U+FF92->U+30E1, U+FF93->U+30E2, U+FF94->U+30E3, U+FF95->U+30E5, U+FF96->U+30E7, U+FF97->U+30E9, U+FF98->U+30EA, U+FF99->U+30EB, U+FF9A->U+30EC, U+FF9B->U+30ED, U+FF9C->U+30EF, U+FF9D->U+30F3, U+FFA0->U+3164, U+FFA1->U+3131, U+FFA2->U+3132, U+FFA3->U+3133, \ - U+FFA4->U+3134, U+FFA5->U+3135, U+FFA6->U+3136, U+FFA7->U+3137, U+FFA8->U+3138, U+FFA9->U+3139, U+FFAA->U+313A, U+FFAB->U+313B, U+FFAC->U+313C, U+FFAD->U+313D, U+FFAE->U+313E, U+FFAF->U+313F, U+FFB0->U+3140, U+FFB1->U+3141, U+FFB2->U+3142, U+FFB3->U+3143, \ - U+FFB4->U+3144, U+FFB5->U+3145, U+FFB6->U+3146, U+FFB7->U+3147, U+FFB8->U+3148, U+FFB9->U+3149, U+FFBA->U+314A, U+FFBB->U+314B, U+FFBC->U+314C, U+FFBD->U+314D, U+FFBE->U+314E, U+FFC2->U+314F, U+FFC3->U+3150, U+FFC4->U+3151, U+FFC5->U+3152, U+FFC6->U+3153, \ - U+FFC7->U+3154, U+FFCA->U+3155, U+FFCB->U+3156, U+FFCC->U+3157, U+FFCD->U+3158, U+FFCE->U+3159, U+FFCF->U+315A, U+FFD2->U+315B, U+FFD3->U+315C, U+FFD4->U+315D, U+FFD5->U+315E, U+FFD6->U+315F, U+FFD7->U+3160, U+FFDA->U+3161, U+FFDB->U+3162, U+FFDC->U+3163, \ - U+3131->U+1100, U+3132->U+1101, U+3133->U+11AA, U+3134->U+1102, U+3135->U+11AC, U+3136->U+11AD, U+3137->U+1103, U+3138->U+1104, U+3139->U+1105, U+313A->U+11B0, U+313B->U+11B1, U+313C->U+11B2, U+313D->U+11B3, U+313E->U+11B4, U+313F->U+11B5, U+3140->U+111A, \ - U+3141->U+1106, U+3142->U+1107, U+3143->U+1108, U+3144->U+1121, U+3145->U+1109, U+3146->U+110A, U+3147->U+110B, U+3148->U+110C, U+3149->U+110D, U+314A->U+110E, U+314B->U+110F, U+314C->U+1110, U+314D->U+1111, U+314E->U+1112, U+314F->U+1161, U+3150->U+1162, \ - U+3151->U+1163, U+3152->U+1164, U+3153->U+1165, U+3154->U+1166, U+3155->U+1167, U+3156->U+1168, U+3157->U+1169, U+3158->U+116A, U+3159->U+116B, U+315A->U+116C, U+315B->U+116D, U+315C->U+116E, U+315D->U+116F, U+315E->U+1170, U+315F->U+1171, U+3160->U+1172, \ - U+3161->U+1173, U+3162->U+1174, U+3163->U+1175, U+3165->U+1114, U+3166->U+1115, U+3167->U+11C7, U+3168->U+11C8, U+3169->U+11CC, U+316A->U+11CE, U+316B->U+11D3, U+316C->U+11D7, U+316D->U+11D9, U+316E->U+111C, U+316F->U+11DD, U+3170->U+11DF, U+3171->U+111D, \ - U+3172->U+111E, U+3173->U+1120, U+3174->U+1122, U+3175->U+1123, U+3176->U+1127, U+3177->U+1129, U+3178->U+112B, U+3179->U+112C, U+317A->U+112D, U+317B->U+112E, U+317C->U+112F, U+317D->U+1132, U+317E->U+1136, U+317F->U+1140, U+3180->U+1147, U+3181->U+114C, \ - U+3182->U+11F1, U+3183->U+11F2, U+3184->U+1157, U+3185->U+1158, U+3186->U+1159, U+3187->U+1184, U+3188->U+1185, U+3189->U+1188, U+318A->U+1191, U+318B->U+1192, U+318C->U+1194, U+318D->U+119E, U+318E->U+11A1, U+A490->U+A408, U+A491->U+A1B9, U+4E00..U+9FBB, \ - U+3400..U+4DB5, U+20000..U+2A6D6, U+FA0E, U+FA0F, U+FA11, U+FA13, U+FA14, U+FA1F, U+FA21, U+FA23, U+FA24, U+FA27, U+FA28, U+FA29, U+3105..U+312C, U+31A0..U+31B7, U+3041, U+3043, U+3045, U+3047, U+3049, U+304B, U+304D, U+304F, U+3051, U+3053, U+3055, U+3057, \ - U+3059, U+305B, U+305D, U+305F, U+3061, U+3063, U+3066, U+3068, U+306A..U+306F, U+3072, U+3075, U+3078, U+307B, U+307E..U+3083, U+3085, U+3087, U+3089..U+308E, U+3090..U+3093, U+30A1, U+30A3, U+30A5, U+30A7, U+30A9, U+30AD, U+30AF, U+30B3, U+30B5, U+30BB, \ - U+30BD, U+30BF, U+30C1, U+30C3, U+30C4, U+30C6, U+30CA, U+30CB, U+30CD, U+30CE, U+30DE, U+30DF, U+30E1, U+30E2, U+30E3, U+30E5, U+30E7, 
U+30EE, U+30F0..U+30F3, U+30F5, U+30F6, U+31F0, U+31F1, U+31F2, U+31F3, U+31F4, U+31F5, U+31F6, U+31F7, U+31F8, U+31F9, \ - U+31FA, U+31FB, U+31FC, U+31FD, U+31FE, U+31FF, U+AC00..U+D7A3, U+1100..U+1159, U+1161..U+11A2, U+11A8..U+11F9, U+A000..U+A48C, U+A492..U+A4C6, U+2C80->U+2C81, U+2C81, U+2C82->U+2C83, U+2C83, U+2C84->U+2C85, U+2C85, U+2C86->U+2C87, U+2C87, U+2C88->U+2C89, \ - U+2C89, U+2C8A->U+2C8B, U+2C8B, U+2C8C->U+2C8D, U+2C8D, U+2C8E->U+2C8F, U+2C8F, U+2C90->U+2C91, U+2C91, U+2C92->U+2C93, U+2C93, U+2C94->U+2C95, U+2C95, U+2C96->U+2C97, U+2C97, U+2C98->U+2C99, U+2C99, U+2C9A->U+2C9B, U+2C9B, U+2C9C->U+2C9D, U+2C9D, U+2C9E->U+2C9F, \ - U+2C9F, U+2CA0->U+2CA1, U+2CA1, U+2CA2->U+2CA3, U+2CA3, U+2CA4->U+2CA5, U+2CA5, U+2CA6->U+2CA7, U+2CA7, U+2CA8->U+2CA9, U+2CA9, U+2CAA->U+2CAB, U+2CAB, U+2CAC->U+2CAD, U+2CAD, U+2CAE->U+2CAF, U+2CAF, U+2CB0->U+2CB1, U+2CB1, U+2CB2->U+2CB3, U+2CB3, U+2CB4->U+2CB5, \ - U+2CB5, U+2CB6->U+2CB7, U+2CB7, U+2CB8->U+2CB9, U+2CB9, U+2CBA->U+2CBB, U+2CBB, U+2CBC->U+2CBD, U+2CBD, U+2CBE->U+2CBF, U+2CBF, U+2CC0->U+2CC1, U+2CC1, U+2CC2->U+2CC3, U+2CC3, U+2CC4->U+2CC5, U+2CC5, U+2CC6->U+2CC7, U+2CC7, U+2CC8->U+2CC9, U+2CC9, U+2CCA->U+2CCB, \ - U+2CCB, U+2CCC->U+2CCD, U+2CCD, U+2CCE->U+2CCF, U+2CCF, U+2CD0->U+2CD1, U+2CD1, U+2CD2->U+2CD3, U+2CD3, U+2CD4->U+2CD5, U+2CD5, U+2CD6->U+2CD7, U+2CD7, U+2CD8->U+2CD9, U+2CD9, U+2CDA->U+2CDB, U+2CDB, U+2CDC->U+2CDD, U+2CDD, U+2CDE->U+2CDF, U+2CDF, U+2CE0->U+2CE1, \ - U+2CE1, U+2CE2->U+2CE3, U+2CE3, U+0400->U+0435, U+0401->U+0435, U+0402->U+0452, U+0452, U+0403->U+0433, U+0404->U+0454, U+0454, U+0405->U+0455, U+0455, U+0406->U+0456, U+0407->U+0456, U+0457->U+0456, U+0456, U+0408..U+040B->U+0458..U+045B, U+0458..U+045B, \ - U+040C->U+043A, U+040D->U+0438, U+040E->U+0443, U+040F->U+045F, U+045F, U+0450->U+0435, U+0451->U+0435, U+0453->U+0433, U+045C->U+043A, U+045D->U+0438, U+045E->U+0443, U+0460->U+0461, U+0461, U+0462->U+0463, U+0463, U+0464->U+0465, U+0465, U+0466->U+0467, \ - U+0467, U+0468->U+0469, U+0469, U+046A->U+046B, U+046B, U+046C->U+046D, U+046D, U+046E->U+046F, U+046F, U+0470->U+0471, U+0471, U+0472->U+0473, U+0473, U+0474->U+0475, U+0476->U+0475, U+0477->U+0475, U+0475, U+0478->U+0479, U+0479, U+047A->U+047B, U+047B, \ - U+047C->U+047D, U+047D, U+047E->U+047F, U+047F, U+0480->U+0481, U+0481, U+048A->U+0438, U+048B->U+0438, U+048C->U+044C, U+048D->U+044C, U+048E->U+0440, U+048F->U+0440, U+0490->U+0433, U+0491->U+0433, U+0490->U+0433, U+0491->U+0433, U+0492->U+0433, U+0493->U+0433, \ - U+0494->U+0433, U+0495->U+0433, U+0496->U+0436, U+0497->U+0436, U+0498->U+0437, U+0499->U+0437, U+049A->U+043A, U+049B->U+043A, U+049C->U+043A, U+049D->U+043A, U+049E->U+043A, U+049F->U+043A, U+04A0->U+043A, U+04A1->U+043A, U+04A2->U+043D, U+04A3->U+043D, \ - U+04A4->U+043D, U+04A5->U+043D, U+04A6->U+043F, U+04A7->U+043F, U+04A8->U+04A9, U+04A9, U+04AA->U+0441, U+04AB->U+0441, U+04AC->U+0442, U+04AD->U+0442, U+04AE->U+0443, U+04AF->U+0443, U+04B0->U+0443, U+04B1->U+0443, U+04B2->U+0445, U+04B3->U+0445, U+04B4->U+04B5, \ - U+04B5, U+04B6->U+0447, U+04B7->U+0447, U+04B8->U+0447, U+04B9->U+0447, U+04BA->U+04BB, U+04BB, U+04BC->U+04BD, U+04BE->U+04BD, U+04BF->U+04BD, U+04BD, U+04C0->U+04CF, U+04CF, U+04C1->U+0436, U+04C2->U+0436, U+04C3->U+043A, U+04C4->U+043A, U+04C5->U+043B, \ - U+04C6->U+043B, U+04C7->U+043D, U+04C8->U+043D, U+04C9->U+043D, U+04CA->U+043D, U+04CB->U+0447, U+04CC->U+0447, U+04CD->U+043C, U+04CE->U+043C, U+04D0->U+0430, U+04D1->U+0430, U+04D2->U+0430, U+04D3->U+0430, U+04D4->U+00E6, U+04D5->U+00E6, U+04D6->U+0435, \ - 
U+04D7->U+0435, U+04D8->U+04D9, U+04DA->U+04D9, U+04DB->U+04D9, U+04D9, U+04DC->U+0436, U+04DD->U+0436, U+04DE->U+0437, U+04DF->U+0437, U+04E0->U+04E1, U+04E1, U+04E2->U+0438, U+04E3->U+0438, U+04E4->U+0438, U+04E5->U+0438, U+04E6->U+043E, U+04E7->U+043E, \ - U+04E8->U+043E, U+04E9->U+043E, U+04EA->U+043E, U+04EB->U+043E, U+04EC->U+044D, U+04ED->U+044D, U+04EE->U+0443, U+04EF->U+0443, U+04F0->U+0443, U+04F1->U+0443, U+04F2->U+0443, U+04F3->U+0443, U+04F4->U+0447, U+04F5->U+0447, U+04F6->U+0433, U+04F7->U+0433, \ - U+04F8->U+044B, U+04F9->U+044B, U+04FA->U+0433, U+04FB->U+0433, U+04FC->U+0445, U+04FD->U+0445, U+04FE->U+0445, U+04FF->U+0445, U+0410..U+0418->U+0430..U+0438, U+0419->U+0438, U+0430..U+0438, U+041A..U+042F->U+043A..U+044F, U+043A..U+044F, U+0929->U+0928, \ - U+0931->U+0930, U+0934->U+0933, U+0958->U+0915, U+0959->U+0916, U+095A->U+0917, U+095B->U+091C, U+095C->U+0921, U+095D->U+0922, U+095E->U+092B, U+095F->U+092F, U+0904..U+0928, U+092A..U+0930, U+0932, U+0933, U+0935..U+0939, U+0960, U+0961, U+0966..U+096F, \ - U+097B..U+097F, U+10FC->U+10DC, U+10D0..U+10FA, U+10A0..U+10C5->U+2D00..U+2D25, U+2D00..U+2D25, U+0386->U+03B1, U+0388->U+03B5, U+0389->U+03B7, U+038A->U+03B9, U+038C->U+03BF, U+038E->U+03C5, U+038F->U+03C9, U+0390->U+03B9, U+03AA->U+03B9, U+03AB->U+03C5, \ - U+03AC->U+03B1, U+03AD->U+03B5, U+03AE->U+03B7, U+03AF->U+03B9, U+03B0->U+03C5, U+03CA->U+03B9, U+03CB->U+03C5, U+03CC->U+03BF, U+03CD->U+03C5, U+03CE->U+03C9, U+03D0->U+03B2, U+03D1->U+03B8, U+03D2->U+03C5, U+03D3->U+03C5, U+03D4->U+03C5, U+03D5->U+03C6, \ - U+03D6->U+03C0, U+03D8->U+03D9, U+03DA->U+03DB, U+03DC->U+03DD, U+03DE->U+03DF, U+03E0->U+03E1, U+03E2->U+03E3, U+03E4->U+03E5, U+03E6->U+03E7, U+03E8->U+03E9, U+03EA->U+03EB, U+03EC->U+03ED, U+03EE->U+03EF, U+03F0->U+03BA, U+03F1->U+03C1, U+03F2->U+03C3, \ - U+03F4->U+03B8, U+03F5->U+03B5, U+03F6->U+03B5, U+03F7->U+03F8, U+03F9->U+03C3, U+03FA->U+03FB, U+1F00->U+03B1, U+1F01->U+03B1, U+1F02->U+03B1, U+1F03->U+03B1, U+1F04->U+03B1, U+1F05->U+03B1, U+1F06->U+03B1, U+1F07->U+03B1, U+1F08->U+03B1, U+1F09->U+03B1, \ - U+1F0A->U+03B1, U+1F0B->U+03B1, U+1F0C->U+03B1, U+1F0D->U+03B1, U+1F0E->U+03B1, U+1F0F->U+03B1, U+1F10->U+03B5, U+1F11->U+03B5, U+1F12->U+03B5, U+1F13->U+03B5, U+1F14->U+03B5, U+1F15->U+03B5, U+1F18->U+03B5, U+1F19->U+03B5, U+1F1A->U+03B5, U+1F1B->U+03B5, \ - U+1F1C->U+03B5, U+1F1D->U+03B5, U+1F20->U+03B7, U+1F21->U+03B7, U+1F22->U+03B7, U+1F23->U+03B7, U+1F24->U+03B7, U+1F25->U+03B7, U+1F26->U+03B7, U+1F27->U+03B7, U+1F28->U+03B7, U+1F29->U+03B7, U+1F2A->U+03B7, U+1F2B->U+03B7, U+1F2C->U+03B7, U+1F2D->U+03B7, \ - U+1F2E->U+03B7, U+1F2F->U+03B7, U+1F30->U+03B9, U+1F31->U+03B9, U+1F32->U+03B9, U+1F33->U+03B9, U+1F34->U+03B9, U+1F35->U+03B9, U+1F36->U+03B9, U+1F37->U+03B9, U+1F38->U+03B9, U+1F39->U+03B9, U+1F3A->U+03B9, U+1F3B->U+03B9, U+1F3C->U+03B9, U+1F3D->U+03B9, \ - U+1F3E->U+03B9, U+1F3F->U+03B9, U+1F40->U+03BF, U+1F41->U+03BF, U+1F42->U+03BF, U+1F43->U+03BF, U+1F44->U+03BF, U+1F45->U+03BF, U+1F48->U+03BF, U+1F49->U+03BF, U+1F4A->U+03BF, U+1F4B->U+03BF, U+1F4C->U+03BF, U+1F4D->U+03BF, U+1F50->U+03C5, U+1F51->U+03C5, \ - U+1F52->U+03C5, U+1F53->U+03C5, U+1F54->U+03C5, U+1F55->U+03C5, U+1F56->U+03C5, U+1F57->U+03C5, U+1F59->U+03C5, U+1F5B->U+03C5, U+1F5D->U+03C5, U+1F5F->U+03C5, U+1F60->U+03C9, U+1F61->U+03C9, U+1F62->U+03C9, U+1F63->U+03C9, U+1F64->U+03C9, U+1F65->U+03C9, \ - U+1F66->U+03C9, U+1F67->U+03C9, U+1F68->U+03C9, U+1F69->U+03C9, U+1F6A->U+03C9, U+1F6B->U+03C9, U+1F6C->U+03C9, U+1F6D->U+03C9, U+1F6E->U+03C9, U+1F6F->U+03C9, 
U+1F70->U+03B1, U+1F71->U+03B1, U+1F72->U+03B5, U+1F73->U+03B5, U+1F74->U+03B7, U+1F75->U+03B7, \ - U+1F76->U+03B9, U+1F77->U+03B9, U+1F78->U+03BF, U+1F79->U+03BF, U+1F7A->U+03C5, U+1F7B->U+03C5, U+1F7C->U+03C9, U+1F7D->U+03C9, U+1F80->U+03B1, U+1F81->U+03B1, U+1F82->U+03B1, U+1F83->U+03B1, U+1F84->U+03B1, U+1F85->U+03B1, U+1F86->U+03B1, U+1F87->U+03B1, \ - U+1F88->U+03B1, U+1F89->U+03B1, U+1F8A->U+03B1, U+1F8B->U+03B1, U+1F8C->U+03B1, U+1F8D->U+03B1, U+1F8E->U+03B1, U+1F8F->U+03B1, U+1F90->U+03B7, U+1F91->U+03B7, U+1F92->U+03B7, U+1F93->U+03B7, U+1F94->U+03B7, U+1F95->U+03B7, U+1F96->U+03B7, U+1F97->U+03B7, \ - U+1F98->U+03B7, U+1F99->U+03B7, U+1F9A->U+03B7, U+1F9B->U+03B7, U+1F9C->U+03B7, U+1F9D->U+03B7, U+1F9E->U+03B7, U+1F9F->U+03B7, U+1FA0->U+03C9, U+1FA1->U+03C9, U+1FA2->U+03C9, U+1FA3->U+03C9, U+1FA4->U+03C9, U+1FA5->U+03C9, U+1FA6->U+03C9, U+1FA7->U+03C9, \ - U+1FA8->U+03C9, U+1FA9->U+03C9, U+1FAA->U+03C9, U+1FAB->U+03C9, U+1FAC->U+03C9, U+1FAD->U+03C9, U+1FAE->U+03C9, U+1FAF->U+03C9, U+1FB0->U+03B1, U+1FB1->U+03B1, U+1FB2->U+03B1, U+1FB3->U+03B1, U+1FB4->U+03B1, U+1FB6->U+03B1, U+1FB7->U+03B1, U+1FB8->U+03B1, \ - U+1FB9->U+03B1, U+1FBA->U+03B1, U+1FBB->U+03B1, U+1FBC->U+03B1, U+1FC2->U+03B7, U+1FC3->U+03B7, U+1FC4->U+03B7, U+1FC6->U+03B7, U+1FC7->U+03B7, U+1FC8->U+03B5, U+1FC9->U+03B5, U+1FCA->U+03B7, U+1FCB->U+03B7, U+1FCC->U+03B7, U+1FD0->U+03B9, U+1FD1->U+03B9, \ - U+1FD2->U+03B9, U+1FD3->U+03B9, U+1FD6->U+03B9, U+1FD7->U+03B9, U+1FD8->U+03B9, U+1FD9->U+03B9, U+1FDA->U+03B9, U+1FDB->U+03B9, U+1FE0->U+03C5, U+1FE1->U+03C5, U+1FE2->U+03C5, U+1FE3->U+03C5, U+1FE4->U+03C1, U+1FE5->U+03C1, U+1FE6->U+03C5, U+1FE7->U+03C5, \ - U+1FE8->U+03C5, U+1FE9->U+03C5, U+1FEA->U+03C5, U+1FEB->U+03C5, U+1FEC->U+03C1, U+1FF2->U+03C9, U+1FF3->U+03C9, U+1FF4->U+03C9, U+1FF6->U+03C9, U+1FF7->U+03C9, U+1FF8->U+03BF, U+1FF9->U+03BF, U+1FFA->U+03C9, U+1FFB->U+03C9, U+1FFC->U+03C9, U+0391..U+03A1->U+03B1..U+03C1, \ - U+03B1..U+03C1, U+03A3..U+03A9->U+03C3..U+03C9, U+03C3..U+03C9, U+03C2, U+03D9, U+03DB, U+03DD, U+03DF, U+03E1, U+03E3, U+03E5, U+03E7, U+03E9, U+03EB, U+03ED, U+03EF, U+03F3, U+03F8, U+03FB, U+0A85..U+0A8C, U+0A8F, U+0A90, U+0A93..U+0AB0, U+0AB2, U+0AB3, \ - U+0AB5..U+0AB9, U+0AE0, U+0AE1, U+0AE6..U+0AEF, U+0A33->U+0A32, U+0A36->U+0A38, U+0A59->U+0A16, U+0A5A->U+0A17, U+0A5B->U+0A1C, U+0A5E->U+0A2B, U+0A05..U+0A0A, U+0A0F, U+0A10, U+0A13..U+0A28, U+0A2A..U+0A30, U+0A32, U+0A35, U+0A38, U+0A39, U+0A5C, U+0A66..U+0A6F, \ - U+FB1D->U+05D9, U+FB1F->U+05F2, U+FB20->U+05E2, U+FB21->U+05D0, U+FB22->U+05D3, U+FB23->U+05D4, U+FB24->U+05DB, U+FB25->U+05DC, U+FB26->U+05DD, U+FB27->U+05E8, U+FB28->U+05EA, U+FB2A->U+05E9, U+FB2B->U+05E9, U+FB2C->U+05E9, U+FB2D->U+05E9, U+FB2E->U+05D0, \ - U+FB2F->U+05D0, U+FB30->U+05D0, U+FB31->U+05D1, U+FB32->U+05D2, U+FB33->U+05D3, U+FB34->U+05D4, U+FB35->U+05D5, U+FB36->U+05D6, U+FB38->U+05D8, U+FB39->U+05D9, U+FB3A->U+05DA, U+FB3B->U+05DB, U+FB3C->U+05DC, U+FB3E->U+05DE, U+FB40->U+05E0, U+FB41->U+05E1, \ - U+FB43->U+05E3, U+FB44->U+05E4, U+FB46->U+05E6, U+FB47->U+05E7, U+FB48->U+05E8, U+FB49->U+05E9, U+FB4A->U+05EA, U+FB4B->U+05D5, U+FB4C->U+05D1, U+FB4D->U+05DB, U+FB4E->U+05E4, U+FB4F->U+05D0, U+05D0..U+05F2, U+0C85..U+0C8C, U+0C8E..U+0C90, U+0C92..U+0CA8, \ - U+0CAA..U+0CB3, U+0CB5..U+0CB9, U+0CE0, U+0CE1, U+0CE6..U+0CEF, U+1900..U+191C, U+1930..U+1938, U+1946..U+194F, U+0D05..U+0D0C, U+0D0E..U+0D10, U+0D12..U+0D28, U+0D2A..U+0D39, U+0D60, U+0D61, U+0D66..U+0D6F, U+0B94->U+0B92, U+0B85..U+0B8A, U+0B8E..U+0B90, \ - U+0B92, U+0B93, U+0B95, U+0B99, U+0B9A, U+0B9C, 
U+0B9E, U+0B9F, U+0BA3, U+0BA4, U+0BA8..U+0BAA, U+0BAE..U+0BB9, U+0BE6..U+0BEF, U+0E01..U+0E30, U+0E32, U+0E33, U+0E40..U+0E46, U+0E50..U+0E5B, U+FF10..U+FF19->0..9, U+FF21..U+FF3A->a..z, U+FF41..U+FF5A->a..z, \ - 0..9, A..Z->a..z, a..z - - # ignored characters list - # optional, default value is empty - # - # ignore_chars = U+00AD - - # minimum word prefix length to index - # optional, default is 0 (do not index prefixes) - # - # min_prefix_len = 0 - - # minimum word infix length to index - # optional, default is 0 (do not index infixes) - # - # min_infix_len = 0 - - # list of fields to limit prefix/infix indexing to - # optional, default value is empty (index all fields in prefix/infix mode) - # - # prefix_fields = filename - # infix_fields = url, domain - - # enable star-syntax (wildcards) when searching prefix/infix indexes - # search-time only, does not affect indexing, can be 0 or 1 - # optional, default is 0 (do not use wildcard syntax) - # - # enable_star = 1 - - # expand keywords with exact forms and/or stars when searching fit indexes - # search-time only, does not affect indexing, can be 0 or 1 - # optional, default is 0 (do not expand keywords) - # - # expand_keywords = 1 - - # n-gram length to index, for CJK indexing - # only supports 0 and 1 for now, other lengths to be implemented - # optional, default is 0 (disable n-grams) - # - ngram_len = 1 - - # n-gram characters list, for CJK indexing - # optional, default is empty - # - ngram_chars = U+4E00..U+9FBB, U+3400..U+4DB5, U+20000..U+2A6D6, U+FA0E, U+FA0F, U+FA11, U+FA13, U+FA14, U+FA1F, U+FA21, U+FA23, U+FA24, U+FA27, U+FA28, U+FA29, U+3105..U+312C, U+31A0..U+31B7, U+3041, U+3043, U+3045, U+3047, U+3049, U+304B, U+304D, U+304F, U+3051, U+3053, U+3055, U+3057, U+3059, U+305B, U+305D, U+305F, U+3061, U+3063, U+3066, U+3068, U+306A..U+306F, U+3072, U+3075, U+3078, U+307B, U+307E..U+3083, U+3085, U+3087, U+3089..U+308E, U+3090..U+3093, U+30A1, U+30A3, U+30A5, U+30A7, U+30A9, U+30AD, U+30AF, U+30B3, U+30B5, U+30BB, U+30BD, U+30BF, U+30C1, U+30C3, U+30C4, U+30C6, U+30CA, U+30CB, U+30CD, U+30CE, U+30DE, U+30DF, U+30E1, U+30E2, U+30E3, U+30E5, U+30E7, U+30EE, U+30F0..U+30F3, U+30F5, U+30F6, U+31F0, U+31F1, U+31F2, U+31F3, U+31F4, U+31F5, U+31F6, U+31F7, U+31F8, U+31F9, U+31FA, U+31FB, U+31FC, U+31FD, U+31FE, U+31FF, U+AC00..U+D7A3, U+1100..U+1159, U+1161..U+11A2, U+11A8..U+11F9, U+A000..U+A48C, U+A492..U+A4C6 - - # phrase boundary characters list - # optional, default is empty - # - # phrase_boundary = ., ?, !, U+2026 # horizontal ellipsis - - # phrase boundary word position increment - # optional, default is 0 - # - # phrase_boundary_step = 100 - - # blended characters list - # blended chars are indexed both as separators and valid characters - # for instance, AT&T will results in 3 tokens ("at", "t", and "at&t") - # optional, default is empty - # - # blend_chars = +, &, U+23 - - # blended token indexing mode - # a comma separated list of blended token indexing variants - # known variants are trim_none, trim_head, trim_tail, trim_both, skip_pure - # optional, default is trim_none - # - # blend_mode = trim_tail, skip_pure - - # whether to strip HTML tags from incoming documents - # known values are 0 (do not strip) and 1 (do strip) - # optional, default is 0 - html_strip = 0 - - # what HTML attributes to index if stripping HTML - # optional, default is empty (do not index anything) - # - # html_index_attrs = img=alt,title; a=title; - - # what HTML elements contents to strip - # optional, default is empty (do not strip element 
contents) - # - # html_remove_elements = style, script - - # whether to preopen index data files on startup - # optional, default is 0 (do not preopen), searchd-only - # - # preopen = 1 - - # whether to keep dictionary (.spi) on disk, or cache it in RAM - # optional, default is 0 (cache in RAM), searchd-only - # - # ondisk_dict = 1 - - # whether to enable in-place inversion (2x less disk, 90-95%% speed) - # optional, default is 0 (use separate temporary files), indexer-only - # - # inplace_enable = 1 - - # in-place fine-tuning options - # optional, defaults are listed below - # - # inplace_hit_gap = 0 # preallocated hitlist gap size - # inplace_docinfo_gap = 0 # preallocated docinfo gap size - # inplace_reloc_factor = 0.1 # relocation buffer size within arena - # inplace_write_factor = 0.1 # write buffer size within arena - - # whether to index original keywords along with stemmed versions - # enables "=exactform" operator to work - # optional, default is 0 - # - # index_exact_words = 1 - - # position increment on overshort (less that min_word_len) words - # optional, allowed values are 0 and 1, default is 1 - # - # overshort_step = 1 - - # position increment on stopword - # optional, allowed values are 0 and 1, default is 1 - # - # stopword_step = 1 - - # hitless words list - # positions for these keywords will not be stored in the index - # optional, allowed values are 'all', or a list file name - # - # hitless_words = all - # hitless_words = hitless.txt - - # detect and index sentence and paragraph boundaries - # required for the SENTENCE and PARAGRAPH operators to work - # optional, allowed values are 0 and 1, default is 0 - # - # index_sp = 1 - - # index zones, delimited by HTML/XML tags - # a comma separated list of tags and wildcards - # required for the ZONE operator to work - # optional, default is empty string (do not index zones) - # - # index_zones = title, h*, th -} - -############################################################################# -## searchd settings -############################################################################# - -searchd -{ - # [hostname:]port[:protocol], or /unix/socket/path to listen on - # known protocols are 'sphinx' (SphinxAPI) and 'mysql41' (SphinxQL) - # - # multi-value, multiple listen points are allowed - # optional, defaults are 9312:sphinx and 9306:mysql41, as below - # - # listen = 127.0.0.1 - # listen = 192.168.0.1:9312 - # listen = 9312 - # listen = /var/run/searchd.sock - listen = %(ip_address)s:%(port)s:sphinx - listen = %(ip_address)s:%(sql_port)s:mysql41 - - # log file, searchd run info is logged here - # optional, default is 'searchd.log' - log = %(searchd_log)s - - # query log file, all search queries are logged here - # optional, default is empty (do not log queries) - query_log = %(query_log)s - - # client read timeout, seconds - # optional, default is 5 - read_timeout = 5 - - # request timeout, seconds - # optional, default is 5 minutes - client_timeout = 300 - - # maximum amount of children to fork (concurrent searches to run) - # optional, default is 0 (unlimited) - max_children = 30 - - # PID file, searchd process ID file name - # mandatory - pid_file = %(pid)s - - # max amount of matches the daemon ever keeps in RAM, per-index - # WARNING, THERE'S ALSO PER-QUERY LIMIT, SEE SetLimits() API CALL - # default is 1000 (just like Google) - max_matches = 1000 - - # seamless rotate, prevents rotate stalls if precaching huge datasets - # optional, default is 1 - seamless_rotate = 1 - - # whether to forcibly preopen all 
indexes on startup - # optional, default is 0 (do not preopen) - preopen_indexes = 0 - - # whether to unlink .old index copies on succesful rotation. - # optional, default is 1 (do unlink) - unlink_old = 1 - - # attribute updates periodic flush timeout, seconds - # updates will be automatically dumped to disk this frequently - # optional, default is 0 (disable periodic flush) - # - # attr_flush_period = 900 - - # instance-wide ondisk_dict defaults (per-index value take precedence) - # optional, default is 0 (precache all dictionaries in RAM) - # - # ondisk_dict_default = 1 - - # MVA updates pool size - # shared between all instances of searchd, disables attr flushes! - # optional, default size is 1M - mva_updates_pool = 1M - - # max allowed network packet size - # limits both query packets from clients, and responses from agents - # optional, default size is 8M - max_packet_size = 8M - - # crash log path - # searchd will (try to) log crashed query to 'crash_log_path.PID' file - # optional, default is empty (do not create crash logs) - # - # crash_log_path = (log_directory) - - # max allowed per-query filter count - # optional, default is 256 - max_filters = 256 - - # max allowed per-filter values count - # optional, default is 4096 - max_filter_values = 4096 - - # socket listen queue length - # optional, default is 5 - # - # listen_backlog = 5 - - # per-keyword read buffer size - # optional, default is 256K - # - # read_buffer = 256K - - # unhinted read size (currently used when reading hits) - # optional, default is 32K - # - # read_unhinted = 32K - - # max allowed per-batch query count (aka multi-query count) - # optional, default is 32 - max_batch_queries = 32 - - # max common subtree document cache size, per-query - # optional, default is 0 (disable subtree optimization) - # - # subtree_docs_cache = 4M - - # max common subtree hit cache size, per-query - # optional, default is 0 (disable subtree optimization) - # - # subtree_hits_cache = 8M - - # multi-processing mode (MPM) - # known values are none, fork, prefork, and threads - # optional, default is fork - # - workers = threads # for RT to work - - # max threads to create for searching local parts of a distributed index - # optional, default is 0, which means disable multi-threaded searching - # should work with all MPMs (ie. 
does NOT require workers=threads) - # - # dist_threads = 4 - - # binlog files path; use empty string to disable binlog - # optional, default is build-time configured data directory - # - binlog_path = # disable logging - # binlog_path = %(data_directory)s # binlog.001 etc will be created there - - # binlog flush/sync mode - # 0 means flush and sync every second - # 1 means flush and sync every transaction - # 2 means flush every transaction, sync every second - # optional, default is 2 - # - # binlog_flush = 2 - - # binlog per-file size limit - # optional, default is 128M, 0 means no limit - # - # binlog_max_log_size = 256M -} diff --git a/slapos/test/recipe/test_postgres.py b/slapos/test/recipe/test_postgres.py index e80a2e570a0aa5f275e9dc2f30b596e885973123..ca09e750195834c768f803b2374eee07f59e7310 100644 --- a/slapos/test/recipe/test_postgres.py +++ b/slapos/test/recipe/test_postgres.py @@ -1,7 +1,6 @@ import os import shutil import tempfile -import textwrap import time import unittest @@ -54,7 +53,7 @@ class PostgresTest(unittest.TestCase): self.addCleanup(server_process.terminate) # wait for server to accept connections - for i in range(60): + for i in range(10): time.sleep(i) try: psycopg2.connect(self.buildout['postgres']['url']).close() @@ -114,3 +113,7 @@ class PostgresTest(unittest.TestCase): class PostgresTestNonStandardPort(PostgresTest): port = 5433 + + +class PostgresTestEmptyPort(PostgresTest): + port = '' diff --git a/slapos/test/recipe/test_request.py b/slapos/test/recipe/test_request.py index aa7e3987c8b1a65c9874be181c3835aac4604af8..c700e63eca36f3b08cd68ae8f2edeee7a268902a 100644 --- a/slapos/test/recipe/test_request.py +++ b/slapos/test/recipe/test_request.py @@ -89,6 +89,35 @@ class RecipeTestMixin(object): partition_parameter_kw=self.called_partition_parameter_kw, shared=False, state='started') + def test_requester_stopped_state_propagated(self): + options = defaultdict(str) + options['return'] = 'anything' + self.buildout['slap-connection']['requested'] = 'stopped' + + self.instance_getConnectionParameter.return_value = self.return_value_empty + + with LogCapture() as log: + self.recipe(self.buildout, "request", options) + log.check() + self.request_instance.assert_called_with( + '', 'RootSoftwareInstance', '', filter_kw={}, + partition_parameter_kw=self.called_partition_parameter_kw, + shared=False, state='stopped') + + def test_requester_destroyed_state_not_propagated(self): + options = defaultdict(str) + options['return'] = 'anything' + self.buildout['slap-connection']['requested'] = 'destroyed' + + self.instance_getConnectionParameter.return_value = self.return_value_empty + + with LogCapture() as log: + self.recipe(self.buildout, "request", options) + log.check() + self.request_instance.assert_called_with( + '', 'RootSoftwareInstance', '', filter_kw={}, + partition_parameter_kw=self.called_partition_parameter_kw, + shared=False, state='started') class RecipeTest(RecipeTestMixin, unittest.TestCase): recipe = request.Recipe diff --git a/software/caddy-frontend/software.cfg b/software/caddy-frontend/software.cfg index 2f6b01a0be8dc62acc4d7d81570c914ca8764d88..76a927a750aead4b954d8e25588b07d43fe1c2af 100644 --- a/software/caddy-frontend/software.cfg +++ b/software/caddy-frontend/software.cfg @@ -63,7 +63,6 @@ eggs = recipe = slapos.recipe.template:jinja2 template = ${:_profile_base_location_}/instance-common.cfg.in rendered = ${buildout:directory}/instance-common.cfg -mode = 0644 context = key develop_eggs_directory buildout:develop-eggs-directory key eggs_directory 
buildout:eggs-directory @@ -129,29 +128,24 @@ htpasswd = ${:bin_directory}/htpasswd recipe = slapos.recipe.template:jinja2 template = ${:_profile_base_location_}/instance.cfg.in rendered = ${buildout:directory}/template.cfg -mode = 0644 context = section software_parameter_dict software-parameter-section [profile-caddy-frontend] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/instance-apache-frontend.cfg.in -mode = 0644 [profile-caddy-replicate] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/instance-apache-replicate.cfg.in -mode = 0644 [profile-kedifa] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/instance-kedifa.cfg.in -mode = 0644 [download-template] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 640 [profile-slave-list] <=download-template @@ -181,7 +175,6 @@ mode = 640 recipe = slapos.recipe.template url = ${:_profile_base_location_}/templates/wrapper.in output = ${buildout:directory}/template-wrapper.cfg -mode = 0644 [template-trafficserver-records-config] <=download-template diff --git a/software/caucase/software.cfg b/software/caucase/software.cfg index 0493bcf2a042dd6d40e34e367cc9e9c9b66b9763..9accf6e698cf91cb09a58e765522ca48dceeeacd 100644 --- a/software/caucase/software.cfg +++ b/software/caucase/software.cfg @@ -10,8 +10,6 @@ parts += [instance-caucased] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -# XXX: following mode should be the default -mode = 644 [instance] recipe = slapos.recipe.template:jinja2 diff --git a/software/cloudooo/software-common.cfg b/software/cloudooo/software-common.cfg index ee19089011747f0aba2d5219291e1e64f47f3a07..d3949743a68f1ed486a252bd7cb78bac1084a854 100644 --- a/software/cloudooo/software-common.cfg +++ b/software/cloudooo/software-common.cfg @@ -97,7 +97,6 @@ template-monitor = ${monitor2-template:rendered} [template-cloudooo-instance] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 640 [versions] argparse = 1.4.0 diff --git a/software/erp5/instance-erp5-input-schema.json b/software/erp5/instance-erp5-input-schema.json index 4af4f5df267638e53f5a2a33d6b907e28b16e4c5..d753eaa4e4d32d70fac917afed68607c1ada8a35 100644 --- a/software/erp5/instance-erp5-input-schema.json +++ b/software/erp5/instance-erp5-input-schema.json @@ -217,7 +217,7 @@ }, "timerserver-interval": { "description": "Timerserver tick period, in seconds, or 0 to disable", - "default": 5, + "default": 1, "type": "number" }, "private-dev-shm": { diff --git a/software/erp5/software.cfg.json b/software/erp5/software.cfg.json index 17bb0bdb5c81ae88a0b0e07f4d2882f97fa297b6..0adf511573badda1b5fbe7a3c1eeba595766e828 100644 --- a/software/erp5/software.cfg.json +++ b/software/erp5/software.cfg.json @@ -5,6 +5,7 @@ "software-type": { "default": { "title": "Default", + "software-type": "default", "description": "No automated database modification (ERP5Site is not automatically created).", "request": "instance-erp5-input-schema.json", "response": "instance-erp5-output-schema.json", diff --git a/software/erp5testnode/software.cfg.json b/software/erp5testnode/software.cfg.json index 6faf1274d48364113a3a2430fdc82efe126b4731..a1d0c1a7e210db61af21d81544d552c009d52c95 100644 --- a/software/erp5testnode/software.cfg.json +++ b/software/erp5testnode/software.cfg.json @@ -5,9 +5,11 @@ "software-type": { "default": { "title": "Default", + "software-type": "default", "description": "Default 
setup for ERP5TestNode Instance.", "request": "instance-erp5testnode-input-schema.json", - "response": "instance-output-schema.json" + "response": "instance-output-schema.json", + "index": 0 } } } diff --git a/software/erp5testnode/testsuite/buildout.hash.cfg b/software/erp5testnode/testsuite/buildout.hash.cfg new file mode 100644 index 0000000000000000000000000000000000000000..a33380315f38f11ba8ea410d5aa6484bc841c86f --- /dev/null +++ b/software/erp5testnode/testsuite/buildout.hash.cfg @@ -0,0 +1,18 @@ +# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax. +# The only allowed lines here are (regexes): +# - "^#" comments, copied verbatim +# - "^[" section beginnings, copied verbatim +# - lines containing an "=" sign which must fit in the following categories. +# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file +# Copied verbatim. +# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported +# by the re-generation script. +# Re-generated. +# - other lines are copied verbatim +# Substitution (${...:...}), extension ([buildout] extends = ...) and +# section inheritance (< = ...) are NOT supported (but you should really +# not need these here). + +[template-erp5testnode] +filename = instance.cfg.in +md5sum = bbfe2f0e83df4d6cd2120c0ef3c483cd diff --git a/software/erp5testnode/testsuite/deploy-test/buildout.hash.cfg b/software/erp5testnode/testsuite/deploy-test/buildout.hash.cfg index cf8362078a03dc76df50ceab348bede1f8abb4b2..9fab2286ad6e8dd237851a5ecdd6280d8047881a 100644 --- a/software/erp5testnode/testsuite/deploy-test/buildout.hash.cfg +++ b/software/erp5testnode/testsuite/deploy-test/buildout.hash.cfg @@ -14,7 +14,7 @@ # not need these here). [deploy-script-controller-script] filename = deploy-script-controller -md5sum = 8288e59eb442c662544daffbf446a033 +md5sum = 8357771b70efd0740561b1cb46f6955e [template-deploy-test] filename = instance.cfg.in diff --git a/software/erp5testnode/testsuite/deploy-test/deploy-script-controller b/software/erp5testnode/testsuite/deploy-test/deploy-script-controller index d11b9809cfd372e1f9ee7a4f9b29379dfd4ded8e..6fc80eac542577d718b00158e71080c163c83cde 100644 --- a/software/erp5testnode/testsuite/deploy-test/deploy-script-controller +++ b/software/erp5testnode/testsuite/deploy-test/deploy-script-controller @@ -83,8 +83,6 @@ function upload () add_checks $LOG_FILE t=`date '+%Y%m%d%H%S'` mv $LOG_FILE ${LOG_FILE}.$t - # just to be sure flush all disk operations before uploading - flush curl -q -X POST --data-urlencode "path=test-script-result/log-file.log.$t" --data-urlencode "content@${LOG_FILE}.$t" http://10.0.2.100/ } diff --git a/software/erp5testnode/testsuite/instance.cfg.in b/software/erp5testnode/testsuite/instance.cfg.in index d8da4c656959cb96f766ea5294dc5074ff606505..7f11577e9ac1c6b5a0cbb2b7f765821afa015181 100644 --- a/software/erp5testnode/testsuite/instance.cfg.in +++ b/software/erp5testnode/testsuite/instance.cfg.in @@ -10,7 +10,7 @@ test = dynamic-template-resilient-test:rendered [dynamic-template-resilient-test] recipe = slapos.recipe.template:jinja2 -template = ${template-resilient-test:location}/${template-resilient-test:filename} +template = ${template-resilient-test:target} rendered = $${buildout:directory}/template-resilient-test.cfg bin-directory = ${buildout:bin-directory} context = @@ -19,4 +19,3 @@ context = key slapparameter_dict slap-configuration:configuration raw bin_directory ${buildout:bin-directory} ${template-resilient-test:extra-context} -mode = 0644 diff --git 
a/software/erp5testnode/testsuite/kvm.cfg b/software/erp5testnode/testsuite/kvm.cfg index 05e0dd235dd386df949f2485253f397fc2b8d2d6..12ac39cc94b29369df91d2342bde1bdeaa2bb9dd 100644 --- a/software/erp5testnode/testsuite/kvm.cfg +++ b/software/erp5testnode/testsuite/kvm.cfg @@ -6,14 +6,9 @@ extends = parts += template-erp5testnode [default-test-image] -recipe = hexagonit.recipe.download -ignore-existing = true -filename = ${:_buildout_section_name_} +recipe = slapos.recipe.build:download url = http://www.nexedi.org/static/slapos/kvm_resiliency_test/virtual.qcow.gz md5sum = dd82c771f6f7738fb4b0fc1330ed8236 -download-only = true -mode = 0644 -location = ${buildout:parts-directory}/${:_buildout_section_name_} [template-resilient-test] filename = instance-kvm-resilient-test.cfg.jinja2 @@ -21,7 +16,7 @@ md5sum = 71ddbdeb8769bcb0ebb3c9407ef7e36c # Ingest extra-context, on a the final template-resilient-test rendering # always ingest raw values. extra-context = - raw default_test_image_url file://${default-test-image:location}/${default-test-image:filename} + raw default_test_image_url file://${default-test-image:target} raw default_test_image_md5sum ${default-test-image:md5sum} diff --git a/software/erp5testnode/testsuite/testsuite.cfg b/software/erp5testnode/testsuite/testsuite.cfg index cdf50653c45250ba6f05d3941af4ba51d8696648..61910ca58d1b9edbbd36c62d92a981faaa64a3de 100644 --- a/software/erp5testnode/testsuite/testsuite.cfg +++ b/software/erp5testnode/testsuite/testsuite.cfg @@ -1,18 +1,14 @@ +[buildout] +extends = buildout.hash.cfg + [template-erp5testnode] recipe = slapos.recipe.template url = ${:_profile_base_location_}/instance.cfg.in -md5sum = d864a32edab3c4f7207a7d4fe6bb7e30 output = ${buildout:directory}/template.cfg -mode = 0644 [template] output = ${buildout:directory}/template-original.cfg [template-resilient-test] -recipe = hexagonit.recipe.download -ignore-existing = true +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 -download-only = true -on-update = true -extra-context = diff --git a/software/gitlab/software.cfg b/software/gitlab/software.cfg index 3c7faa3f0380ea6127cea0437dec867f55858cb6..631e99339cf21adcb79acbd7dfdf267ddb09125e 100644 --- a/software/gitlab/software.cfg +++ b/software/gitlab/software.cfg @@ -397,10 +397,9 @@ destination = ${buildout:directory}/${:_buildout_section_name_} <= download-file [gitlab-demo-backup.git] -recipe = hexagonit.recipe.download +recipe = slapos.recipe.build:download-unpacked url = https://lab.nexedi.com/alain.takoudjou/labdemo.backup/repository/archive.tar.gz?ref=master md5sum = d40e5e211dc9a4e5ada9c0250377c639 -strip-top-level-dir = true [versions] cns.recipe.symlink = 0.2.3 diff --git a/software/grafana/software.cfg b/software/grafana/software.cfg index e4e847fae358d88eac77e2c96c331cd555c59c63..0403c1af177b6e6b8f0ec0820d74b5aa01f23722 100644 --- a/software/grafana/software.cfg +++ b/software/grafana/software.cfg @@ -99,8 +99,6 @@ stop-on-error = true [download-file-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -download-only = true -mode = 0644 [influxdb-config-file] <= download-file-base diff --git a/software/headless-chromium/software.cfg b/software/headless-chromium/software.cfg index 8950608e3a30bc02d329e2e4d99a90f1c84fd7e3..2ce6477b48b120a94379f7a49f325bfbff13952d 100644 --- a/software/headless-chromium/software.cfg +++ b/software/headless-chromium/software.cfg @@ -38,7 +38,6 @@ context = [download-base] recipe = 
slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [instance-headless-chromium] <= download-base diff --git a/software/hellorina/instance-root.cfg.in b/software/hellorina/instance-root.cfg.in index 183b083f3c65f098480c5e5a15ead6a14d729cc1..e7da4b6d284cf7c529b25b996ccad1eaf4a13c9c 100644 --- a/software/hellorina/instance-root.cfg.in +++ b/software/hellorina/instance-root.cfg.in @@ -32,7 +32,7 @@ return = [proxy] recipe = slapos.cookbook:wrapper -command-line = {{ python_executable }} {{ rina_proxy }} ${server:instance-guid} ${:ipv6} ${:port} +command-line = {{ rina_proxy }} ${server:instance-guid} ${:ipv6} ${:port} wrapper-path = ${directory:service}/proxy environment = PATH={{ rina_tools_location }}/bin:%(PATH)s diff --git a/software/hellorina/instance.cfg.in b/software/hellorina/instance.cfg.in index fd1d31b6e7184c5f886c64fd064b723ea19b5f93..507abc52cfa8fc69b13ef0caa1ed88b8b31f579e 100644 --- a/software/hellorina/instance.cfg.in +++ b/software/hellorina/instance.cfg.in @@ -28,7 +28,6 @@ template = {{ instance_root }} extra-context = import urlparse urlparse key ipv6 slap-configuration:ipv6-random - raw python_executable {{ python_executable }} raw rina_proxy {{ rina_proxy }} [server] diff --git a/software/hellorina/software.cfg b/software/hellorina/software.cfg index fd603765d8fab3f6fe17b9d03c97c6db763fa669..5e2ccc24e0cd749a26479e2e09aea6490a425c97 100644 --- a/software/hellorina/software.cfg +++ b/software/hellorina/software.cfg @@ -6,37 +6,50 @@ parts = slapos-cookbook template +[file] +# For old GCC like 4.9.2 on Debian 8. +# XXX: This should be moved to component/rina-tools/buildout.cfg, next to where +# we force use of system GCC. However, our buildout patches are still not +# perfect concerning the processing of += +environment += + CFLAGS=-std=c99 -g -O2 + [template] recipe = slapos.recipe.template:jinja2 template = ${:_profile_base_location_}/instance.cfg.in -md5sum = f6c61225990986d94d0017b07b873aa7 +md5sum = d7506e861ef87977eaa554b8928d2c99 # XXX: "template.cfg" is hardcoded in instanciation recipe rendered = ${buildout:directory}/template.cfg context = key develop_eggs_directory buildout:develop-eggs-directory key eggs_directory buildout:eggs-directory - key python_executable python:executable key rina_tools_location rina-tools:location key instance_root instance-root:target key instance_server instance-server:target - key rina_proxy proxy:target + key rina_proxy proxy:location [download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_buildout_section_name_}.cfg.in -# XXX: following mode should be the default -mode = 644 [instance-root] <= download-base -md5sum = f647054be67998f9eece174f106c4464 +md5sum = 1c0e222aab51dfc598094e972f1d1482 [instance-server] <= download-base md5sum = 88a451b0f7f8def12713b92b91659b98 [proxy] -recipe = slapos.recipe.build:download +recipe = slapos.recipe.build +location = ${buildout:bin-directory}/${:_buildout_section_name_} url = ${:_profile_base_location_}/${:_buildout_section_name_} -mode = 755 md5sum = 78b77a6bda9958f547f7d89b747731e3 +install = + import os, sys + with open(self.download(options['url'], options['md5sum'])) as src, \ + open(options['location'], 'w') as dst: + os.fchmod(dst.fileno(), 0o755) + src.readline() + dst.write('#!%s\n' % sys.executable) + dst.write(src.read()) diff --git a/software/html5as-base/software.cfg b/software/html5as-base/software.cfg index 7777953ca4a2a30d29f9a57fc65e8f8b874e4d2c..ebb57280e30d473ca9b5a487c332509a83523525 100644 
--- a/software/html5as-base/software.cfg +++ b/software/html5as-base/software.cfg @@ -28,7 +28,6 @@ rendered = ${buildout:directory}/template.cfg template = ${:_profile_base_location_}/${:filename} filename = instance.cfg.in md5sum = 310aab063704794065ee3bc8f81fdc70 -mode = 0644 context = section buildout buildout key nginx_location nginx:location @@ -44,28 +43,24 @@ recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} _update_hash_filename_ = instance_html5as.cfg.in md5sum = 9b7ed68551cac5967915979383238669 -mode = 0644 [template_nginx_conf] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} _update_hash_filename_ = templates/nginx_conf.in md5sum = 61dc4c82bf48563228ce4dea6c5c6319 -mode = 0644 [template_launcher] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} _update_hash_filename_ = templates/launcher.in md5sum = 6cb0d64905ae7fc67277c1bf76b86875 -mode = 0644 [template_mime_types] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} _update_hash_filename_ = templates/mime_types.in md5sum = 4ef94a7b458d885cd79ba0b930a5727e -mode = 0644 [extra-eggs] recipe = zc.recipe.egg diff --git a/software/html5as/software.cfg b/software/html5as/software.cfg index d0e01a6e77690796c9c03f74e11ed4107edbaf47..e4e22b22965aad2651ccb3ec62ac67f1bbbb44bb 100644 --- a/software/html5as/software.cfg +++ b/software/html5as/software.cfg @@ -29,7 +29,6 @@ parts = recipe = slapos.recipe.template:jinja2 rendered = ${buildout:directory}/template.cfg template = ${:_profile_base_location_}/${:filename} -mode = 0644 context = section buildout buildout key nginx_location nginx:location @@ -50,7 +49,6 @@ context = [download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 # Download instance_html5as.cfg.in [instance_html5as] diff --git a/software/htmlvalidatorserver/buildout.hash.cfg b/software/htmlvalidatorserver/buildout.hash.cfg index e749d74a7a58b101a7ed7af82b10714ff0e0c992..ea5792f410bef07e9f039e0b5e659c6b0754339f 100644 --- a/software/htmlvalidatorserver/buildout.hash.cfg +++ b/software/htmlvalidatorserver/buildout.hash.cfg @@ -26,4 +26,4 @@ md5sum = dc8b8d03b0af9cd32398d1fe86267bb7 [template] filename = instance.cfg.in -md5sum = 94fc13254c819cba33b03f30251bc469 +md5sum = c5b18ea5d353f96c41294541186f2528 diff --git a/software/htmlvalidatorserver/instance.cfg.in b/software/htmlvalidatorserver/instance.cfg.in index 0532e0c732081ae66453364d91f2a6b1b9a26827..bfaa0898c379b92358ae2c4e9d39ab7bdf663db3 100644 --- a/software/htmlvalidatorserver/instance.cfg.in +++ b/software/htmlvalidatorserver/instance.cfg.in @@ -18,6 +18,7 @@ context = [switch-softwaretype] recipe = slapos.cookbook:switch-softwaretype RootSoftwareInstance = $${:validator} +default = $${:validator} validator = dynamic-template-validator:rendered [slap-configuration] diff --git a/software/htmlvalidatorserver/test/test.py b/software/htmlvalidatorserver/test/test.py index 8196a1cc0cb6348919b6fd4676190fb42ce6b253..f00251974d6eb18fc59a1481e5017572e90064c8 100644 --- a/software/htmlvalidatorserver/test/test.py +++ b/software/htmlvalidatorserver/test/test.py @@ -39,7 +39,6 @@ setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass( class TestHtmlValidatorServer(InstanceTestCase): - def test(self): parameter_dict = self.computer_partition.getConnectionParameterDict() @@ -68,3 +67,8 @@ class 
TestHtmlValidatorServer(InstanceTestCase): [httplib.UNAUTHORIZED, False], [result.status_code, result.is_redirect] ) + +class TestHtmlValidatorServerInsideWebRunner(TestHtmlValidatorServer): + @classmethod + def getInstanceSoftwareType(cls): + return 'default' diff --git a/software/hugo/software.cfg b/software/hugo/software.cfg index 00640fff021dd236544cd353dd902887c0dae037..0bc099845f9c5aed23789c7ffd92048a6f50f1e5 100644 --- a/software/hugo/software.cfg +++ b/software/hugo/software.cfg @@ -29,7 +29,6 @@ template_monitor = ${monitor-template:rendered} recipe = slapos.recipe.template:jinja2 rendered = ${buildout:directory}/template.cfg template = ${:_profile_base_location_}/${:filename} -mode = 0644 context = section buildout buildout section parameter_list profile-common @@ -37,7 +36,6 @@ context = [download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template_nginx_conf] <= download-base diff --git a/software/jscrawler/software.cfg b/software/jscrawler/software.cfg index c1bc67c7b2bf9ebb0c3c212ca2e462b11892a57b..633e0f66b8d1a5c5119c4d985041e3e4d8a99a73 100644 --- a/software/jscrawler/software.cfg +++ b/software/jscrawler/software.cfg @@ -36,13 +36,11 @@ update-command = ${:command} [download-template] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 [instance] recipe = slapos.recipe.template:jinja2 rendered = ${buildout:directory}/instance.cfg template = ${:_profile_base_location_}/${:filename} -mode = 0644 context = key bash_location bash:location key bin_directory buildout:bin-directory diff --git a/software/jupyter/buildout.hash.cfg b/software/jupyter/buildout.hash.cfg index ffa8bf117870101ebd31813e5f95d405514417e7..1a19b1ee4893f00aa3a9298974296af65717a017 100644 --- a/software/jupyter/buildout.hash.cfg +++ b/software/jupyter/buildout.hash.cfg @@ -19,7 +19,7 @@ md5sum = de37ec3d4adb0be4c67bcc7397f27c91 [instance-jupyter] filename = instance-jupyter.cfg.in -md5sum = 95e3da48abdd257fb9d5dbdf14ea87b9 +md5sum = 1136bb108aa8d703e0ecf2005b4a1f28 [jupyter-notebook-config] filename = jupyter_notebook_config.py.jinja diff --git a/software/jupyter/instance-jupyter.cfg.in b/software/jupyter/instance-jupyter.cfg.in index 4979eadd457551d3d5e8e43419e488792de99fde..ce17ff7eb0e94ab15bfcee785c140e8af2d65e63 100644 --- a/software/jupyter/instance-jupyter.cfg.in +++ b/software/jupyter/instance-jupyter.cfg.in @@ -55,10 +55,6 @@ frontend-additional-instance-name = Jupyter Frontend Additional {{ k }} = {{ v }} {% endfor -%} -[dynamic-jinja2-template-base] -recipe = slapos.recipe.template:jinja2 -mode = 0644 - [generate-certificate] ; TODO: there is a slapos recipe to generate certificates. 
Use it instead recipe = plone.recipe.command @@ -98,10 +94,9 @@ recipe = slapos.cookbook:generate.password bytes = 10 [jupyter-notebook-config] -<= dynamic-jinja2-template-base +recipe = slapos.recipe.template:jinja2 template = {{ jupyter_config_location }}/{{ jupyter_config_filename }} rendered = ${directory:jupyter_config_dir}/jupyter_notebook_config.py -mode = 0744 context = key password jupyter-password:passwd raw gcc_location {{ gcc_location }} @@ -192,7 +187,7 @@ link-binary = {{ erp5_kernel_location }}/{{ erp5_kernel_filename }} target-directory = ${directory:erp5_kernel_dir} [kernel-json] -<= dynamic-jinja2-template-base +recipe = slapos.recipe.template:jinja2 template = {{ kernel_json_location }}/{{ kernel_json_filename }} rendered = ${directory:erp5_kernel_dir}/kernel.json # Use python2.7 executable bin file for kernel config diff --git a/software/jupyter/software.cfg b/software/jupyter/software.cfg index ecdb2d83c1a854f137eb0059c010da238ba1b0a7..9eb92b1507814e3f42cd08c7107660ba973a0a6e 100644 --- a/software/jupyter/software.cfg +++ b/software/jupyter/software.cfg @@ -25,8 +25,6 @@ python_executable = ${buildout:bin-directory}/${:interpreter} [download-file-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -download-only = true -mode = 0644 [jupyter-notebook-config] <= download-file-base diff --git a/software/kvm/buildout.hash.cfg b/software/kvm/buildout.hash.cfg index 740f8c34e76bc251d53bd989105f9f44755b9fa1..7138d1471087bdef0442ed7bffe5b8e8fd57a281 100644 --- a/software/kvm/buildout.hash.cfg +++ b/software/kvm/buildout.hash.cfg @@ -15,11 +15,11 @@ [template] filename = instance.cfg.in -md5sum = f2b0f1ed27148504f220e06eaceff935 +md5sum = 087f226ba90928dcc5a722d7008c867a [template-kvm] filename = instance-kvm.cfg.jinja2 -md5sum = 93cbee3403e7e23b4278143c32209ddc +md5sum = b260fce887535fc69a259d1fd31af1b2 [template-kvm-cluster] filename = instance-kvm-cluster.cfg.jinja2.in @@ -51,7 +51,7 @@ md5sum = 4bcb07c1a9223e2d956651aa25d23654 [template-ansible-promise] filename = template/ansible-promise.in -md5sum = b7e87479a289f472b634a046b44b5257 +md5sum = a8cf453d20f01c707f02c4b4014580d8 [template-kvm-run] filename = template/template-kvm-run.in @@ -78,17 +78,17 @@ filename = instance-kvm-http.cfg.in md5sum = d57764bb7135037b4d21543b2f56ce1d [image-download-controller] -_update_hash_filename_ = template/image-download-controller.py +filename = template/image-download-controller.py md5sum = 9c67058edcc4edae0b57956c0932a9fc [image-download-config-creator] -_update_hash_filename_ = template/image-download-config-creator.py +filename = template/image-download-config-creator.py md5sum = 54261e418ab9860efe73efd514c4d47f [whitelist-firewall-download-controller] -_update_hash_filename_ = template/whitelist-firewall-download-controller.py +filename = template/whitelist-firewall-download-controller.py md5sum = bc64e29546833817636261d1b28aa6dc [whitelist-domains-default] -_update_hash_filename_ = template/whitelist-domains-default +filename = template/whitelist-domains-default md5sum = e9d40162ba77472775256637a2617d14 diff --git a/software/kvm/instance-kvm.cfg.jinja2 b/software/kvm/instance-kvm.cfg.jinja2 index af4c95111170c2c278f7c30964d4ebe52a535bfd..5525b0d071608d9d1a40963294dfa8e79d6f0752 100644 --- a/software/kvm/instance-kvm.cfg.jinja2 +++ b/software/kvm/instance-kvm.cfg.jinja2 @@ -84,7 +84,8 @@ storage-path = ${directory:srv}/tap_mac [gen-passwd] recipe = slapos.cookbook:generate.password -storage-path = ${directory:srv}/passwd +storage-path = 
${directory:srv}/.passwd +# VNC protocol supports passwords of 8 characters max bytes = 8 {% if boot_image_url_select_enabled %} @@ -94,10 +95,7 @@ bytes = 8 promise = check_file_state name = ${:_buildout_section_name_}.py config-state = empty -# It's very hard to put the username and password correctly, after schema:// -# and before the host, as it's not the way how one can use monitor provided -# information, so just show the information in the URL -config-url = ${monitor-base:base-url}/private/boot-image-url-select/${:filename} with username ${monitor-publish-parameters:monitor-user} and password ${monitor-publish-parameters:monitor-password} +config-url = ${monitor-base:base-url}/private/boot-image-url-select/${:filename} [boot-image-url-select-source-config] recipe = slapos.recipe.template:jinja2 @@ -193,10 +191,7 @@ config-filename = ${boot-image-url-select-download-wrapper:error-state-file} promise = check_file_state name = ${:_buildout_section_name_}.py config-state = empty -# It's very hard to put the username and password correctly, after schema:// -# and before the host, as it's not the way how one can use monitor provided -# information, so just show the information in the URL -config-url = ${monitor-base:base-url}/private/boot-image-url-list/${:filename} with username ${monitor-publish-parameters:monitor-user} and password ${monitor-publish-parameters:monitor-password} +config-url = ${monitor-base:base-url}/private/boot-image-url-list/${:filename} [boot-image-url-list-source-config] recipe = slapos.recipe.template:jinja2 @@ -292,10 +287,7 @@ config-filename = ${boot-image-url-list-download-wrapper:error-state-file} promise = check_file_state name = ${:_buildout_section_name_}.py config-state = empty -# It's very hard to put the username and password correctly, after schema:// -# and before the host, as it's not the way how one can use monitor provided -# information, so just show the information in the URL -config-url = ${monitor-base:base-url}/private/virtual-hard-drive-url/${:filename} with username ${monitor-publish-parameters:monitor-user} and password ${monitor-publish-parameters:monitor-password} +config-url = ${monitor-base:base-url}/private/virtual-hard-drive-url/${:filename} [virtual-hard-drive-url-source-config] recipe = slapos.recipe.template:jinja2 @@ -768,7 +760,7 @@ sla-instance_guid = ${slap-parameter:frontend-instance-guid} <= monitor-promise-base promise = check_url_available name = frontend_promise.py -config-url = ${publish-connection-information:url} +config-url = ${request-slave-frontend:connection-secure_access} {% if additional_frontend %} [request-slave-frontend-additional] @@ -909,9 +901,9 @@ command = ip a | grep eth0: && [ \$IFACE = ens3 ] && IFACE=eth0 ip a | grep eth1: && [ \$IFACE = ens4 ] && IFACE=eth1 ${:ipv4-add-address} + ${:ipv4-set-link-up} ${:ipv4-add-gateway-route} ${:ipv4-add-default-route} - ${:ipv4-set-link-up} EOF update-command = ${:command} {% endif -%} @@ -930,8 +922,8 @@ command = #!/bin/sh IFACE={{ iface }} ${:ipv6-add-address} - ${:ipv6-add-default-route} ${:ipv6-set-link-up} + ${:ipv6-add-default-route} EOF update-command = ${:command} {% endif -%} diff --git a/software/kvm/instance.cfg.in b/software/kvm/instance.cfg.in index 2d00f42c8bb90766f53a1f20911a9d226cdd284c..eba44563fb4442a52479fedb54c11b790d1f7abf 100644 --- a/software/kvm/instance.cfg.in +++ b/software/kvm/instance.cfg.in @@ -67,7 +67,7 @@ filename = template-kvm-cluster.cfg extra-context = section parameter_dict dynamic-template-kvm-cluster-parameters raw 
logrotate_cfg ${template-logrotate-base:rendered} - raw template_content ${template-content:location}/${template-content:filename} + raw template_content ${template-content:target} raw template_httpd_cfg ${template-httpd:rendered} raw template_monitor ${monitor2-template:rendered} @@ -77,13 +77,13 @@ template = ${template-kvm:location}/instance-kvm.cfg.jinja2 filename = template-kvm.cfg extra-context = section slap_configuration slap-configuration - raw ansible_promise_tpl ${template-ansible-promise:location}/${template-ansible-promise:filename} + raw ansible_promise_tpl ${template-ansible-promise:target} raw curl_executable_location ${curl:location}/bin/curl raw dash_executable_location ${dash:location}/bin/dash raw dnsresolver_executable ${buildout:bin-directory}/dnsresolver raw dcron_executable_location ${dcron:location}/sbin/crond raw debian_amd64_netinst_location ${debian-amd64-bullseye-netinst.iso:target} - raw whitelist_domains_default ${whitelist-domains-default:location}/${whitelist-domains-default:filename} + raw whitelist_domains_default ${whitelist-domains-default:target} raw whitelist_firewall_download_controller ${whitelist-firewall-download-controller:target} raw image_download_controller ${image-download-controller:target} raw image_download_config_creator ${image-download-config-creator:target} @@ -94,12 +94,12 @@ extra-context = raw python_eggs_executable ${buildout:bin-directory}/${python-with-eggs:interpreter} raw qemu_executable_location ${qemu:location}/bin/qemu-system-x86_64 raw qemu_img_executable_location ${qemu:location}/bin/qemu-img - raw qemu_start_promise_tpl ${template-qemu-ready:location}/${template-qemu-ready:filename} + raw qemu_start_promise_tpl ${template-qemu-ready:target} raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel raw template_httpd_cfg ${template-httpd:rendered} - raw template_content ${template-content:location}/${template-content:filename} - raw template_kvm_controller_run ${template-kvm-controller:location}/${template-kvm-controller:filename} - raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename} + raw template_content ${template-content:target} + raw template_kvm_controller_run ${template-kvm-controller:target} + raw template_kvm_run ${template-kvm-run:target} raw template_monitor ${monitor2-template:rendered} raw websockify_executable_location ${buildout:directory}/bin/websockify raw wipe_disk_wrapper ${buildout:directory}/bin/securedelete @@ -134,7 +134,7 @@ context = key develop_eggs_directory buildout:develop-eggs-directory key eggs_directory buildout:eggs-directory raw kvm_template $${dynamic-template-kvm:rendered} - raw template_kvm_export ${template-kvm-export-script:location}/${template-kvm-export-script:filename} + raw template_kvm_export ${template-kvm-export-script:target} key pbsready_export_template template-pbsready-export:rendered raw gzip_binary ${gzip:location}/bin/gzip key slapparameter_dict slap-configuration:configuration @@ -149,7 +149,7 @@ context = key develop_eggs_directory buildout:develop-eggs-directory key eggs_directory buildout:eggs-directory raw qemu_location ${qemu:location} - raw template_kvm_import ${template-kvm-import-script:location}/${template-kvm-import-script:filename} + raw template_kvm_import ${template-kvm-import-script:target} key pbsready_import_template template-pbsready-import:rendered key slapparameter_dict slap-configuration:configuration raw zcat_binary ${gzip:location}/bin/zcat diff --git a/software/kvm/monitor/ansibleReport.py 
b/software/kvm/monitor/ansibleReport.py deleted file mode 100644 index 76a3ec83bfcc0db13278e9a0888e95b0d931fc1c..0000000000000000000000000000000000000000 --- a/software/kvm/monitor/ansibleReport.py +++ /dev/null @@ -1,204 +0,0 @@ -#!/usr/bin/env python - -# Parse Ansible result log file and and generate a report - -import os, time -import json -import sqlite3 -from datetime import datetime, timedelta - -import sys - -FIELDS = ['cmd', 'command', 'start', 'end', 'delta', 'msg', 'stdout', 'stderr', - 'response', 'status_code', 'url', 'dest'] - -class ansibleReport(object): - - def __init__(self, db_path, - ansible_log_dir, - name): - self.db_path = db_path - self.ansible_log_dir = ansible_log_dir - self.name = name - self.result_OK = '127.0.0.1_OK' - self.result_failed = '127.0.0.1_FAILED' - self.result_failed_ignore = '127.0.0.1_FAILED_INGORED' # tipo in ansible log upload pluging - self.date_format = '%Y-%m-%d %H:%M:%S' - self.day_format = '%Y-%m-%d' - - self._init_db() - - def _init_db(self): - db = sqlite3.connect(self.db_path) - c = db.cursor() - c.executescript(""" -CREATE TABLE IF NOT EXISTS ansible_report ( - name VARCHAR(40), - reportdate VARCHAR(15), - createdate VARCHAR(15), - status VARCHAR(20), - success_count INTEGER, - ignored_count INTEGER, - failed_count INTEGER, - ignored TEXT, - failed TEXT, - success TEXT); -""") - db.commit() - db.close() - - def connect_db(self): - db = sqlite3.connect(self.db_path) - return db - - def insertEntryDb(self, table_name, data_dict): - db = self.connect_db() - columns = data_dict.keys() - entries = ', '.join(columns) - values = '?' + ', ?' * (len(columns)-1) - sql_string = "insert into %s(%s) values (%s)" % ( - table_name, entries, values) - tuple_data = () - for key in columns: - tuple_data += (data_dict[key],) - db.execute(sql_string, tuple_data) - db.commit() - db.close() - - def selectEntriesDb(self, fields=[], start_date=None, limit=0, success=None, order='DESC', where=""): - db = self.connect_db() - - entries = ', '.join(fields) if fields else '*' - query = "select %s from ansible_report " % entries - where = " and %s" % where if where else "" - if not start_date: - start_date = datetime.utcnow().strftime(self.day_format) - tuple_values = (start_date,) - if success is not None: - status = 'OK' if success else 'FAILLED' - query += "where createdate>=? and status=? %s order by createdate %s" % (where, order) - tuple_values += (status,) - else: - query += "where createdate>=? %s order by createdate %s" % (where, order) - if limit: - query += " limit ?" - tuple_values += (limit,) - - rows = db.cursor().execute(query, tuple_values) - #db.close() - if rows: - return [list(row) for row in rows] - return [] - - def truncateEntriesDb(self, table_name, on_field, to_value, operator='<'): - db = self.connect_db() - query = "delete from %s where %s%s?" 
% (table_name, on_field, - operator) - db.execute(query, (to_value,)) - db.commit() - db.close() - - def getLogString(self, res, head=False): - log = "" - if type(res) == type(dict()): - log = '%s, args [%s]\n' % (res['invocation']['module_name'], - res['invocation']['module_args']) - if head: - return log - for field in FIELDS: - if field in res.keys(): - # use default encoding, check out sys.setdefaultencoding - log += '\n{0}:\n{1}'.format(field, res[field]) - - return log - - def _read_file(self, filepath): - content = '[]' - with open(filepath, 'r') as f: - content = f.read() - return content - - def saveResult(self): - - date = datetime.utcnow().strftime(self.date_format) - - files_list = os.listdir(self.ansible_log_dir) - if not len(files_list): - return - - to_date = (datetime.now() - timedelta(days=2)).strftime(self.date_format) - cmp_file = os.path.join(self.ansible_log_dir, files_list.pop()) - modification_date = datetime.fromtimestamp( - os.path.getmtime(cmp_file) - ).strftime(self.date_format) - # Get the latest insert date - result = self.selectEntriesDb(['reportdate'], start_date=to_date, limit=1) - if len(result): - latest_date = result[0][0] - if latest_date >= modification_date: - return - - file_map = dict( - success_file=os.path.join(self.ansible_log_dir, self.result_OK), - failed_file=os.path.join(self.ansible_log_dir, self.result_failed), - ignored_file=os.path.join(self.ansible_log_dir, self.result_failed_ignore)) - - data = dict(name=self.name, status='FAILED', - reportdate=modification_date, createdate=date, - success_count=0, ignored_count=0, - failed_count=0, success="", - failed="", ignored="") - for category in ('failed', 'success', 'ignored'): - file_category = file_map['%s_file' % category] - if os.path.exists(file_category): - text_content = self._read_file(file_category) - count = len(json.loads(text_content)) - if count > 0: - data['%s_count' % category] = count - data[category] = text_content - - if data['failed_count'] == 0: - data['status'] = 'OK' - - self.insertEntryDb('ansible_report', data) - - def getAnsibleReport(self, start_date=None, limit=0, success=None, order='DESC', category=None, head=False, only_state=True): - """Get one or many entries from the ansible report table. - """ - where = "" - get_content = category is not None - fields = ['name', 'reportdate', 'createdate', 'status', 'success_count', - 'ignored_count', 'failed_count'] - if category: - where = " %s_count>0" % category - if not only_state: - fields.append(category) - - rows = self.selectEntriesDb(fields=fields, start_date=start_date, - limit=limit, success=success, order=order, - where=where) - result_dict = {} - if category and not only_state: - last_pos = len(fields) -1 - for i in range (0, len(rows)): - message = "" - message_list = json.loads(rows[i][last_pos]) - for msg in message_list: - message += '%s\n\n' % self.getLogString(msg, head=head) - rows[i][last_pos] = message - else: - return {} - return rows - - -if __name__ == "__main__": - json = """{ - "status": "OK", - "message": "kvm-1: OK(114) FAILED(0) IGNORED(2)", - "description": "Ansible playbook report in kvm-1. Execution date is: 2015-08-28 17:42:01." 
-}""" - parameter_dict = json.loads(sys.argv[1]) - with open(parameter_dict['status_path'], 'w') as status_file: - status_file.write(json) - - \ No newline at end of file diff --git a/software/kvm/software.cfg b/software/kvm/software.cfg index 0ac9b187e56dd0891739daa19f864a5c8425f08a..58504748947654b2a460d48eb19b74edb8932895 100644 --- a/software/kvm/software.cfg +++ b/software/kvm/software.cfg @@ -58,95 +58,55 @@ scripts = # Create all templates that will be used to deploy instances [download-base] -recipe = hexagonit.recipe.download +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 - -[download-file-base] -<= download-base -ignore-existing = true -download-only = true - -[download-template-base] -<= download-file-base -url = ${:_profile_base_location_}/template/${:path} -path = ${:filename} -[template-file-base] +[template] recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} -mode = 0644 - -[template] -<= template-file-base output = ${buildout:directory}/template.cfg [template-kvm] -<= download-file-base -on-update = true +<= download-base [template-kvm-cluster] -<= download-file-base -on-update = true +<= download-base [template-kvm-resilient] -<= download-file-base -on-update = true +<= download-base [template-kvm-import] -<= download-file-base -on-update = true +<= download-base [template-kvm-import-script] -<= download-template-base -filename = kvm-import.sh.jinja2 -mode = 0755 +<= download-base [template-kvm-export] -<= download-file-base -on-update = true +<= download-base [template-kvm-export-script] -<= download-template-base -filename = kvm-export.sh.jinja2 -mode = 0755 +<= download-base [template-nbd] -<= download-file-base -on-update = true +<= download-base [template-ansible-promise] -<= download-template-base -filename = ansible-promise.in +<= download-base [template-kvm-run] -<= download-template-base -filename = template-kvm-run.in -on-update = true +<= download-base [template-kvm-controller] -<= download-template-base -filename = kvm-controller-run.in -on-update = true +<= download-base [template-apache-conf] -<= download-template-base -filename = apache.conf.in -on-update = true +<= download-base [template-content] -<= download-template-base -filename = template-content.in -on-update = true +<= download-base [template-qemu-ready] -<= download-template-base -filename = qemu-is-ready.in -on-update = true - -[whitelist-domains-default] -<= download-template-base -filename = whitelist-domains-default +<= download-base [template-httpd] recipe = slapos.recipe.template:jinja2 @@ -155,24 +115,22 @@ rendered = ${buildout:parts-directory}/${:_buildout_section_name_}/instance-kvm- context = key apache_location apache:location raw openssl_executable_location ${openssl:location}/bin/openssl - raw template_apache_conf ${template-apache-conf:location}/${template-apache-conf:filename} + key template_apache_conf template-apache-conf:target [image-download-controller] -recipe = slapos.recipe.build:download -url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 640 +<= download-base [image-download-config-creator] -<= image-download-controller +<= download-base [whitelist-firewall-download-controller] -<= image-download-controller +<= download-base +[whitelist-domains-default] +<= download-base [versions] websockify = 0.9.0 - -collective.recipe.environment = 0.2.0 gitdb = 0.6.4 pycurl = 7.43.0 smmap = 0.9.0 diff --git a/software/kvm/template/ansible-promise.in 
b/software/kvm/template/ansible-promise.in
index 6b2cccda94ff6608aa8fa2cfee7f8708f21e26a2..e71d10bef2559607cbd5684c2621cdca2a916c9f 100644
--- a/software/kvm/template/ansible-promise.in
+++ b/software/kvm/template/ansible-promise.in
@@ -17,11 +17,12 @@ result_failed_ignore = '127.0.0.1_FAILED_IGNORED'
 def get_log(res):
   log = ""
   if type(res) == type(dict()):
-    log = '>> Running task: %s, args [%s]\n' % (res['invocation']['module_name'],
-                                                res['invocation']['module_args'])
+    module_name = res['invocation'].get('module_name', 'unknown_module_name')
+    module_args = res['invocation'].get('module_args', 'unknown_module_args')
+    log = '>> Running task: %s, args [%s]\n' % (module_name, module_args)
   for field in FIELDS:
     if field in res.keys():
-      # use default encoding, check out sys.setdefaultencoding
+      # use default encoding, check out sys.setdefaultencoding
       log += '\n{0}:\n{1}'.format(field, res[field])
 
   return log
diff --git a/software/kvm/test/test.py b/software/kvm/test/test.py
index a7859b33e56f3de90b97f346016c98d407ea429c..f74f261d0c80623c69783e042462792511e192f1 100644
--- a/software/kvm/test/test.py
+++ b/software/kvm/test/test.py
@@ -1437,7 +1437,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
     self.image_download_controller = os.path.join(
       self.slap.instance_directory, self.__partition_reference__ + '0',
       'software_release', 'parts', 'image-download-controller',
-      'image-download-controller')
+      'image-download-controller.py')
 
   def tearDown(self):
     self.stopImageHttpServer()
diff --git a/software/lamp-template/software.cfg b/software/lamp-template/software.cfg
index 87e0df2cc3cb3cba30be071ef652ddeee11b05b7..db34c942487119cb855ebe0ef0c1a2d84e012658 100644
--- a/software/lamp-template/software.cfg
+++ b/software/lamp-template/software.cfg
@@ -15,7 +15,6 @@ recipe = slapos.recipe.build:download
 url = ${:_profile_base_location_}/template/REPLACE_ME_BY_APPLICATION_TEMPLATE_NAME
 #md5sum = Student may put here md5sum of this file, this is good idea
 filename = template.in
-mode = 0644
 location = ${buildout:parts-directory}/${:_buildout_section_name_}
 
 [application-configuration]
diff --git a/software/metabase/software.cfg b/software/metabase/software.cfg
index e8ed1dbea5a0ebfc6335e4e70f2f78c78a2d84f0..7bb24c0e011b7ae26941c9e093c961fff5eaea56 100644
--- a/software/metabase/software.cfg
+++ b/software/metabase/software.cfg
@@ -22,8 +22,8 @@ part = python3
 
 [metabase.jar]
 recipe = slapos.recipe.build:download
-url = https://downloads.metabase.com/v0.41.2/metabase.jar
-md5sum = 630068d1ccbdc95556931fe9cfc12e61
+url = https://downloads.metabase.com/v0.41.4/metabase.jar
+md5sum = 9b81838e5c40302b552c66df5a767f8e
 
 [instance-profile]
 recipe = slapos.recipe.template
diff --git a/software/monitor/buildout.hash.cfg b/software/monitor/buildout.hash.cfg
index d76c2755a7c18c7a6fb5f038e0dcf67fa23af7b3..804c64129c593f27220ec53a5678fff87fc7fed6 100644
--- a/software/monitor/buildout.hash.cfg
+++ b/software/monitor/buildout.hash.cfg
@@ -14,7 +14,7 @@ # not need these here).
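The md5sum bump just below exists only because software/monitor/instance.cfg now reads ${monitor-collect-csv-dump:target} instead of ${monitor-collect-csv-dump:output}. As a side note, here is a minimal sketch of how such a digest can be recomputed before refreshing a buildout.hash.cfg entry; the path is purely illustrative and this is not the actual SlapOS update-hash tool:

```python
import hashlib

def file_md5(path, chunk_size=1 << 20):
    """Return the hex md5 digest of a file, as recorded in buildout.hash.cfg entries."""
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Example (illustrative path): compare the current digest with the recorded md5sum.
print(file_md5('software/monitor/instance.cfg'))
```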
[template] filename = instance.cfg -md5sum = b6c2df0d4a62473d6dae26b10c0a4adc +md5sum = 37966f5f7c6b82137c0388ca3520cb71 [template-monitor] _update_hash_filename_ = instance-monitor.cfg.jinja2 diff --git a/software/monitor/instance.cfg b/software/monitor/instance.cfg index 4264899a0bf49401cd0cc1c4aba143371dffc111..bd9da0515d0912941d26dbb442441e338667cbb3 100644 --- a/software/monitor/instance.cfg +++ b/software/monitor/instance.cfg @@ -25,7 +25,7 @@ context = key develop_eggs_directory buildout:develop-eggs-directory raw buildout_bin ${buildout:bin-directory} raw monitor_template_output ${monitor-template:output} raw network_benck_cfg_output ${network-bench-cfg:output} - raw monitor_collect_csv_dump ${monitor-collect-csv-dump:output} + raw monitor_collect_csv_dump ${monitor-collect-csv-dump:target} mode = 0644 [instance-template] @@ -93,7 +93,7 @@ context = import json_module json key template_surykatka_ini surykatka:ini raw buildout_bin ${buildout:bin-directory} raw monitor_template_output ${monitor-template:output} - raw monitor_collect_csv_dump ${monitor-collect-csv-dump:output} + raw monitor_collect_csv_dump ${monitor-collect-csv-dump:target} mode = 0644 diff --git a/software/monitor/software.cfg b/software/monitor/software.cfg index 0348dcb9d7c6401106d0dc0b11e91d22d4d95ad1..698d223c2233b4c55ada12abafaa7e611926f56d 100644 --- a/software/monitor/software.cfg +++ b/software/monitor/software.cfg @@ -15,7 +15,6 @@ parts = template template-monitor-edgetest template-monitor - monitor-collect-csv-dump [python] part = python3 @@ -24,27 +23,22 @@ part = python3 recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} output = ${buildout:directory}/template.cfg -mode = 0644 [template-monitor] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template-monitor-edgetest-basic] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template-monitor-edgetest] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template-monitor-edgebot] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template-surykatka-ini] recipe = slapos.recipe.build:download @@ -53,19 +47,15 @@ url = ${:_profile_base_location_}/${:_update_hash_filename_} [json-test-template] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [network-bench-cfg] recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} output = ${buildout:parts-directory}/${:_buildout_section_name_} -mode = 0644 [monitor-collect-csv-dump] -<= monitor-template-script -url = ${:_profile_base_location_}/script/${:filename} -filename = collect_csv_dump.py -output = ${:destination}/${:filename} +recipe = slapos.recipe.build:download +url = ${:_profile_base_location_}/${:filename} [surykatka] recipe = zc.recipe.egg @@ -76,7 +66,6 @@ recipe = zc.recipe.egg eggs += slapos.cookbook slapos.recipe.cmmi - hexagonit.recipe.download plone.recipe.command scripts = diff --git a/software/neoppod/software-common.cfg b/software/neoppod/software-common.cfg index a5b559208186a2976021f2e909dc8f45a75b4a07..3b9297f64a29f96838ac7553517e6e143b35e4ee 100644 --- a/software/neoppod/software-common.cfg +++ b/software/neoppod/software-common.cfg @@ -59,8 +59,6 @@ adapter-egg = ${python-mysqlclient:egg} [download-base-neo] recipe = 
slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -# XXX: following mode should be the default -mode = 644 # XXX: must be rendered, not just dled [instance-common] @@ -127,7 +125,6 @@ template = [versions] coverage = 4.5.1 -mock = 3.0.5 ecdsa = 0.13 mysqlclient = 1.3.12 pycrypto = 2.6.1 diff --git a/software/nextcloud/software.cfg b/software/nextcloud/software.cfg index 08abea84006c2a662927e7b4255520da3bc360fd..59151e5aea5134a8c566bcb0fc46fc3e0c70112b 100644 --- a/software/nextcloud/software.cfg +++ b/software/nextcloud/software.cfg @@ -4,26 +4,22 @@ extends = ../../component/redis/buildout.cfg ../../stack/lamp/buildout.cfg -[nc-download-base] -recipe = hexagonit.recipe.download -ignore-existing = true -download-only = true +[nc-download] +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 - [application] url = https://download.nextcloud.com/server/releases/nextcloud-16.0.3.tar.bz2 md5sum = d81902d2dec5d547779bec6336a438be [template-nextcloud-install.sh] -<= nc-download-base +<= nc-download [template-apache-httpd] -<= nc-download-base +<= nc-download [template-nextcloud-config.json] -<= nc-download-base +<= nc-download [template-nextcloud-instance] recipe = slapos.recipe.template:jinja2 @@ -37,48 +33,46 @@ context = key php_location apache-php:location raw redis_bin ${redis:location}/bin/redis-server raw redis_cli ${redis:location}/bin/redis-cli - raw nextcloud_install_sh ${template-nextcloud-install.sh:location}/${template-nextcloud-install.sh:filename} - raw nextcloud_apache_httpd ${template-apache-httpd:location}/${template-apache-httpd:filename} - raw nextcloud_parameter_json ${template-nextcloud-config.json:location}/${template-nextcloud-config.json:filename} + key nextcloud_install_sh template-nextcloud-install.sh:target + key nextcloud_apache_httpd template-apache-httpd:target + key nextcloud_parameter_json template-nextcloud-config.json:target section nextcloud_apps nextcloud-apps [custom-application-deployment] path = ${template-nextcloud-instance:rendered} part-list = nextcloud-install.sh +[nc-download-unpacked] +recipe = slapos.recipe.build:download-unpacked +shared = true + [news-updater] -recipe = hexagonit.recipe.download +<= nc-download-unpacked url = https://github.com/nextcloud/news-updater/archive/10.0.1.tar.gz md5sum = 37387199c0482e08d01e9294cd95eaad -strip-top-level-dir = true - -[nc-download-app] -<= nc-download-base -download-only = false -strip-top-level-dir = true [nextcloud-app-spreed] -<= nc-download-app +<= nc-download-unpacked url = https://github.com/nextcloud/spreed/releases/download/v6.0.5/spreed-6.0.5.tar.gz md5sum = 002c09e543edc141f6ca848782573376 [nextcloud-app-richdocuments] -<= nc-download-app +<= nc-download-unpacked url = https://github.com/nextcloud/richdocuments/releases/download/v3.7.17/richdocuments.tar.gz md5sum = 5559cd14a4a0a93d2a39b260538839f8 [nextcloud-app-calendar] -<= nc-download-app +<= nc-download-unpacked url = https://github.com/nextcloud/calendar/releases/download/v1.7.3/calendar.tar.gz md5sum = ab398d943eb6939e3e71df5b1a1abf87 [nextcloud-app-rainloop] -<= nc-download-app +<= nc-download-unpacked url = https://github.com/pierre-alain-b/rainloop-nextcloud/releases/download/6.1.4/rainloop.tar.gz md5sum = 7cefc3dd3bd52b42d381de7d7447691f [nextcloud-app-news] -<= nc-download-app +<= nc-download-unpacked url = https://github.com/nextcloud/news/releases/download/14.2.2/news.tar.gz md5sum = f48d4b5dcbc078131bb86a4ae619da99 diff --git a/software/ors-amarisoft/README.md 
b/software/ors-amarisoft/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..004f423479f58479b7128006ffc4525ae8ecbd6c
--- /dev/null
+++ b/software/ors-amarisoft/README.md
@@ -0,0 +1,85 @@
+# ORS Amarisoft software release
+
+How to deploy from scratch
+
+ 1. Compile and install the lte_trx_sdr kernel module with `# cd trx_sdr*/kernel/ && make && sh init.sh`
+ 2. Make sure "create_tun = True" is set in /etc/opt/slapos/slapos.cfg
+ 3. Install the ors playbook
+ 4. Deploy this SR
+
+## Generated buildout configurations and json input schemas
+
+Since there are many ors-amarisoft software releases and software types, the following files are
+generated from jinja2 templates by the render-templates script before being pushed to gitlab:
+
+ * instance-tdd1900-enb-epc-input-schema.json
+ * instance-tdd1900-enb-input-schema.json
+ * instance-tdd1900-gnb-epc-input-schema.json
+ * instance-tdd1900-gnb-input-schema.json
+ * instance-tdd2600-enb-epc-input-schema.json
+ * instance-tdd2600-enb-input-schema.json
+ * instance-tdd2600-gnb-epc-input-schema.json
+ * instance-tdd2600-gnb-input-schema.json
+ * instance-tdd3500-enb-epc-input-schema.json
+ * instance-tdd3500-enb-input-schema.json
+ * instance-tdd3500-gnb-epc-input-schema.json
+ * instance-tdd3500-gnb-input-schema.json
+ * instance-tdd3700-enb-epc-input-schema.json
+ * instance-tdd3700-enb-input-schema.json
+ * instance-tdd3700-gnb-epc-input-schema.json
+ * instance-tdd3700-gnb-input-schema.json
+ * software-tdd1900.cfg
+ * software-tdd1900.cfg.json
+ * software-tdd2600.cfg
+ * software-tdd2600.cfg.json
+ * software-tdd3500.cfg
+ * software-tdd3500.cfg.json
+ * software-tdd3700.cfg
+ * software-tdd3700.cfg.json
+
+These files should not be modified directly; instead, the render-templates script should be run,
+together with update-hash, before each commit (a minimal sketch of this generation step is shown below).
+
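Since the generation step above is described only in prose, here is a hedged Python sketch of the idea. The template name, the context variable and the band list are assumptions made for illustration; the actual render-templates script defines its own inputs and outputs.

```python
from jinja2 import Environment, FileSystemLoader

# Hypothetical template name; the real render-templates script ships its own templates.
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template('software.jinja2.cfg')

# One software-tddNNNN.cfg per supported band, matching the file list above.
for band in ('tdd1900', 'tdd2600', 'tdd3500', 'tdd3700'):
    with open('software-%s.cfg' % band, 'w') as output:
        output.write(template.render(band=band))
```

After regeneration, update-hash refreshes the md5sum entries recorded in buildout.hash.cfg so that buildout notices the changed files.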
+## Services
+
+instance.cfg is rather complicated because the Amarisoft LTE stack consists of 4 binaries:
+
+ * **lteenb** - the eNodeB software, i.e. the server accepting connections from UEs (user equipment)
+ * **ltemme** - the Mobility Management Entity, in other words the core network, which handles the
+   orchestration of eNodeBs when a UE switches from one to another
+ * **lteims** - the IP Multimedia Subsystem, a protocol stack comparable to LTE but designed for
+   services over IP. Please read http://www.differencebetween.com/difference-between-lte-and-vs-ims-2/
+ * **ltembmsgw** - the Multimedia Broadcast Multicast Services Gateway, which broadcasts the same
+   multimedia content to multiple IP addresses at once to save bandwidth.
+
+These binaries run in the foreground, originally inside screen. We do not want all of them inside one
+screen because then we cannot easily control their resource usage, so we create 4 on-watch services.
+
+
+### MME
+
+The MME is the core network. It keeps track of UEs and of the eNodeB each of them is currently
+connected to, and it reroutes traffic when a UE switches between eNodeBs.
+The MME also serves as a service bus, so all other services must register with it.
+
+
+### IMS
+
+A service connected to the MME bus. IMS provides circuit-switched-style services over IP, which plain
+LTE cannot offer because it is designed as a data-over-IP service.
+
+
+### MBMSGW
+
+The MBMS Gateway is a standalone component connected to the BMSC (Broadcast Multicast Service Centre),
+the server that streams content from providers; the BMSC is another component of our core network and
+is not provided by Amarisoft.
+The MBMS Gateway is connected to the MME, which then manages MBMS sessions.
+
+## Gotchas!
+
+**trx_sdr.so**, provided in the archive, MUST be placed next to the `lteenb` binary. This library is
+the only one which does not follow the standard `ld` path resolution.
+
+**rf_driver** has to be compiled and installed. Inside the trx_sdr/kernel folder, issue `# make` to
+compile the kernel module, and then `# ./init.sh` to create the `/dev/sdr<N>` devices and insert the
+compiled module.
diff --git a/software/ors-amarisoft/buildout.hash.cfg b/software/ors-amarisoft/buildout.hash.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..e5a5be4da9c615a29d4e688e00da4ad895623f61
--- /dev/null
+++ b/software/ors-amarisoft/buildout.hash.cfg
@@ -0,0 +1,63 @@
+# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
+# The only allowed lines here are (regexes):
+# - "^#" comments, copied verbatim
+# - "^[" section beginnings, copied verbatim
+# - lines containing an "=" sign, which must fit in one of the following categories:
+#   - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
+#     But avoid directories, they are not portable.
+#     Copied verbatim.
+#   - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
+#     by the re-generation script.
+#     Re-generated.
+#   - other lines are copied verbatim
+# Substitution (${...:...}), extension ([buildout] extends = ...) and
+# section inheritance (< = ...) are NOT supported (but you should really
+# not need these here).
+
+[template]
+filename = instance.cfg
+md5sum = 96a76300b2f714b6c47157920fe79a53
+
+[template-lte-enb-epc]
+_update_hash_filename_ = instance-enb-epc.jinja2.cfg
+md5sum = 762d55291e75e8b61e35f9f28d29915a
+
+[template-lte-enb]
+_update_hash_filename_ = instance-enb.jinja2.cfg
+md5sum = 83c37b43d7b70584bf71aed9289ea13c
+
+[template-lte-gnb-epc]
+_update_hash_filename_ = instance-gnb-epc.jinja2.cfg
+md5sum = e43a726dd3023a4bbaa474bb2d7a6ebe
+
+[template-lte-gnb]
+_update_hash_filename_ = instance-gnb.jinja2.cfg
+md5sum = fc59d15a7f7f942951f9e38d7a1cca2c
+
+[template-lte-epc]
+_update_hash_filename_ = instance-epc.jinja2.cfg
+md5sum = 0585b4f8fd42538595e6abb61d07fd93
+
+[ue_db.jinja2.cfg]
+filename = config/ue_db.jinja2.cfg
+md5sum = d33163012d6c98efc59161974c649557
+
+[enb.jinja2.cfg]
+filename = config/enb.jinja2.cfg
+md5sum = d841debc51d9f12555a47d1556a6a3c1
+
+[gnb.jinja2.cfg]
+filename = config/gnb.jinja2.cfg
+md5sum = da64ea9c5003f40987a8bba3f18e8839
+
+[ltelogs.jinja2.sh]
+filename = ltelogs.jinja2.sh
+md5sum = 1ba2e065bdf14a6411e95e80db17dcfd
+
+[mme.jinja2.cfg]
+filename = config/mme.jinja2.cfg
+md5sum = 518c71ce57204304b703b977c665a164
+
+[ims.jinja2.cfg]
+filename = config/ims.jinja2.cfg
+md5sum = e561ec26a70943c61557def1781cf65f
diff --git a/software/ors-amarisoft/config/enb.jinja2.cfg b/software/ors-amarisoft/config/enb.jinja2.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..46cbff2432ad658c89939da49534987954f423fe
--- /dev/null
+++ b/software/ors-amarisoft/config/enb.jinja2.cfg
@@ -0,0 +1,278 @@
+/* lteenb configuration file version 2021-09-18
+ * Copyright (C) 2015-2021 Amarisoft
+ */
+
+#define TDD 1 // Values: 0 (FDD), 1(TDD)
+#define N_RB_DL {{ slapparameter_dict.get('n_rb_dl', slap_configuration['configuration.default_lte_n_rb_dl']) }} // Values: 6 (1.4 MHz), 15 (3MHz), 25 (5MHz), 50 (10MHz), 75 (15MHz), 100 (20MHz)
+#define N_ANTENNA_DL 2 // Values: 1 (SISO), 2 (MIMO 2x2)
+#define N_ANTENNA_UL 2 // Values: 1, 2
+#define CHANNEL_SIM 0 // Values: 0 (channel simulator disabled), 1 (channel simulator enabled)
+
+{
+  /* Log filter: syntax: layer.field=value[,...]
+ + Possible layers are phy, mac, rlc, pdcp, rrc, nas, s1ap, x2ap, gtpu and + all. The 'all' layer is used to address all the layers at the + same time. + + field values: + + - 'level': the log level of each layer can be set to 'none', + 'error', 'info' or 'debug'. Use 'debug' to log all the messages. + + - 'max_size': set the maximum size of the hex dump. 0 means no + hex dump. -1 means no limit. + */ + log_options: "all.level=debug,all.max_size=32", + log_filename: "{{ directory['log'] }}/enb.log", + + /* Enable remote API and Web interface */ + com_addr: "[{{ slap_configuration['ipv6-random'] }}]:{{ slap_configuration['configuration.enb_ws_port'] }}", + + /* RF driver configuration */ + rf_driver: { + name: "sdr", + /* list of devices. 'dev0' is always the master. */ + args: "dev0=/dev/sdr0", + /* synchronisation source: none, internal, gps, external (default = none) */ + // sync: "gps", + rx_antenna:"tx_rx", + }, + tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_lte_tx_gain']) }}, /* TX gain (in dB) */ + rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_lte_rx_gain']) }}, /* RX gain (in dB) */ + +#if CHANNEL_SIM == 1 + rf_ports: [ + { + channel_dl: { + type: "awgn", + noise_level: -30, + }, + } + ], +#endif + + mme_list: [ + { + /* address of MME for S1AP connection. Must be modified if the MME + runs on a different host. */ + mme_addr: "127.0.1.100" + }, + ], + /* GTP bind address (=address of the ethernet interface connected to + the MME). Must be modified if the MME runs on a different host. */ + gtp_addr: "{{ slapparameter_dict.get('gtp_addr', '127.0.1.1') }}", + + /* high 20 bits of SIB1.cellIdentifier */ + enb_id: 0x1A2D0, + + /* list of cells */ + cell_list: [ + { + /* Broadcasted PLMN identities */ + plmn_list: [ + "00101", + ], + + dl_earfcn: {{ slapparameter_dict.get('dl_earfcn', slap_configuration['configuration.default_dl_earfcn']) }}, + + n_id_cell: 1, + cell_id: 0x01, + tac: 0x0001, + root_sequence_index: 204, /* PRACH root sequence index */ + }, + ], /* cell_list */ + + /* default cell parameters */ + cell_default: { + n_antenna_dl: N_ANTENNA_DL, /* number of DL antennas */ + n_antenna_ul: N_ANTENNA_UL, /* number of UL antennas */ + +#if TDD == 1 + uldl_config: 2, /* TDD only */ + sp_config: 7, /* TDD only */ +#endif + + n_rb_dl: N_RB_DL, /* Bandwidth: 25: 5 MHz, 50: 10 MHz, 75: 15 MHz, 100: 20 MHz */ + cyclic_prefix: "normal", + + phich_duration: "normal", + phich_resource: "1", /* ratio of NG */ + + /* SIB1 */ + si_value_tag: 0, /* increment modulo 32 if SI is modified */ + cell_barred: false, /* SIB1.cellBarred-r13 */ + intra_freq_reselection: true, /* SIB1.intraFreqReselection */ + q_rx_lev_min: -70, /* SIB1.q-RxLevMin */ + p_max: 10, /* maximum power allowed for the UE (dBm) */ + si_window_length: 40, /* ms */ + sib_sched_list: [ + { + filename: "{{ directory['software'] }}/enb/config/sib23.asn", + si_periodicity: 16, /* frames */ + }, + ], + +#if N_RB_DL == 6 + si_coderate: 0.30, /* maximum code rate for SI/RA/P-RNTI messages */ +#else + si_coderate: 0.20, /* maximum code rate for SI/RA/P-RNTI messages */ +#endif + si_pdcch_format: 2, /* 2 or 3. 
Log2 of the number of CCEs for PDCCH + for SI/RA/P-RNTI */ + + n_symb_cch: 0, /* number of symbols for CCH (0 = auto) */ + + /* PDSCH dedicated config (currently same for all UEs) */ + pdsch_dedicated: { +#if N_ANTENNA_DL == 4 + p_a: -6, +#elif N_ANTENNA_DL == 2 + p_a: -3, +#else + p_a: 0, +#endif + p_b: -1, /* -1 means automatic */ + }, + + /* If defined, force for number of CCEs for UE specific PDCCH to + 2^pdcch_format. Otherwise it is computed from the reported + CQI. Range: 0 to 3. */ +#if N_RB_DL == 6 + pdcch_format: 1, +#else + pdcch_format: 2, +#endif + + /* if defined, force the PDSCH MCS for all UEs. Otherwise it is + computed from the reported CQI */ + /* pdsch_mcs: 12, */ + +#if N_RB_DL == 6 + prach_config_index: 15, /* subframe 9 every 20 ms */ +#else + prach_config_index: 4, /* subframe 4 every 10 ms */ +#endif + prach_freq_offset: -1, /* -1 means automatic */ + + /* PUCCH dedicated config (currently same for all UEs) */ + pucch_dedicated: { + n1_pucch_sr_count: 11, /* increase if more UEs are needed */ + cqi_pucch_n_rb: 1, /* increase if more UEs are needed */ +#if TDD == 1 + //tdd_ack_nack_feedback_mode: "bundling", /* TDD only */ + tdd_ack_nack_feedback_mode: "multiplexing", /* TDD only */ +#endif + }, + + /* PUSCH dedicated config (currently same for all UEs) */ + pusch_dedicated: { + beta_offset_ack_index: 9, + beta_offset_ri_index: 6, + beta_offset_cqi_index: 6, + }, + + pusch_hopping_offset: -1, /* -1 means automatic */ + + /* MCS for Msg3 (=CCCH RRC Connection Request) */ + pusch_msg3_mcs: 0, + + /* this CQI value is assumed when none is received from the UE */ +#if N_RB_DL == 6 + initial_cqi: 5, +#else + initial_cqi: 3, +#endif + + /* if defined, force the PUSCH MCS for all UEs. Otherwise it is + computed from the last received SRS/PUSCH. */ + // pusch_mcs: 18, + + dl_256qam: true, + ul_64qam: true, + + /* Scheduling request period (ms). Must be >= 40 for HD-FDD */ + sr_period: 20, + + /* CQI report config */ + cqi_period: 40, /* period (ms). Must be >= 32 for HD-FDD */ + +#if N_ANTENNA_DL >= 2 + /* RI reporting is done with a period of m_ri * cqi_period. + m_ri = 0 (default) disables RI reporting. */ + m_ri: 8, + /* transmission mode */ + transmission_mode: 3, +#endif + + /* SRS dedicated config. All UEs share these + parameters. srs_config_index and freq_domain_position are + allocated for each UE) */ + srs_dedicated: { +#if N_RB_DL == 6 + srs_bandwidth_config: 7, + srs_bandwidth: 1, +#elif N_RB_DL == 15 + srs_bandwidth_config: 6, + srs_bandwidth: 1, +#elif N_RB_DL == 25 + srs_bandwidth_config: 3, + srs_bandwidth: 1, +#elif N_RB_DL == 50 + srs_bandwidth_config: 2, + srs_bandwidth: 2, +#elif N_RB_DL == 75 + srs_bandwidth_config: 2, + srs_bandwidth: 2, +#else + srs_bandwidth_config: 2, + srs_bandwidth: 3, +#endif + srs_subframe_config: 3, /* 0 - 15 */ + srs_period: 40, /* period (ms). Must be >= 40 for HD-FDD */ + srs_hopping_bandwidth: 0, + }, + + /* MAC configuration (same for all UEs) */ + mac_config: { + ul_max_harq_tx: 5, /* max number of HARQ transmissions for uplink */ + dl_max_harq_tx: 5, /* max number of HARQ transmissions for downlink */ + }, + + /* CPU load limitation */ + pusch_max_its: 6, /* max number of turbo decoder iterations */ + + /* dynamic power control */ + dpc: true, + dpc_pusch_snr_target: 25, + dpc_pucch_snr_target: 20, + + /* RRC/UP ciphering algorithm preference. EEA0 is always the last. */ + cipher_algo_pref: [], + /* RRC integrity algorithm preference. EIA0 is always the last. 
*/ + integ_algo_pref: [2, 1], + + /* (in ms) send RRC connection release after this time of network + inactivity */ + inactivity_timer: 10000, + + /* SRB configuration */ + srb_config: [ + { + id: 1, + maxRetxThreshold: 32, + t_Reordering: 45, + t_PollRetransmit: 60, + }, + { + id: 2 , + maxRetxThreshold: 32, + t_Reordering: 45, + t_PollRetransmit: 60, + } + ], + + /* DRB configuration */ + drb_config: "{{ directory['software'] }}/enb/config/drb.cfg", + }, +} diff --git a/software/ors-amarisoft/config/gnb.jinja2.cfg b/software/ors-amarisoft/config/gnb.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..879a6358d1649e1e49e15a1f0e363b5f75b2e048 --- /dev/null +++ b/software/ors-amarisoft/config/gnb.jinja2.cfg @@ -0,0 +1,624 @@ +/* lteenb configuration file version 2021-09-18 + * Copyright (C) 2019-2021 Amarisoft + * NR SA FDD or TDD cell */ + + +#define NR_TDD 1 // Values: 0 (NR FDD), 1(NR TDD) +#define FR2 0 // Values: 0 (FR1), 1 (FR2) +#define NR_TDD_CONFIG 2 // Values: FR1: 1, 2, 3, 4 (compatible with LTE TDD config 2) FR2: 10 +#define N_ANTENNA_DL 2 // Values: 1 (SISO), 2 (MIMO 2x2), 4 (MIMO 4x4) +#define N_ANTENNA_UL 2 // Values: 1, 2, 4 +#define NR_BANDWIDTH {{ slapparameter_dict.get('nr_bandwidth', slap_configuration['configuration.default_nr_bandwidth']) }} // NR cell bandwidth +#define NR_LONG_PUCCH_FORMAT 2 // Values: 2, 3, 4 + +/* define to 1 to enable periodic SRS with N_ANTENNA_UL ports. Uplink + SU-MIMO is also enabled if N_ANTENNA_UL >= 2. Not all UEs support + uplink SU-MIMO. */ +#define USE_SRS 0 + +{ + log_options: "all.level=debug,all.max_size=32", + log_filename: "{{ directory['log'] }}/gnb.log", + + /* Enable remote API and Web interface */ + com_addr: "[{{ slap_configuration['ipv6-random'] }}]:{{ slap_configuration['configuration.enb_ws_port'] }}", + + rf_driver: { + name: "sdr", + /* list of devices. 'dev0' is always the master. */ + args: "dev0=/dev/sdr0", + /* synchronisation source: none, internal, gps, external (default = none) */ + // sync: "gps", + rx_antenna: "tx_rx", + }, + tx_gain: {{ slapparameter_dict.get('tx_gain', slap_configuration['configuration.default_nr_tx_gain']) }}, /* TX gain (in dB) */ + rx_gain: {{ slapparameter_dict.get('rx_gain', slap_configuration['configuration.default_nr_rx_gain']) }}, /* RX gain (in dB) */ + + amf_list: [ + { + /* address of AMF for NGAP connection. Must be modified if the AMF runs on a different host. */ + amf_addr: "127.0.1.100", + }, + ], + /* GTP bind address (=address of the ethernet interface connected to + the AMF). Must be modified if the AMF runs on a different host. */ + gtp_addr: "{{ slapparameter_dict.get('gtp_addr', '127.0.1.1') }}", + + gnb_id_bits: 28, + gnb_id: 0x12345, + + nr_support: true, + + rf_ports: [ + { +#if FR2 + /* an external frequency translator must be used for FR2 */ + rf_dl_freq: 3500, /* MHz */ + rf_ul_freq: 3500, /* MHz */ + + /* uncomment to have a higher per-UE bitrate at the expense of + higher gNB real time constraints. The default value is 4 + ms. 1 ms gives the maximum per-UE bitrate. 
*/ +// rx_to_tx_latency: 1, /* ms */ +#endif + }, + ], + + /* list of cells */ + cell_list: [], + + nr_cell_list: [ + { + rf_port: 0, + cell_id: 0x01, +#if NR_TDD == 1 +#if FR2 + band: 257, + dl_nr_arfcn: 2079167, /* 28000.08 MHz */ + subcarrier_spacing: 120, /* kHz */ + ssb_pos_bitmap: "0100000000000000000000000000000000000000000000000000000000000000", +#else + band: {{ slapparameter_dict.get('nr_band', slap_configuration['configuration.default_nr_band']) }}, + dl_nr_arfcn: {{ slapparameter_dict.get('dl_nr_arfcn', slap_configuration['configuration.default_dl_nr_arfcn']) }}, + subcarrier_spacing: 30, /* kHz */ + ssb_pos_bitmap: "10000000", +#endif +#else + band: 7, + dl_nr_arfcn: 536020, /* 2680 MHz */ + ssb_subcarrier_spacing: 15, + subcarrier_spacing: 30, /* kHz */ + ssb_pos_bitmap: "1000", +#endif + }, + ], /* nr_cell_list */ + + nr_cell_default: { + bandwidth: NR_BANDWIDTH, /* MHz */ + n_antenna_dl: N_ANTENNA_DL, + n_antenna_ul: N_ANTENNA_UL, + + /* force the timing TA offset (optional) */ +// n_timing_advance_offset: 39936, + /* subframe offset to align with the LTE TDD pattern (optional) */ +// subframe_offset: 2, + +#if NR_TDD == 1 + tdd_ul_dl_config: { +#if NR_TDD_CONFIG == 1 + pattern1: { + period: 5, /* in ms */ + dl_slots: 7, + dl_symbols: /* 6 */ 2, + ul_slots: 2, + ul_symbols: 2, + }, +#elif NR_TDD_CONFIG == 2 + pattern1: { + period: 5, /* in ms */ + dl_slots: 7, + dl_symbols: 6, + ul_slots: 2, + ul_symbols: 4, + }, +#elif NR_TDD_CONFIG == 3 + pattern1: { + period: 5, /* in ms */ + dl_slots: 6, + dl_symbols: 2, + ul_slots: 3, + ul_symbols: 2, + }, +#elif NR_TDD_CONFIG == 4 + pattern1: { + period: 3, /* in ms */ + dl_slots: 3, + dl_symbols: 6, + ul_symbols: 4, + ul_slots: 2, + }, + pattern2: { + period: 2, /* in ms */ + dl_slots: 4, + dl_symbols: 0, + ul_symbols: 0, + ul_slots: 0, + }, +#elif NR_TDD_CONFIG == 10 + /* only for FR2 */ + pattern1: { + period: 0.625, /* in ms */ + dl_slots: 3, + dl_symbols: 10, + ul_slots: 1, + ul_symbols: 2, + }, +#endif + }, +#endif + ssb_period: 20, /* in ms */ + n_id_cell: 500, + + plmn_list: [ { + tac: 100, + plmn: "00101", + reserved: false, + nssai: [ + { + sst: 1, + }, + /*{ + sst: 2, + }, + { + sst: 3, + sd: 50, + },*/ + ], + }, + ], + + /*sib_sched_list: [ + { + filename: "{{ directory['software'] }}/enb/config/sib2_nr.asn", + si_periodicity: 16, + }, + { + filename: "{{ directory['software'] }}/enb/config/sib3_nr.asn", + si_periodicity: 16, + }, + { + filename: "{{ directory['software'] }}/enb/config/sib4_nr.asn", + si_periodicity: 32, + }, + ], + sib9: { + si_periodicity: 32 + },*/ + si_window_length: 40, + + cell_barred: false, + intra_freq_reselection: true, + q_rx_lev_min: -70, + q_qual_min: -20, + p_max: 10, /* dBm */ + + root_sequence_index: 1, /* PRACH root sequence index */ + + /* Scheduling request period (slots). 
*/ + sr_period: 40, + + dmrs_type_a_pos: 2, + + /* to limit the number of HARQ feedback in UL, use pdsch_harq_ack_max; + allows to workaround issues with SM-G977N for example */ + //pdsch_harq_ack_max: 2, + + prach: { +#if NR_TDD == 1 +#if FR2 + prach_config_index: 149, /* format C0, every 4 frames */ + msg1_subcarrier_spacing: 120, /* kHz */ +#else +#if NR_TDD_CONFIG == 4 + prach_config_index: 156, /* format B4, subframe 2 */ +#else + prach_config_index: 160, /* format B4, subframe 9 */ +#endif + msg1_subcarrier_spacing: 30, /* kHz */ +#endif +#else + prach_config_index: 16, /* subframe 1 every frame */ +#endif + msg1_fdm: 1, + msg1_frequency_start: -1, + zero_correlation_zone_config: 15, + preamble_received_target_power: -110, /* in dBm */ + preamble_trans_max: 7, + power_ramping_step: 4, /* in dB */ +#if FR2 + ra_response_window: 40, /* in slots */ +#else + ra_response_window: 20, /* in slots */ +#endif + restricted_set_config: "unrestricted_set", + ra_contention_resolution_timer: 64, /* in ms */ + ssb_per_prach_occasion: 1, + cb_preambles_per_ssb: 8, + }, + + pdcch: { + search_space0_index: 0, + + dedicated_coreset: { + rb_start: -1, /* -1 to have the maximum bandwidth */ + l_crb: -1, /* -1 means all the bandwidth */ + duration: 0, /* 0 means to automatically set it from the coreset bandwidth */ + precoder_granularity: "sameAsREG_bundle", + }, + + css: { + n_candidates: [ 0, 0, 1, 0, 0 ], + }, + rar_al_index: 2, + si_al_index: 2, + + uss: { + n_candidates: [ 0, 2, 1, 0, 0 ], + dci_0_1_and_1_1: true, + }, + al_index: 1, + }, + + pdsch: { + mapping_type: "typeA", + dmrs_add_pos: 1, + dmrs_type: 1, + dmrs_max_len: 1, + /* k0 delay in slots from DCI to PDSCH: automatic setting */ + /* k1 delay in slots from PDSCH to PUCCH/PUSCH ACK/NACK: automatic setting */ + mcs_table: "qam256", + rar_mcs: 2, + si_mcs: 6, + /* If defined, force the PDSCH MCS for all UEs. 
Otherwise it is computed + * based on DL channel quality estimation */ + /* mcs: 24, */ + }, + + csi_rs: { + nzp_csi_rs_resource: [ + { + csi_rs_id: 0, +#if N_ANTENNA_DL == 1 + n_ports: 1, + frequency_domain_allocation: "row2", + bitmap: "100000000000", + cdm_type: "no_cdm", +#elif N_ANTENNA_DL == 2 + n_ports: 2, + frequency_domain_allocation: "other", + bitmap: "100000", + cdm_type: "fd_cdm2", +#elif N_ANTENNA_DL == 4 + n_ports: 4, + frequency_domain_allocation: "row4", + bitmap: "100", + cdm_type: "fd_cdm2", +#elif N_ANTENNA_DL == 8 + n_ports: 8, + frequency_domain_allocation: "other", + bitmap: "110011", + cdm_type: "fd_cdm2", +#else +#error unsupported number of DL antennas +#endif + density: 1, + first_symb: 4, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + power_control_offset: 0, /* dB */ + power_control_offset_ss: 0, /* dB */ + period: 80, + offset: 1, /* != 0 to avoid collision with SSB */ + qcl_info_periodic_csi_rs: 0, + }, +#if FR2 == 0 +#define USE_TRS +#endif +#ifdef USE_TRS + /* TRS : period of 40 ms, slots 1 & 2, symbols 4 and 8 */ + { + csi_rs_id: 1, + n_ports: 1, + frequency_domain_allocation: "row1", + bitmap: "0001", + cdm_type: "no_cdm", + density: 3, + first_symb: 4, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + power_control_offset: 0, /* dB */ + power_control_offset_ss: 0, /* dB */ + period: 40, + offset: 11, + qcl_info_periodic_csi_rs: 0, + }, + { + csi_rs_id: 2, + n_ports: 1, + frequency_domain_allocation: "row1", + bitmap: "0001", + cdm_type: "no_cdm", + density: 3, + first_symb: 8, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + power_control_offset: 0, /* dB */ + power_control_offset_ss: 0, /* dB */ + period: 40, + offset: 11, + qcl_info_periodic_csi_rs: 0, + }, + { + csi_rs_id: 3, + n_ports: 1, + frequency_domain_allocation: "row1", + bitmap: "0001", + cdm_type: "no_cdm", + density: 3, + first_symb: 4, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + power_control_offset: 0, /* dB */ + power_control_offset_ss: 0, /* dB */ + period: 40, + offset: 12, + qcl_info_periodic_csi_rs: 0, + }, + { + csi_rs_id: 4, + n_ports: 1, + frequency_domain_allocation: "row1", + bitmap: "0001", + cdm_type: "no_cdm", + density: 3, + first_symb: 8, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + power_control_offset: 0, /* dB */ + power_control_offset_ss: 0, /* dB */ + period: 40, + offset: 12, + qcl_info_periodic_csi_rs: 0, + }, +#endif + ], + nzp_csi_rs_resource_set: [ + { + csi_rs_set_id: 0, + nzp_csi_rs_resources: [ 0 ], + repetition: false, + }, +#ifdef USE_TRS + { + csi_rs_set_id: 1, + nzp_csi_rs_resources: [ 1, 2, 3, 4 ], + repetition: false, + trs_info: true, + }, +#endif + ], + + csi_im_resource: [ + { + csi_im_id: 0, + pattern: 1, + subcarrier_location: 8, + symbol_location: 8, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + period: 80, + offset: 1, /* != 0 to avoid collision with SSB */ + }, + ], + csi_im_resource_set: [ + { + csi_im_set_id: 0, + csi_im_resources: [ 0 ], + } + ], + /* ZP CSI-RS to set the CSI-IM REs to zero */ + zp_csi_rs_resource: [ + { + csi_rs_id: 0, + frequency_domain_allocation: "row4", + bitmap: "100", + n_ports: 4, + cdm_type: "fd_cdm2", + first_symb: 8, + density: 1, + rb_start: 0, + l_crb: -1, /* -1 means from rb_start to the end of the bandwidth */ + period: 80, + offset: 1, + }, + ], + p_zp_csi_rs_resource_set: [ 
+ { + zp_csi_rs_resources: [ 0 ], + }, + ], + + csi_resource_config: [ + { + csi_rsc_config_id: 0, + nzp_csi_rs_resource_set_list: [ 0 ], + resource_type: "periodic", + }, + { + csi_rsc_config_id: 1, + csi_im_resource_set_list: [ 0 ], + resource_type: "periodic", + }, +#ifdef USE_TRS + { + csi_rsc_config_id: 2, + nzp_csi_rs_resource_set_list: [ 1 ], + resource_type: "periodic", + }, +#endif + ], + csi_report_config: [ + { + resources_for_channel_measurement: 0, + csi_im_resources_for_interference: 1, + report_config_type: "periodic", + period: 80, + report_quantity: "CRI_RI_PMI_CQI", +#if N_ANTENNA_DL > 1 + codebook_config: { + codebook_type: "type1", + sub_type: "typeI_SinglePanel", +#if N_ANTENNA_DL == 2 +#elif N_ANTENNA_DL == 4 + n1: 2, + n2: 1, + codebook_mode: 1, +#elif N_ANTENNA_DL == 8 + n1: 4, + n2: 1, + codebook_mode: 1, +#endif + }, +#endif + cqi_table: 2, + subband_size: "value1", + }, + ], + }, + + pucch: { + pucch_group_hopping: "neither", + hopping_id: -1, /* -1 = n_cell_id */ + p0_nominal: -90, +#if 0 + pucch0: { + initial_cyclic_shift: 1, + n_symb: 1, + }, +#else + pucch1: { + n_cs: 3, + n_occ: 3, + freq_hopping: true, +#if USE_SRS && NR_TDD == 0 + n_symb: 13, +#endif + }, +#endif +#if NR_LONG_PUCCH_FORMAT == 2 + pucch2: { + n_symb: 2, + n_prb: 1, + freq_hopping: true, + simultaneous_harq_ack_csi: false, + max_code_rate: 0.25, + }, +#elif NR_LONG_PUCCH_FORMAT == 3 + pucch3: { + bpsk: false, + additional_dmrs: false, + freq_hopping: true, + n_prb: 1, + simultaneous_harq_ack_csi: true, + max_code_rate: 0.25, + }, +#elif NR_LONG_PUCCH_FORMAT == 4 + pucch4: { + occ_len: 4, + bpsk: false, + additional_dmrs: false, + freq_hopping: true, + simultaneous_harq_ack_csi: true, + max_code_rate: 0.25, + }, +#endif + }, + +#if USE_SRS + srs: { +#if NR_TDD +#if NR_TDD_CONFIG == 1 || NR_TDD_CONFIG == 2 + srs_symbols: [ 0, 0, 0, 0, 0, 0, 0, 2, 0, 0 ], +#elif NR_TDD_CONFIG == 3 + srs_symbols: [ 0, 0, 0, 0, 0, 0, 2, 0, 0, 0 ], +#elif NR_TDD_CONFIG == 4 + srs_symbols: [ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0 ], +#elif NR_TDD_CONFIG == 10 + srs_symbols: [ 0, 0, 0, 2, 0 ], +#endif +#else + srs_symbols: [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0 ], +#endif + srs_resource: [ + { + srs_resource_id: 0, + n_ports: N_ANTENNA_UL, + resource_type: "periodic", + period: 80, /* in slots */ + } + ], + srs_resource_set: [ + { + srs_resource_id_list: [ 0 ], + }, + ], + }, +#endif + + pusch: { + mapping_type: "typeA", + n_symb: 14, + dmrs_add_pos: 1, + dmrs_type: 1, + dmrs_max_len: 1, + tf_precoding: false, + mcs_table: "qam256", /* without transform precoding */ + mcs_table_tp: "qam256", /* with transform precoding */ + ldpc_max_its: 5, + /* k2, msg3_k2 delay in slots from DCI to PUSCH: automatic setting */ + p0_nominal_with_grant: -76, + msg3_mcs: 4, + msg3_delta_power: 0, /* in dB */ + beta_offset_ack_index: 9, + + /* if defined, force the PUSCH MCS for all UEs. Otherwise it is + computed from the last received PUSCH. 
*/ + /* mcs: 16, */ + }, + + /* MAC configuration */ + mac_config: { + msg3_max_harq_tx: 5, + ul_max_harq_tx: 5, /* max number of HARQ transmissions for uplink */ + dl_max_harq_tx: 5, /* max number of HARQ transmissions for downlink */ + ul_max_consecutive_retx: 30, /* disconnect UE if reached */ + dl_max_consecutive_retx: 30, /* disconnect UE if reached */ + periodic_bsr_timer: 20, + retx_bsr_timer: 320, + periodic_phr_timer: 500, + prohibit_phr_timer: 200, + phr_tx_power_factor_change: "dB3", + sr_prohibit_timer: 0, /* in ms, 0 to disable the timer */ + sr_trans_max: 64, + }, + + cipher_algo_pref: [], + integ_algo_pref: [2, 1], + + inactivity_timer: 10000, + + drb_config: "{{ directory['software'] }}/enb/config/drb_nr.cfg", + }, +} + diff --git a/software/ors-amarisoft/config/ims.jinja2.cfg b/software/ors-amarisoft/config/ims.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..d1fca55c5d35450a20ccb4f1129688f0e500a1e6 --- /dev/null +++ b/software/ors-amarisoft/config/ims.jinja2.cfg @@ -0,0 +1,78 @@ +/* lteims configuration file + * version 2021-09-18 + * Copyright (C) 2015-2021 Amarisoft + */ +{ + log_options: "all.level=debug,all.max_size=32", + log_filename: "{{ directory['log'] }}/ims.log", + + /* SIP bind address */ + //sip_addr: [ + // {addr: "192.168.4.1", bind_addr: "192.168.4.1", port_min: 10000, port_max: 20000}, + // "2001:468:3000:1::", + // {addr: "192.168.5.1", bind_addr: "192.168.5.1", port_min: 10000, port_max: 20000}, + // "2001:468:4000:1::" + //], + /* SIP bind address */ + sip_addr: [ + {addr: "{{ slap_configuration['tun-ipv4-addr'] }}", bind_addr: "0.0.0.0", port_min: 10000, port_max: 20000}, + {#" slap_configuration['tun-ipv6-addr'] ",#} + ], + + mms_server_bind_addr: "{{ netaddr.IPAddress(netaddr.IPNetwork(slap_configuration['tun-ipv4-network']).first) + 1 }}:1111", + + /* MME connection for SMS over SG */ + sctp_addr: "{{ slap_configuration['configuration.ims_addr'] }}", + + /* Cx connection */ + cx_server_addr: "127.0.1.100", + cx_bind_addr: "{{ slap_configuration['configuration.ims_addr'] }}", + + /* Rx connection */ + rx_server_addr: "127.0.1.100", + rx_bind_addr: "{{ slap_configuration['configuration.ims_addr'] }}", + + /* Remote API */ + com_addr: "[{{ slap_configuration['ipv6-random'] }}]:{{ slap_configuration['configuration.ims_ws_port'] }}", + + /* Global domain name (May be overriden for each user) */ + domain: "{{ slap_configuration['configuration.domain'] }}", + + include "{{ slap_configuration['ue_db_path'] }}", + +{# Example of of s6a connection #} +{# s6: { #} +{# server_addr: "", #} +{# bind_addr: "", #} +{# origin_realm: "", #} +{# origin_host: "", #} +{# }, #} + + /* Echo phone number */ + echo: [ + "tel:666", + "tel:+666", + {impu: "tel:404", code: 404}, /* 404 test */ + {impu: "urn:service:sos", anonymous: true, authentication: false}, /* Emergency call */ + {impu: "urn:service:sos.police", anonymous: true, authentication: false}, /* Emergency call */ + ], + + /* Delay */ + //sms_expires: 86400, + //binding_expires: 3600, + + /* on: 3GPP mode allowed + * silent: 3GPP mode forced + * off: IETF mode + */ + precondition: "on", + "100rel": true, + + /* IPsec */ + ipsec_aalg_list: ["hmac-md5-96", "hmac-sha-1-96"], + ipsec_ealg_list: ["null", "aes-cbc", "des-cbc", "des-ede3-cbc"], + + mt_call_sdp_file: "{{ directory['software'] }}/mme/config/mt_call.sdp", + + ue_db_filename: "{{ directory['var'] }}/lte_ue_ims.db", +} diff --git a/software/ors-amarisoft/config/mme.jinja2.cfg b/software/ors-amarisoft/config/mme.jinja2.cfg new 
file mode 100644 index 0000000000000000000000000000000000000000..a6d20237e28c75e3d0cec12a37e880c5fd16c555 --- /dev/null +++ b/software/ors-amarisoft/config/mme.jinja2.cfg @@ -0,0 +1,159 @@ +/* ltemme configuration file for ims + * version 2021-09-18 + * Copyright (C) 2015-2021 Amarisoft + */ +{ + /* Log filter: syntax: layer.field=value[,...] + + Possible layers are nas, ip, s1ap, gtpu and all. The 'all' layer + is used to address all the layers at the same time. + + field values: + + - 'level': the log level of each layer can be set to 'none', + 'error', 'info' or 'debug'. Use 'debug' to log all the messages. + + - 'max_size': set the maximum size of the hex dump. 0 means no + hex dump. -1 means no limit. + */ + log_options: "all.level=debug,all.max_size=32", + log_filename: "{{ directory['log'] }}/mme.log", + + /* Enable remote API and Web interface */ + com_addr: "[{{ slap_configuration['ipv6-random'] }}]:{{ slap_configuration['configuration.mme_ws_port'] }}", + + /* bind address for GTP-U. Normally = address of the PC, here bound + on local interface to be able to run ltemme on the same PC as + lteenb. By default, the S1AP SCTP connection is bound on the same + address. */ + gtp_addr: "127.0.1.100", + + plmn: "00101", + mme_group_id: 32769, + mme_code: 1, + + ims_vops_eps: true, /* IMS supported */ + ims_vops_5gs_3gpp: true, /* IMS supported */ + ims_vops_5gs_n3gpp: true, /* IMS supported */ + //emc_bs: true, /* emergency calls supported */ + //emc: 3, /* NR/E-UTRA connected to 5GCN emergency calls supported */ + //emc_n3gpp: true, /* non-3GPP emergency calls supported */ + emergency_number_list: [ + /* Category bits: (Table 10.5.135d/3GPP TS 24.008) + Bit 1 Police + Bit 2 Ambulance + Bit 3 Fire Brigade + Bit 4 Marine Guard + Bit 5 Mountain Rescue + */ + { category: 0x1f, digits: "911" }, + { category: 0x1f, digits: "112" }, + ], + + rx: { + qci: {audio: 1, video: 2}, + }, + + /* network name and network short name sent in the EMM information + message to the UE */ + network_name: "{{ slap_configuration['configuration.network_name'] }}", + network_short_name: "{{ slap_configuration['configuration.network_name'] }}", + + /* Control Plane Cellular IoT EPS optimization support */ + cp_ciot_opt: true, + + /* DCNR support */ + nr_support: true, + + eps_5gs_interworking: "with_n26", + + + /* 15 bearers support */ + fifteen_bearers: false, + + ims_list: [ + { + ims_addr: "{{ slap_configuration['configuration.ims_addr'] }}", + bind_addr: "{{ slap_configuration['configuration.ims_bind'] }}" + } + ], + + /* AMF slices configuration */ + /*nssai: [ + { + sst: 1, + }, + { + sst: 2, + }, + { + sst: 3, + sd: 50, + } + ],*/ + + /* ePDG configuration */ + //epdg: { + // bind_addr: "127.0.1.100:500", + // esp_duration: 900, + // certificate: "epdg.pem", + // /* required for some buggy Mediatek phones */ + // //omit_auth_in_first_auth_rsp: true + //}, + + /* Public Data Networks. The first one is the default. 
*/ + pdn_list: [ + { + pdn_type: "ipv4", + tun_ifname: "{{ slap_configuration['tun-name'] }}", + access_point_name: ["default", "internet", "ims", "sos"], + first_ip_addr: "{{ netaddr.IPAddress(netaddr.IPNetwork(slap_configuration['tun-ipv4-network']).first) + 2 }}", + last_ip_addr: "{{ netaddr.IPAddress(netaddr.IPNetwork(slap_configuration['tun-ipv4-network']).last) - 1 }}", + ip_addr_shift: 2, /* difference between allocated IP addresses is 4 */ + dns_addr: "8.8.8.8", /* Google DNS address */ + // TODO: enable this instead of Google's DNS above + // dns_addr: "{{ slap_configuration['tun-ipv4-addr'] }}", + p_cscf_addr: ["{{ slap_configuration['tun-ipv4-addr'] }}"], + + erabs: [ + { + qci: 9, + priority_level: 15, + pre_emption_capability: "shall_not_trigger_pre_emption", + pre_emption_vulnerability: "not_pre_emptable", + }, + ], + }, + ], + + /* Setup script for the network interface. + If no script is given, no network interface is created. + Script is called for each PDN with following parameters: + 1) Interface name + 2) PDN index + 3) Access Point Name + 4) IP version: 'ipv4' or 'ipv6' + 5) IP address: first IP address for ipv4 and link local address for IPv6 + 6) First IP address + 7) Last IP address + + SlapOS: We suplement the script by preparing the TUN interface in slapos format phase. + Gotcha: The script has to be presented + */ + tun_setup_script: "{{ ifup_empty }}", + /* If true, inter-UE routing is done inside the MME (hence no IP + packet is output to the virtual network interface in case of + inter-UE communication). Otherwise, it is done by the Linux IP + layer. */ + ue_to_ue_forwarding: false, + + /* NAS ciphering algorithm preference. EEA0 is always the last. */ + nas_cipher_algo_pref: [ ], + /* NAS integrity algorithm preference. EIA0 is always the last. 
*/ + nas_integ_algo_pref: [ 2, 1 ], + + include "{{ slap_configuration['ue_db_path'] }}", + + /* persistent user database */ + ue_db_filename: "{{ directory['var'] }}/lte_ue.db" +} diff --git a/software/ors-amarisoft/config/ue_db.jinja2.cfg b/software/ors-amarisoft/config/ue_db.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..0df14cfb7c4231f4b285d01b842596c411df4ec7 --- /dev/null +++ b/software/ors-amarisoft/config/ue_db.jinja2.cfg @@ -0,0 +1,25 @@ +{%- set filtered_slave_instance_list = [] %} +{%- for slave_instance in slave_instance_list %} +{%- if slave_instance.get('imsi', '') != '' %} +{%- do filtered_slave_instance_list.append(slave_instance) %} +{%- endif %} +{%- endfor -%} +ue_db: [ +{%- for i, slave_instance in enumerate(filtered_slave_instance_list) %} +{%- if i == 0 -%} +{ +{%- else -%} +, { +{%- endif %} + sim_algo: "{{ slave_instance.get('sim_algo', 'milenage') }}", + imsi: "{{ slave_instance.get('imsi', '') }}", + opc: "{{ slave_instance.get('opc', '') }}", + amf: {{ slave_instance.get('amf', '0x9001') }}, + sqn: "{{ slave_instance.get('sqn', '000000000000') }}", + K: "{{ slave_instance.get('k', '') }}", + impu: "{{ slave_instance.get('impu', '') }}", + impi: "{{ slave_instance.get('impi', '') }}", +} +{%- endfor -%} +] + diff --git a/software/ors-amarisoft/instance-enb-epc.jinja2.cfg b/software/ors-amarisoft/instance-enb-epc.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..62c5ed21b0f2289043cd5650835653a03166d9e5 --- /dev/null +++ b/software/ors-amarisoft/instance-enb-epc.jinja2.cfg @@ -0,0 +1,78 @@ +[buildout] +parts = + directory + lte-enb-request + lte-epc-request + publish-connection-information + +extends = {{ monitor_template }} + +eggs-directory = {{ eggs_directory }} +develop-eggs-directory = {{ develop_eggs_directory }} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = {{ slap_connection['computer-id'] }} +partition = {{ slap_connection['partition-id'] }} +url = {{ slap_connection['server-url'] }} +key = {{ slap_connection['key-file'] }} +cert = {{ slap_connection['cert-file'] }} + +[directory] +recipe = slapos.cookbook:mkdirectory +software = {{ buildout_directory }} +home = ${buildout:directory} +etc = ${:home}/etc +var = ${:home}/var +etc = ${:home}/etc +bin = ${:home}/bin +run = ${:var}/run +script = ${:etc}/run +service = ${:etc}/service +promise = ${:etc}/promise +log = ${:var}/log + +[request-common-base] +recipe = slapos.cookbook:request.serialised +software-url = {{ slap_connection['software-release-url'] }} +server-url = {{ slap_connection['server-url'] }} +computer-id = {{ slap_connection['computer-id'] }} +partition-id = {{ slap_connection['partition-id'] }} +key-file = {{ slap_connection['key-file'] }} +cert-file = {{ slap_connection['cert-file'] }} + +config-monitor-password = ${monitor-htpasswd:passwd} +return = monitor-base-url + +[lte-epc-request] +<= request-common-base +name = EPC +software-type = epc +config-name = epc + +[lte-enb-request] +<= request-common-base +name = eNB +software-type = enb +config-name = enb +{% if slapparameter_dict.get("tx_gain", None) %} +config-tx_gain = {{ dumps(slapparameter_dict["tx_gain"]) }} +{% endif %} +{% if slapparameter_dict.get("rx_gain", None) %} +config-rx_gain = {{ dumps(slapparameter_dict["rx_gain"]) }} +{% endif %} +{% if slapparameter_dict.get("dl_earfcn", None) %} +config-dl_earfcn = {{ dumps(slapparameter_dict["dl_earfcn"]) }} +{% endif %} +{% if slapparameter_dict.get("n_rb_dl", 
None) %} +config-n_rb_dl = {{ dumps(slapparameter_dict["n_rb_dl"]) }} +{% endif %} + +[monitor-base-url-dict] +lte-epc-request = ${lte-epc-request:connection-monitor-base-url} +lte-enb-request = ${lte-enb-request:connection-monitor-base-url} + +[publish-connection-information] +recipe = slapos.cookbook:publish +<= monitor-publish diff --git a/software/ors-amarisoft/instance-enb-input-schema.jinja2.json b/software/ors-amarisoft/instance-enb-input-schema.jinja2.json new file mode 100644 index 0000000000000000000000000000000000000000..25abbd8c1c2c56d8052037e03db409085dbcb45b --- /dev/null +++ b/software/ors-amarisoft/instance-enb-input-schema.jinja2.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": {{ default_lte_tx_gain }} + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": {{ default_lte_rx_gain }} + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": {{ default_dl_earfcn }} + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": {{ default_lte_n_rb_dl }} + } + } +} diff --git a/software/ors-amarisoft/instance-enb.jinja2.cfg b/software/ors-amarisoft/instance-enb.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..cb79a08974c29d8876459aa891cfee36ea4267c0 --- /dev/null +++ b/software/ors-amarisoft/instance-enb.jinja2.cfg @@ -0,0 +1,99 @@ +[buildout] +parts = + directory + ltelogs + lte-enb-config + lte-enb-service +# Temporarily extend monitor-base until promises are added + monitor-base + publish-connection-information + +extends = {{ monitor_template }} + +eggs-directory = {{ eggs_directory }} +develop-eggs-directory = {{ develop_eggs_directory }} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = {{ slap_connection['computer-id'] }} +partition = {{ slap_connection['partition-id'] }} +url = {{ slap_connection['server-url'] }} +key = {{ slap_connection['key-file'] }} +cert = {{ slap_connection['cert-file'] }} + +configuration.network_name = VIFIB +configuration.domain = vifib.com +configuration.mme_ws_port = 9000 +configuration.enb_ws_port = 9002 +configuration.ims_ws_port = 9003 +configuration.ims_addr = 127.0.0.1 +configuration.ims_bind = 127.0.0.2 +configuration.enb_addr = 127.0.1.1 +configuration.default_dl_earfcn = {{ default_dl_earfcn }} +configuration.default_lte_dl_freq = {{ default_lte_dl_freq }} +configuration.default_lte_band = {{ default_lte_band }} +configuration.default_lte_n_rb_dl = {{ default_lte_n_rb_dl }} +configuration.default_lte_tx_gain = {{ default_lte_tx_gain }} +configuration.default_lte_rx_gain = {{ default_lte_rx_gain }} + +[directory] +recipe = slapos.cookbook:mkdirectory +software = {{ buildout_directory }} +home = ${buildout:directory} +etc = ${:home}/etc +var = ${:home}/var +etc = ${:home}/etc +bin = ${:home}/bin +run = ${:var}/run +script = ${:etc}/run +service = ${:etc}/service +promise = ${:etc}/promise +log = ${:var}/log + +[ltelogs] +recipe = slapos.recipe.template:jinja2 +template = {{ ltelogs_template }} +rendered = ${directory:home}/ltelogs.sh +mode = 0775 +extensions = jinja2.ext.do +context = + section directory directory + +### eNodeB 
(enb) +[lte-enb-service] +recipe = slapos.cookbook:wrapper +init = ${ltelogs:rendered} ${directory:log}/enb.log; sleep 2 +command-line = {{ enb }}/lteenb ${directory:etc}/enb.cfg +wrapper-path = ${directory:service}/lte-enb +mode = 0775 +reserve-cpu = True +pidfile = ${directory:run}/enb.pid +hash-files = + ${lte-enb-config:rendered} +environment = + LD_LIBRARY_PATH={{ openssl_location }}/lib + AMARISOFT_PATH=/opt/amarisoft/.amarisoft + +[config-base] +recipe = slapos.recipe.template:jinja2 +mode = 0664 +extensions = jinja2.ext.do +context = + section directory directory + section slap_configuration slap-configuration + key slapparameter_dict slap-configuration:configuration + import netaddr netaddr + +[lte-enb-config] +<= config-base +template = {{ enb_template }} +rendered = ${directory:etc}/enb.cfg + +[publish-connection-information] +recipe = slapos.cookbook:publish.serialised +monitor-base-url = ${monitor-instance-parameter:monitor-base-url} + +[monitor-instance-parameter] +monitor-title = {{ slapparameter_dict['name'] | string }} +password = {{ slapparameter_dict['monitor-password'] | string }} diff --git a/software/ors-amarisoft/instance-epc-input-schema.json b/software/ors-amarisoft/instance-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..fa461822c6584d3915f04917d1b303bf567c120d --- /dev/null +++ b/software/ors-amarisoft/instance-epc-input-schema.json @@ -0,0 +1,6 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema", + "title": "EPC Input Parameters", + "type": "object", + "properties": {} +} diff --git a/software/ors-amarisoft/instance-epc-slave-input-schema.json b/software/ors-amarisoft/instance-epc-slave-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..5ceb4aa27e4f800973cab3ab0d85837d60a7b292 --- /dev/null +++ b/software/ors-amarisoft/instance-epc-slave-input-schema.json @@ -0,0 +1,65 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "EPC SIM Card Parameters", + "required": [ + "sim_algo", + "imsi", + "opc", + "amf", + "sqn", + "k", + "impu", + "impi" + ], + "properties": { + "sim_algo": { + "title": "Sim Algorithm", + "description": "xor, milenage or tuak. Set the USIM authentication algorithm.", + "type": "string", + "default": "milenage" + }, + "imsi": { + "title": "IMSI", + "description": "IMSI", + "type": "string", + "default": "" + }, + "opc": { + "title": "OPC", + "description": "Operator key preprocessed with the user secret key (as a 16 byte hexadecimal string). When the Milenage authentication algorithm is used, opc must be set.", + "type": "string", + "default": "" + }, + "amf": { + "title": "AMF", + "description": "Range: 0 to 65535. Set the Authentication Management Field.", + "type": "string", + "default": "0x9001" + }, + "sqn": { + "title": "SQN", + "description": "Optional String (6 byte hexadecimal string). Set the initial sequence number. For the XOR algorithm, the actual value does not matter. For the Milenage or TUAK algorithm, a sequence number resynchronization is initiated if the sequence number does not match the one stored in the USIM.", + "type": "string", + "default": "000000000000" + }, + "k": { + "title": "K", + "description": "Set the user secret key (as a 16 bytes hexadecimal string, or eventually 32 bytes hexadecimal string for TUAK).", + "type": "string", + "default": "" + }, + "impu": { + "title": "IMPU", + "description": "sip URI or a telephone number. Note that sip URI must not include hostname. 
If IMPU does not start by a scheme, it is assumed to be a sip URI.", + "type": "string", + "default": "" + }, + "impi": { + "title": "IMPI", + "description": "Defines user IMPI. Must be fully filled with hostname if necessary.", + "type": "string", + "default": "" + } + } +} diff --git a/software/ors-amarisoft/instance-epc.jinja2.cfg b/software/ors-amarisoft/instance-epc.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..323f23bc52cd9344ddfcd20961f42c223359d186 --- /dev/null +++ b/software/ors-amarisoft/instance-epc.jinja2.cfg @@ -0,0 +1,135 @@ +[buildout] +parts = + directory + ltelogs + lte-mme-config + lte-mme-service +# Temporarily extend monitor-base until promises are added + monitor-base + publish-connection-information + +extends = {{ monitor_template }} + +eggs-directory = {{ eggs_directory }} +develop-eggs-directory = {{ develop_eggs_directory }} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = {{ slap_connection['computer-id'] }} +partition = {{ slap_connection['partition-id'] }} +url = {{ slap_connection['server-url'] }} +key = {{ slap_connection['key-file'] }} +cert = {{ slap_connection['cert-file'] }} + +configuration.network_name = VIFIB +configuration.domain = vifib.com +configuration.log_size = 50M +configuration.mme_ws_port = 9000 +configuration.enb_ws_port = 9002 +configuration.ims_ws_port = 9003 +configuration.mme_addr = 127.0.1.100 +configuration.ims_addr = 127.0.0.1 +configuration.ims_bind = 127.0.0.2 +configuration.enb_addr = 127.0.1.1 + +ue_db_path = {{ ue_db_path }} + +[directory] +recipe = slapos.cookbook:mkdirectory +software = {{ buildout_directory }} +home = ${buildout:directory} +etc = ${:home}/etc +var = ${:home}/var +etc = ${:home}/etc +bin = ${:home}/bin +run = ${:var}/run +script = ${:etc}/run +service = ${:etc}/service +promise = ${:etc}/promise +log = ${:var}/log + +[ltelogs] +recipe = slapos.recipe.template:jinja2 +template = {{ ltelogs_template }} +rendered = ${directory:home}/ltelogs.sh +mode = 0775 +extensions = jinja2.ext.do +context = + section directory directory + +### IMS +[lte-ims-service] +recipe = slapos.cookbook:wrapper +init = ${ltelogs:rendered} ${directory:log}/ims.log; sleep 1 +command-line = rm -f ${directory:var}/lte_ue.db; {{ mme }}/lteims ${directory:etc}/ims.cfg +wrapper-path = ${directory:service}/lte-ims +mode = 0775 +pidfile = ${directory:run}/ims.pid +hash-files = + ${lte-ims-config:rendered} + {{ ue_db_path }} +environment = AMARISOFT_PATH=/opt/amarisoft/.amarisoft + +### MME +[lte-mme-service] +recipe = slapos.cookbook:wrapper +init = ${ltelogs:rendered} ${directory:log}/mme.log +command-line = {{ mme }}/ltemme ${directory:etc}/mme.cfg +wrapper-path = ${directory:service}/lte-mme +mode = 0775 +pidfile = ${directory:run}/mme.pid +hash-files = + ${lte-mme-config:rendered} + {{ ue_db_path }} +environment = + LD_LIBRARY_PATH={{ openssl_location }}/lib:{{ nghttp2_location }}/lib + AMARISOFT_PATH=/opt/amarisoft/.amarisoft + +### EMPTY mme-ifup script +[lte-mme-ifup-empty] +recipe = slapos.cookbook:wrapper +wrapper-path = ${directory:bin}/mme-ifup-empty +command-line = echo Using interface +mode = 775 + +[config-base] +recipe = slapos.recipe.template:jinja2 +mode = 0664 +extensions = jinja2.ext.do +context = + section directory directory + section slap_configuration slap-configuration + key slapparameter_dict slap-configuration:configuration + import netaddr netaddr + +[lte-ims-config] +<= config-base +template = {{ ims_template }} +rendered = 
${directory:etc}/ims.cfg + +[lte-mme-config] +<= config-base +template = {{ mme_template }} +rendered = ${directory:etc}/mme.cfg +context = + section directory directory + section slap_configuration slap-configuration + key slapparameter_dict slap-configuration:configuration + import netaddr netaddr + key ifup_empty lte-mme-ifup-empty:wrapper-path + +[publish-connection-information] +recipe = slapos.cookbook:publish.serialised + +{% if slapparameter_dict.get("monitor-password", None) %} +monitor-base-url = ${monitor-instance-parameter:monitor-base-url} +{% else %} +monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password} +{% endif %} + +{% if slapparameter_dict.get("monitor-password", None) %} +[monitor-instance-parameter] +monitor-title = {{ slapparameter_dict['name'] }} +password = {{ slapparameter_dict['monitor-password'] }} +{% endif %} diff --git a/software/ors-amarisoft/instance-gnb-epc.jinja2.cfg b/software/ors-amarisoft/instance-gnb-epc.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..050c88c259aa25e3fefd6e6e48e9a7915c1e6138 --- /dev/null +++ b/software/ors-amarisoft/instance-gnb-epc.jinja2.cfg @@ -0,0 +1,81 @@ +[buildout] +parts = + directory + lte-gnb-request + lte-epc-request + publish-connection-information + +extends = {{ monitor_template }} + +eggs-directory = {{ eggs_directory }} +develop-eggs-directory = {{ develop_eggs_directory }} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = {{ slap_connection['computer-id'] }} +partition = {{ slap_connection['partition-id'] }} +url = {{ slap_connection['server-url'] }} +key = {{ slap_connection['key-file'] }} +cert = {{ slap_connection['cert-file'] }} + +[directory] +recipe = slapos.cookbook:mkdirectory +software = {{ buildout_directory }} +home = ${buildout:directory} +etc = ${:home}/etc +var = ${:home}/var +etc = ${:home}/etc +bin = ${:home}/bin +run = ${:var}/run +script = ${:etc}/run +service = ${:etc}/service +promise = ${:etc}/promise +log = ${:var}/log + +[request-common-base] +recipe = slapos.cookbook:request.serialised +software-url = {{ slap_connection['software-release-url'] }} +server-url = {{ slap_connection['server-url'] }} +computer-id = {{ slap_connection['computer-id'] }} +partition-id = {{ slap_connection['partition-id'] }} +key-file = {{ slap_connection['key-file'] }} +cert-file = {{ slap_connection['cert-file'] }} + +config-monitor-password = ${monitor-htpasswd:passwd} +return = monitor-base-url + +[lte-epc-request] +<= request-common-base +name = EPC +software-type = epc +config-name = epc + +[lte-gnb-request] +<= request-common-base +name = gNB +software-type = gnb +config-name = gnb +{% if slapparameter_dict.get("tx_gain", None) %} +config-tx_gain = {{ dumps(slapparameter_dict["tx_gain"]) }} +{% endif %} +{% if slapparameter_dict.get("rx_gain", None) %} +config-rx_gain = {{ dumps(slapparameter_dict["rx_gain"]) }} +{% endif %} +{% if slapparameter_dict.get("dl_nr_arfcn", None) %} +config-dl_nr_arfcn = {{ dumps(slapparameter_dict["dl_nr_arfcn"]) }} +{% endif %} +{% if slapparameter_dict.get("nr_band", None) %} +config-nr_band = {{ dumps(slapparameter_dict["nr_band"]) }} +{% endif %} +{% if slapparameter_dict.get("nr_bandwidth", None) %} +config-nr_bandwidth = {{ dumps(slapparameter_dict["nr_bandwidth"]) }} +{% endif %} + +[monitor-base-url-dict] 
+lte-epc-request = ${lte-epc-request:connection-monitor-base-url} +lte-gnb-request = ${lte-gnb-request:connection-monitor-base-url} + +[publish-connection-information] +recipe = slapos.cookbook:publish +<= monitor-publish diff --git a/software/ors-amarisoft/instance-gnb-input-schema.jinja2.json b/software/ors-amarisoft/instance-gnb-input-schema.jinja2.json new file mode 100644 index 0000000000000000000000000000000000000000..727fcbb46c9fe6d4228a4eff7ec71034b7d76426 --- /dev/null +++ b/software/ors-amarisoft/instance-gnb-input-schema.jinja2.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": {{ default_nr_tx_gain }} + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": {{ default_nr_rx_gain }} + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": {{ default_dl_nr_arfcn }} + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": {{ default_nr_band }} + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": {{ default_nr_bandwidth }} + } + } +} diff --git a/software/ors-amarisoft/instance-gnb.jinja2.cfg b/software/ors-amarisoft/instance-gnb.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..75d819f93ef61586747a875ca54e9a0c463534e5 --- /dev/null +++ b/software/ors-amarisoft/instance-gnb.jinja2.cfg @@ -0,0 +1,99 @@ +[buildout] +parts = + directory + ltelogs + lte-gnb-config + lte-enb-service +# Temporarily extend monitor-base until promises are added + monitor-base + publish-connection-information + +extends = {{ monitor_template }} + +eggs-directory = {{ eggs_directory }} +develop-eggs-directory = {{ develop_eggs_directory }} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = {{ slap_connection['computer-id'] }} +partition = {{ slap_connection['partition-id'] }} +url = {{ slap_connection['server-url'] }} +key = {{ slap_connection['key-file'] }} +cert = {{ slap_connection['cert-file'] }} + +configuration.network_name = VIFIB +configuration.domain = vifib.com +configuration.mme_ws_port = 9000 +configuration.enb_ws_port = 9002 +configuration.ims_ws_port = 9003 +configuration.ims_addr = 127.0.0.1 +configuration.ims_bind = 127.0.0.2 +configuration.enb_addr = 127.0.1.1 +configuration.default_dl_nr_arfcn = {{ default_dl_nr_arfcn }} +configuration.default_nr_band = {{ default_nr_band }} +configuration.default_nr_dl_freq = {{ default_nr_dl_freq }} +configuration.default_nr_bandwidth = {{ default_nr_bandwidth }} +configuration.default_nr_tx_gain = {{ default_nr_tx_gain }} +configuration.default_nr_rx_gain = {{ default_nr_rx_gain }} + +[directory] +recipe = slapos.cookbook:mkdirectory +software = {{ buildout_directory }} +home = ${buildout:directory} +etc = ${:home}/etc +var = ${:home}/var +etc = ${:home}/etc +bin = ${:home}/bin +run = ${:var}/run +script = ${:etc}/run +service = ${:etc}/service +promise = ${:etc}/promise +log = ${:var}/log + +[ltelogs] +recipe = slapos.recipe.template:jinja2 +template = {{ ltelogs_template }} +rendered = ${directory:home}/ltelogs.sh +mode = 0775 +extensions = jinja2.ext.do +context = + section 
directory directory + +### eNodeB (enb) +[lte-enb-service] +recipe = slapos.cookbook:wrapper +init = ${ltelogs:rendered} ${directory:log}/enb.log; sleep 2 +command-line = {{ enb }}/lteenb ${directory:etc}/gnb.cfg +wrapper-path = ${directory:service}/lte-enb +mode = 0775 +reserve-cpu = True +pidfile = ${directory:run}/enb.pid +hash-files = + ${lte-gnb-config:rendered} +environment = + LD_LIBRARY_PATH={{ openssl_location }}/lib + AMARISOFT_PATH=/opt/amarisoft/.amarisoft + +[config-base] +recipe = slapos.recipe.template:jinja2 +mode = 0664 +extensions = jinja2.ext.do +context = + section directory directory + section slap_configuration slap-configuration + key slapparameter_dict slap-configuration:configuration + import netaddr netaddr + +[lte-gnb-config] +<= config-base +template = {{ gnb_template }} +rendered = ${directory:etc}/gnb.cfg + +[publish-connection-information] +recipe = slapos.cookbook:publish.serialised +monitor-base-url = ${monitor-instance-parameter:monitor-base-url} + +[monitor-instance-parameter] +monitor-title = {{ slapparameter_dict['name'] | string }} +password = {{ slapparameter_dict['monitor-password'] | string }} diff --git a/software/ors-amarisoft/instance-tdd1900-enb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd1900-enb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..6b2351ea1b32925e62ea7968bcaadca55293cba9 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd1900-enb-epc-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 38350 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd1900-enb-input-schema.json b/software/ors-amarisoft/instance-tdd1900-enb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..6b2351ea1b32925e62ea7968bcaadca55293cba9 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd1900-enb-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 38350 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd1900-gnb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd1900-gnb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..95a398388528bfb119c1d433d9fdf25ab673a579 --- /dev/null +++ 
b/software/ors-amarisoft/instance-tdd1900-gnb-epc-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 378000 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 39 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd1900-gnb-input-schema.json b/software/ors-amarisoft/instance-tdd1900-gnb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..95a398388528bfb119c1d433d9fdf25ab673a579 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd1900-gnb-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 378000 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 39 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd2600-enb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd2600-enb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..ba9e354df8d92f6c9dbf5d12cd486aea947f0c1a --- /dev/null +++ b/software/ors-amarisoft/instance-tdd2600-enb-epc-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 38050 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd2600-enb-input-schema.json b/software/ors-amarisoft/instance-tdd2600-enb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..ba9e354df8d92f6c9dbf5d12cd486aea947f0c1a --- /dev/null +++ b/software/ors-amarisoft/instance-tdd2600-enb-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input 
Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 38050 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd2600-gnb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd2600-gnb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..0293cdf0bf878e29877d2880ab56b14a3558ec42 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd2600-gnb-epc-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 520000 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 38 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd2600-gnb-input-schema.json b/software/ors-amarisoft/instance-tdd2600-gnb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..0293cdf0bf878e29877d2880ab56b14a3558ec42 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd2600-gnb-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 520000 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 38 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3500-enb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd3500-enb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..6e6c5fa836d5106c089c2d1c986e9f7502d69490 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3500-enb-epc-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx 
gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 42590 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3500-enb-input-schema.json b/software/ors-amarisoft/instance-tdd3500-enb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..6e6c5fa836d5106c089c2d1c986e9f7502d69490 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3500-enb-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 42590 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3500-gnb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd3500-gnb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..521211f3dbaa7e27515c68fed3184ae170df2910 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3500-gnb-epc-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 632628 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 78 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3500-gnb-input-schema.json b/software/ors-amarisoft/instance-tdd3500-gnb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..521211f3dbaa7e27515c68fed3184ae170df2910 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3500-gnb-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 632628 + }, + "nr_band": { + "title": "NR band", + "description": 
"NR band number", + "type": "number", + "default": 78 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3700-enb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd3700-enb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..b14f89fb8d2d891a4f80b20f9a067f7e8e31b17c --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3700-enb-epc-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 44590 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3700-enb-input-schema.json b/software/ors-amarisoft/instance-tdd3700-enb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..b14f89fb8d2d891a4f80b20f9a067f7e8e31b17c --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3700-enb-input-schema.json @@ -0,0 +1,31 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 30 + }, + "dl_earfcn": { + "title": "DL EARFCN", + "description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 44590 + }, + "n_rb_dl": { + "title": "DL RB", + "description": "number of DL resource blocks", + "type": "number", + "default": 100 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3700-gnb-epc-input-schema.json b/software/ors-amarisoft/instance-tdd3700-gnb-epc-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..20d103fda97bedf863242752e43581687db68943 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3700-gnb-epc-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 646666 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 78 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance-tdd3700-gnb-input-schema.json 
b/software/ors-amarisoft/instance-tdd3700-gnb-input-schema.json new file mode 100644 index 0000000000000000000000000000000000000000..20d103fda97bedf863242752e43581687db68943 --- /dev/null +++ b/software/ors-amarisoft/instance-tdd3700-gnb-input-schema.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Input Parameters", + "properties": { + "tx_gain": { + "title": "Tx gain", + "description": "Tx gain (in dB)", + "type": "number", + "default": 70 + }, + "rx_gain": { + "title": "Rx gain", + "description": "Rx gain (in dB)", + "type": "number", + "default": 50 + }, + "dl_nr_arfcn": { + "title": "DL NR ARFCN", + "description": "Downlink NR Absolute Radio Frequency Channel Number of the cell", + "type": "number", + "default": 646666 + }, + "nr_band": { + "title": "NR band", + "description": "NR band number", + "type": "number", + "default": 78 + }, + "nr_bandwidth": { + "title": "Bandwidth", + "description": "Downlink Bandwidth (in MHz)", + "type": "number", + "default": 40 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/instance.cfg b/software/ors-amarisoft/instance.cfg new file mode 100644 index 0000000000000000000000000000000000000000..bb205a78acf00fd05630c9bb48790acdfa770fd1 --- /dev/null +++ b/software/ors-amarisoft/instance.cfg @@ -0,0 +1,117 @@ +[buildout] +parts = + switch-softwaretype + +eggs-directory = ${buildout:eggs-directory} +develop-eggs-directory = ${buildout:develop-eggs-directory} +offline = true + +[slap-configuration] +recipe = slapos.cookbook:slapconfiguration.serialised +computer = $${slap-connection:computer-id} +partition = $${slap-connection:partition-id} +url = $${slap-connection:server-url} +key = $${slap-connection:key-file} +cert = $${slap-connection:cert-file} + +[jinja2-template-base] +recipe = slapos.recipe.template:jinja2 +rendered = $${buildout:directory}/$${:filename} +extra-context = +context = + import json_module json + key eggs_directory buildout:eggs-directory + key develop_eggs_directory buildout:develop-eggs-directory + raw buildout_directory ${buildout:directory} + section slap_connection slap-connection + key slapparameter_dict slap-configuration:configuration + $${:extra-context} + +[switch-softwaretype] +recipe = slapos.cookbook:switch-softwaretype +enb-epc = dynamic-template-lte-enb-epc:rendered +gnb-epc = dynamic-template-lte-gnb-epc:rendered +enb = dynamic-template-lte-enb:rendered +gnb = dynamic-template-lte-gnb:rendered +epc = dynamic-template-lte-epc:rendered +RootSoftwareInstance = $${:enb-epc} + +[dynamic-template-lte-enb-epc] +< = jinja2-template-base +template = ${template-lte-enb-epc:target} +filename = instance-lte-enb-epc.cfg +extensions = jinja2.ext.do +extra-context = + raw monitor_template ${monitor2-template:rendered} + +[dynamic-template-lte-gnb-epc] +< = jinja2-template-base +template = ${template-lte-gnb-epc:target} +filename = instance-lte-gnb-epc.cfg +extensions = jinja2.ext.do +extra-context = + raw monitor_template ${monitor2-template:rendered} + +[dynamic-template-lte-enb] +< = jinja2-template-base +template = ${template-lte-enb:target} +filename = instance-lte-enb.cfg +extensions = jinja2.ext.do +extra-context = + raw monitor_template ${monitor2-template:rendered} + raw enb ${enb:destination} + raw enb_template ${enb.jinja2.cfg:target} + raw ltelogs_template ${ltelogs.jinja2.sh:target} + raw openssl_location ${openssl:location} + raw default_dl_earfcn ${enb:default-dl-earfcn} + raw default_lte_dl_freq ${enb:default-lte-dl-freq} + raw 
default_lte_band ${enb:default-lte-band} + raw default_lte_n_rb_dl ${enb:default-lte-n-rb-dl} + raw default_lte_tx_gain ${enb:default-lte-tx-gain} + raw default_lte_rx_gain ${enb:default-lte-rx-gain} + raw min_frequency ${enb:min-frequency} + raw max_frequency ${enb:max-frequency} + +[dynamic-template-lte-gnb] +< = jinja2-template-base +template = ${template-lte-gnb:target} +filename = instance-lte-gnb.cfg +extensions = jinja2.ext.do +extra-context = + raw monitor_template ${monitor2-template:rendered} + raw enb ${enb:destination} + raw gnb_template ${gnb.jinja2.cfg:target} + raw ltelogs_template ${ltelogs.jinja2.sh:target} + raw openssl_location ${openssl:location} + raw default_dl_nr_arfcn ${enb:default-dl-nr-arfcn} + raw default_nr_band ${enb:default-nr-band} + raw default_nr_dl_freq ${enb:default-nr-dl-freq} + raw default_nr_bandwidth ${enb:default-nr-bandwidth} + raw default_nr_tx_gain ${enb:default-nr-tx-gain} + raw default_nr_rx_gain ${enb:default-nr-rx-gain} + raw min_frequency ${enb:min-frequency} + raw max_frequency ${enb:max-frequency} + +[dynamic-template-lte-epc] +< = jinja2-template-base +template = ${template-lte-epc:target} +filename = instance-lte-epc.cfg +extensions = jinja2.ext.do +extra-context = + raw monitor_template ${monitor2-template:rendered} + raw mme ${mme:destination} + raw mme_template ${mme.jinja2.cfg:target} + raw ims_template ${ims.jinja2.cfg:target} + raw ltelogs_template ${ltelogs.jinja2.sh:target} + raw openssl_location ${openssl:location} + raw nghttp2_location ${nghttp2:location} + key ue_db_path ue-db-config:rendered + +[ue-db-config] +recipe = slapos.recipe.template:jinja2 +template = ${ue_db.jinja2.cfg:target} +filename = ue_db.cfg +extensions = jinja2.ext.do +rendered = $${buildout:directory}/$${:filename} +context = + key slave_instance_list slap-configuration:slave-instance-list diff --git a/software/ors-amarisoft/ltelogs.jinja2.sh b/software/ors-amarisoft/ltelogs.jinja2.sh new file mode 100755 index 0000000000000000000000000000000000000000..4027f53673a9e0ae3e1e42133cd56ed692705a40 --- /dev/null +++ b/software/ors-amarisoft/ltelogs.jinja2.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright (C) 2012-2015 Amarisoft +# LTE system logger version 2016-10-13 + +# Path for multi environment support +export PATH="$PATH:/bin/:/usr/bin/:/usr/local/bin" + +while [ "$1" != "" ] ; do + + if [ -e "$1" ] ; then + # Avoid storing logs with comments only + HAS_LOG=$(grep -v -l "#" $1) + if [ "$HAS_LOG" != "" ] ; then + DATE=$(date -u +%Y%m%d.%H:%M:%S | sed -e "s/ /-/g") + FILE=$(basename $1) + mv $1 "{{ directory['log'] }}/${FILE}.${DATE}" + else + rm -f $1 + fi + fi + shift +done + diff --git a/software/ors-amarisoft/render-templates b/software/ors-amarisoft/render-templates new file mode 100755 index 0000000000000000000000000000000000000000..4d4e637663efc35d376ae85035b360e7cbda5bee --- /dev/null +++ b/software/ors-amarisoft/render-templates @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 + +from jinja2 import Template +import os + +# Values: 6 (1.4 MHz), 15 (3MHz), 25 (5MHz), 50 (10MHz), 75 (15MHz), 100 (20MHz) +LTE_N_RB_DL=100 +LTE_TX_GAIN=70 +LTE_RX_GAIN=30 + +NR_BANDWIDTH=40 +NR_TX_GAIN=70 +NR_RX_GAIN=50 + +global_context = { + 'generated_file_message': "This file was generated using a jinja2 template and the render-templates script, don't modify directly." 
+} + +rf_mode_context_list = [ + { + 'rf_mode': 'tdd1900', + + 'default_dl_earfcn': 38350, + 'default_lte_dl_freq': 1890.0, + 'default_lte_band': 39, + 'default_lte_n_rb_dl': LTE_N_RB_DL, + 'default_lte_tx_gain': LTE_TX_GAIN, + 'default_lte_rx_gain': LTE_RX_GAIN, + + 'default_dl_nr_arfcn': 378000, + 'default_nr_band': 39, + 'default_nr_dl_freq': 1890.0, + 'default_nr_bandwidth': NR_BANDWIDTH, + 'default_nr_tx_gain': NR_TX_GAIN, + 'default_nr_rx_gain': NR_RX_GAIN, + + 'min_frequency': 1880.0, + 'max_frequency': 1920, + }, { + 'rf_mode': 'tdd2600', + + 'default_dl_earfcn': 38050, + 'default_lte_dl_freq': 2600.0, + 'default_lte_band': 38, + 'default_lte_n_rb_dl': LTE_N_RB_DL, + 'default_lte_tx_gain': LTE_TX_GAIN, + 'default_lte_rx_gain': LTE_RX_GAIN, + + 'default_dl_nr_arfcn': 520000, + 'default_nr_band': 38, + 'default_nr_dl_freq': 2600.0, + 'default_nr_bandwidth': NR_BANDWIDTH, + 'default_nr_tx_gain': NR_TX_GAIN, + 'default_nr_rx_gain': NR_RX_GAIN, + + 'min_frequency': 2570, + 'max_frequency': 2620, + }, { + 'rf_mode': 'tdd3500', + + 'default_dl_earfcn': 42590, + 'default_lte_dl_freq': 3500.0, + 'default_lte_band': 42, + 'default_lte_n_rb_dl': LTE_N_RB_DL, + 'default_lte_tx_gain': LTE_TX_GAIN, + 'default_lte_rx_gain': LTE_RX_GAIN, + + 'default_dl_nr_arfcn': 632628, + 'default_nr_band': 78, + 'default_nr_dl_freq': 3489.42, + 'default_nr_bandwidth': NR_BANDWIDTH, + 'default_nr_tx_gain': NR_TX_GAIN, + 'default_nr_rx_gain': NR_RX_GAIN, + + 'min_frequency': 3400, + 'max_frequency': 3600, + }, { + 'rf_mode': 'tdd3700', + + 'default_dl_earfcn': 44590, + 'default_lte_dl_freq': 3700.0, + 'default_lte_band': 43, + 'default_lte_n_rb_dl': LTE_N_RB_DL, + 'default_lte_tx_gain': LTE_TX_GAIN, + 'default_lte_rx_gain': LTE_RX_GAIN, + + 'default_dl_nr_arfcn': 646666, + 'default_nr_band': 78, + 'default_nr_dl_freq': 3699.99, + 'default_nr_bandwidth': NR_BANDWIDTH, + 'default_nr_tx_gain': NR_TX_GAIN, + 'default_nr_rx_gain': NR_RX_GAIN, + + 'min_frequency': 3600, + 'max_frequency': 3800, + } +] + +with open('software.cfg.jinja2.json', 'r') as f: + software_json_template = Template(f.read()) +with open('instance-enb-input-schema.jinja2.json', 'r') as f: + instance_enb_json_template = Template(f.read()) +with open('instance-gnb-input-schema.jinja2.json', 'r') as f: + instance_gnb_json_template = Template(f.read()) +with open('software.jinja2.cfg', 'r') as f: + software_template = Template(f.read()) + +for rf_mode_context in rf_mode_context_list: + with open('software-{}.cfg.json'.format(rf_mode_context['rf_mode']), + 'w+') as f: + f.write(software_json_template.render(**rf_mode_context, **global_context)) + with open('software-{}.cfg'.format(rf_mode_context['rf_mode']), + 'w+') as f: + f.write(software_template.render(**rf_mode_context, **global_context)) + for software_type in ['enb', 'enb-epc', 'gnb', 'gnb-epc']: + with open('instance-{}-{}-input-schema.json'.format( + rf_mode_context['rf_mode'], + software_type), + 'w+') as f: + if software_type in ['enb', 'enb-epc']: + f.write(instance_enb_json_template.render(**rf_mode_context, **global_context)) + else: + f.write(instance_gnb_json_template.render(**rf_mode_context, **global_context)) diff --git a/software/ors-amarisoft/software-tdd1900.cfg b/software/ors-amarisoft/software-tdd1900.cfg new file mode 100644 index 0000000000000000000000000000000000000000..2d7c4d17a7586c58260a6b8e9a1556768e9a77f4 --- /dev/null +++ b/software/ors-amarisoft/software-tdd1900.cfg @@ -0,0 +1,22 @@ +# This file was generated using a jinja2 template and the render-templates script, 
don't modify directly. +[buildout] +extends = + software.cfg + +[enb] +default-dl-earfcn = 38350 +default-lte-dl-freq = 1890.0 +default-lte-band = 39 +default-lte-n-rb-dl = 100 +default-lte-tx-gain = 70 +default-lte-rx-gain = 30 + +default-dl-nr-arfcn = 378000 +default-nr-band = 39 +default-nr-dl-freq = 1890.0 +default-nr-bandwidth = 40 +default-nr-tx-gain = 70 +default-nr-rx-gain = 50 + +min-frequency = 1880.0 +max-frequency = 1920 \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd1900.cfg.json b/software/ors-amarisoft/software-tdd1900.cfg.json new file mode 100644 index 0000000000000000000000000000000000000000..52ac08a38b433e5858dca84c1011c2d171f96e42 --- /dev/null +++ b/software/ors-amarisoft/software-tdd1900.cfg.json @@ -0,0 +1,56 @@ +{ + "name": "ORS Amarisoft", + "description": "4G and 5G amarisoft stack for ORS", + "serialisation": "xml", + "software-type": { + "enb": { + "title": "eNB", + "software-type": "enb", + "description": "eNodeB Configuration", + "request": "instance-tdd1900-enb-input-schema.json", + "response": "instance-tdd1900-enb-schema.json", + "index": 0 + }, + "enb-epc": { + "title": "eNB and EPC", + "software-type": "enb-epc", + "description": "eNodeB and EPC Configuration", + "request": "instance-tdd1900-enb-epc-input-schema.json", + "response": "instance-tdd1900-enb-epc-schema.json", + "index": 1 + }, + "gnb": { + "title": "gNB", + "software-type": "gnb", + "description": "gNodeB Configuration", + "request": "instance-tdd1900-gnb-input-schema.json", + "response": "instance-tdd1900-gnb-schema.json", + "index": 2 + }, + "gnb-epc": { + "title": "gNB and EPC", + "software-type": "gnb-epc", + "description": "gNodeB and EPC Configuration", + "request": "instance-tdd1900-gnb-epc-input-schema.json", + "response": "instance-tdd1900-gnb-epc-schema.json", + "index": 3 + }, + "epc": { + "title": "EPC", + "software-type": "epc", + "description": "EPC Configuration", + "request": "instance-epc-input-schema.json", + "response": "instance-epc-schema.json", + "index": 4 + }, + "epc-slave": { + "title": "Sim Card", + "description": "EPC Configuration", + "software-type": "epc", + "request": "instance-epc-slave-input-schema.json", + "response": "instance-epc-slave-schema.json", + "shared": true, + "index": 5 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd2600.cfg b/software/ors-amarisoft/software-tdd2600.cfg new file mode 100644 index 0000000000000000000000000000000000000000..9243cc621a6046a341e94f392cfa799c11a94d91 --- /dev/null +++ b/software/ors-amarisoft/software-tdd2600.cfg @@ -0,0 +1,22 @@ +# This file was generated using a jinja2 template and the render-templates script, don't modify directly. 
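A note on the n_rb_dl parameter exposed by the eNB input schemas above: one LTE resource block is 12 subcarriers of 15 kHz, i.e. 180 kHz, so the values listed in the render-templates comment map onto the usual LTE channel bandwidths (for example 100 RB occupy 18 MHz inside a 20 MHz channel, the rest being guard band). The sketch below is illustrative only and not part of this change; it restates that comment, and all names in it are assumptions.

# Illustrative only (not part of this change): relation between the n_rb_dl
# defaults and the LTE channel bandwidth, restating the comment in
# render-templates.  One LTE resource block = 12 subcarriers * 15 kHz = 180 kHz.
N_RB_DL_TO_CHANNEL_MHZ = {6: 1.4, 15: 3, 25: 5, 50: 10, 75: 15, 100: 20}

for n_rb_dl, channel_mhz in N_RB_DL_TO_CHANNEL_MHZ.items():
    occupied_mhz = n_rb_dl * 0.180      # spectrum actually occupied by the RBs
    assert occupied_mhz <= channel_mhz  # the remainder is guard band
    print(n_rb_dl, "RB ->", occupied_mhz, "MHz occupied in a", channel_mhz, "MHz channel")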
+[buildout] +extends = + software.cfg + +[enb] +default-dl-earfcn = 38050 +default-lte-dl-freq = 2600.0 +default-lte-band = 38 +default-lte-n-rb-dl = 100 +default-lte-tx-gain = 70 +default-lte-rx-gain = 30 + +default-dl-nr-arfcn = 520000 +default-nr-band = 38 +default-nr-dl-freq = 2600.0 +default-nr-bandwidth = 40 +default-nr-tx-gain = 70 +default-nr-rx-gain = 50 + +min-frequency = 2570 +max-frequency = 2620 \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd2600.cfg.json b/software/ors-amarisoft/software-tdd2600.cfg.json new file mode 100644 index 0000000000000000000000000000000000000000..0063db883f5845d0232a329f8b20004984392432 --- /dev/null +++ b/software/ors-amarisoft/software-tdd2600.cfg.json @@ -0,0 +1,56 @@ +{ + "name": "ORS Amarisoft", + "description": "4G and 5G amarisoft stack for ORS", + "serialisation": "xml", + "software-type": { + "enb": { + "title": "eNB", + "software-type": "enb", + "description": "eNodeB Configuration", + "request": "instance-tdd2600-enb-input-schema.json", + "response": "instance-tdd2600-enb-schema.json", + "index": 0 + }, + "enb-epc": { + "title": "eNB and EPC", + "software-type": "enb-epc", + "description": "eNodeB and EPC Configuration", + "request": "instance-tdd2600-enb-epc-input-schema.json", + "response": "instance-tdd2600-enb-epc-schema.json", + "index": 1 + }, + "gnb": { + "title": "gNB", + "software-type": "gnb", + "description": "gNodeB Configuration", + "request": "instance-tdd2600-gnb-input-schema.json", + "response": "instance-tdd2600-gnb-schema.json", + "index": 2 + }, + "gnb-epc": { + "title": "gNB and EPC", + "software-type": "gnb-epc", + "description": "gNodeB and EPC Configuration", + "request": "instance-tdd2600-gnb-epc-input-schema.json", + "response": "instance-tdd2600-gnb-epc-schema.json", + "index": 3 + }, + "epc": { + "title": "EPC", + "software-type": "epc", + "description": "EPC Configuration", + "request": "instance-epc-input-schema.json", + "response": "instance-epc-schema.json", + "index": 4 + }, + "epc-slave": { + "title": "Sim Card", + "description": "EPC Configuration", + "software-type": "epc", + "request": "instance-epc-slave-input-schema.json", + "response": "instance-epc-slave-schema.json", + "shared": true, + "index": 5 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd3500.cfg b/software/ors-amarisoft/software-tdd3500.cfg new file mode 100644 index 0000000000000000000000000000000000000000..6d067d48c344d8e75a3b1376d8bf2e41a73484d8 --- /dev/null +++ b/software/ors-amarisoft/software-tdd3500.cfg @@ -0,0 +1,22 @@ +# This file was generated using a jinja2 template and the render-templates script, don't modify directly. 
+[buildout] +extends = + software.cfg + +[enb] +default-dl-earfcn = 42590 +default-lte-dl-freq = 3500.0 +default-lte-band = 42 +default-lte-n-rb-dl = 100 +default-lte-tx-gain = 70 +default-lte-rx-gain = 30 + +default-dl-nr-arfcn = 632628 +default-nr-band = 78 +default-nr-dl-freq = 3489.42 +default-nr-bandwidth = 40 +default-nr-tx-gain = 70 +default-nr-rx-gain = 50 + +min-frequency = 3400 +max-frequency = 3600 \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd3500.cfg.json b/software/ors-amarisoft/software-tdd3500.cfg.json new file mode 100644 index 0000000000000000000000000000000000000000..f4313e6721b06a28ee16279663f5fc02e0ddd3ec --- /dev/null +++ b/software/ors-amarisoft/software-tdd3500.cfg.json @@ -0,0 +1,56 @@ +{ + "name": "ORS Amarisoft", + "description": "4G and 5G amarisoft stack for ORS", + "serialisation": "xml", + "software-type": { + "enb": { + "title": "eNB", + "software-type": "enb", + "description": "eNodeB Configuration", + "request": "instance-tdd3500-enb-input-schema.json", + "response": "instance-tdd3500-enb-schema.json", + "index": 0 + }, + "enb-epc": { + "title": "eNB and EPC", + "software-type": "enb-epc", + "description": "eNodeB and EPC Configuration", + "request": "instance-tdd3500-enb-epc-input-schema.json", + "response": "instance-tdd3500-enb-epc-schema.json", + "index": 1 + }, + "gnb": { + "title": "gNB", + "software-type": "gnb", + "description": "gNodeB Configuration", + "request": "instance-tdd3500-gnb-input-schema.json", + "response": "instance-tdd3500-gnb-schema.json", + "index": 2 + }, + "gnb-epc": { + "title": "gNB and EPC", + "software-type": "gnb-epc", + "description": "gNodeB and EPC Configuration", + "request": "instance-tdd3500-gnb-epc-input-schema.json", + "response": "instance-tdd3500-gnb-epc-schema.json", + "index": 3 + }, + "epc": { + "title": "EPC", + "software-type": "epc", + "description": "EPC Configuration", + "request": "instance-epc-input-schema.json", + "response": "instance-epc-schema.json", + "index": 4 + }, + "epc-slave": { + "title": "Sim Card", + "description": "EPC Configuration", + "software-type": "epc", + "request": "instance-epc-slave-input-schema.json", + "response": "instance-epc-slave-schema.json", + "shared": true, + "index": 5 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd3700.cfg b/software/ors-amarisoft/software-tdd3700.cfg new file mode 100644 index 0000000000000000000000000000000000000000..b4e3173882379eb9d3337adf842ae6d40da7c838 --- /dev/null +++ b/software/ors-amarisoft/software-tdd3700.cfg @@ -0,0 +1,22 @@ +# This file was generated using a jinja2 template and the render-templates script, don't modify directly. 
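The rf_mode_context_list in render-templates and the software-tdd*.cfg files generated from it pair each default EARFCN / NR-ARFCN with an explicit downlink frequency. Those pairs can be cross-checked against the standard 3GPP channel-raster formulas (TS 36.101 for LTE EARFCN, TS 38.104 for the NR global raster). The sketch below is illustrative only and not part of this change; the band/offset table and helper names are assumptions taken from those specifications, while the numeric defaults are the ones used in these files.

# Illustrative cross-check (not part of this change) of the default
# EARFCN / NR-ARFCN <-> frequency pairs used by render-templates and the
# generated software-tdd*.cfg files, assuming the 3GPP raster formulas.

# LTE (TS 36.101): F_dl [MHz] = F_dl_low + 0.1 * (EARFCN - N_offs_dl)
LTE_BANDS = {          # band: (F_dl_low in MHz, N_offs_dl)
    38: (2570.0, 37750),
    39: (1880.0, 38250),
    42: (3400.0, 41590),
    43: (3600.0, 43590),
}

def lte_dl_freq(band, earfcn):
    f_low, n_offs = LTE_BANDS[band]
    return f_low + 0.1 * (earfcn - n_offs)

# NR (TS 38.104 global raster): 5 kHz steps below 3 GHz, 15 kHz steps from
# 3 GHz up to 24.25 GHz (FR2 is not used by these profiles).
def nr_freq(nr_arfcn):
    if nr_arfcn < 600000:
        return 0.005 * nr_arfcn
    return 3000.0 + 0.015 * (nr_arfcn - 600000)

# (rf_mode, lte_band, dl_earfcn, lte_dl_freq, dl_nr_arfcn, nr_dl_freq)
DEFAULTS = [
    ("tdd1900", 39, 38350, 1890.0, 378000, 1890.0),
    ("tdd2600", 38, 38050, 2600.0, 520000, 2600.0),
    ("tdd3500", 42, 42590, 3500.0, 632628, 3489.42),
    ("tdd3700", 43, 44590, 3700.0, 646666, 3699.99),
]
for rf_mode, band, earfcn, lte_f, nr_arfcn, nr_f in DEFAULTS:
    assert abs(lte_dl_freq(band, earfcn) - lte_f) < 1e-6
    assert abs(nr_freq(nr_arfcn) - nr_f) < 1e-6
    print(rf_mode, "defaults are consistent")

Running this prints a consistency line for all four rf modes; in particular it confirms that NR-ARFCN 632628 sits at 3489.42 MHz and 646666 at 3699.99 MHz on the 15 kHz global raster, matching the default_nr_dl_freq values used here.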
+[buildout] +extends = + software.cfg + +[enb] +default-dl-earfcn = 44590 +default-lte-dl-freq = 3700.0 +default-lte-band = 43 +default-lte-n-rb-dl = 100 +default-lte-tx-gain = 70 +default-lte-rx-gain = 30 + +default-dl-nr-arfcn = 646666 +default-nr-band = 78 +default-nr-dl-freq = 3699.99 +default-nr-bandwidth = 40 +default-nr-tx-gain = 70 +default-nr-rx-gain = 50 + +min-frequency = 3600 +max-frequency = 3800 \ No newline at end of file diff --git a/software/ors-amarisoft/software-tdd3700.cfg.json b/software/ors-amarisoft/software-tdd3700.cfg.json new file mode 100644 index 0000000000000000000000000000000000000000..fd58eed1775799846c7c93d4e94b9c20566c8939 --- /dev/null +++ b/software/ors-amarisoft/software-tdd3700.cfg.json @@ -0,0 +1,56 @@ +{ + "name": "ORS Amarisoft", + "description": "4G and 5G amarisoft stack for ORS", + "serialisation": "xml", + "software-type": { + "enb": { + "title": "eNB", + "software-type": "enb", + "description": "eNodeB Configuration", + "request": "instance-tdd3700-enb-input-schema.json", + "response": "instance-tdd3700-enb-schema.json", + "index": 0 + }, + "enb-epc": { + "title": "eNB and EPC", + "software-type": "enb-epc", + "description": "eNodeB and EPC Configuration", + "request": "instance-tdd3700-enb-epc-input-schema.json", + "response": "instance-tdd3700-enb-epc-schema.json", + "index": 1 + }, + "gnb": { + "title": "gNB", + "software-type": "gnb", + "description": "gNodeB Configuration", + "request": "instance-tdd3700-gnb-input-schema.json", + "response": "instance-tdd3700-gnb-schema.json", + "index": 2 + }, + "gnb-epc": { + "title": "gNB and EPC", + "software-type": "gnb-epc", + "description": "gNodeB and EPC Configuration", + "request": "instance-tdd3700-gnb-epc-input-schema.json", + "response": "instance-tdd3700-gnb-epc-schema.json", + "index": 3 + }, + "epc": { + "title": "EPC", + "software-type": "epc", + "description": "EPC Configuration", + "request": "instance-epc-input-schema.json", + "response": "instance-epc-schema.json", + "index": 4 + }, + "epc-slave": { + "title": "Sim Card", + "description": "EPC Configuration", + "software-type": "epc", + "request": "instance-epc-slave-input-schema.json", + "response": "instance-epc-slave-schema.json", + "shared": true, + "index": 5 + } + } +} \ No newline at end of file diff --git a/software/ors-amarisoft/software.cfg b/software/ors-amarisoft/software.cfg new file mode 100644 index 0000000000000000000000000000000000000000..dfe66ddef809f9203cc829a6f786f88826be60c4 --- /dev/null +++ b/software/ors-amarisoft/software.cfg @@ -0,0 +1,142 @@ +[buildout] +extends = + buildout.hash.cfg + ../../stack/slapos.cfg + ../../stack/monitor/buildout.cfg + ../../component/logrotate/buildout.cfg + ../../component/nghttp2/buildout.cfg + +parts += + template + slapos-cookbook + ltelogs.jinja2.sh +# copy all configs by default + mme.jinja2.cfg + ims.jinja2.cfg + enb.jinja2.cfg + gnb.jinja2.cfg + ue_db.jinja2.cfg +# sdr driver is dependent on ENB thus should be added explicitely by software.cfg + sdr-driver + lteenb-cap-sys-nice + lteenb-avx2-cap-sys-nice +# unimplemented parts - the http monitor and better log handling using logrotate +# apache-php +# logrotate + +[template] +recipe = slapos.recipe.template +url = ${:_profile_base_location_}/${:filename} +output = ${buildout:directory}/template.cfg +mode = 0644 + +[download-base] +recipe = slapos.recipe.build:download +url = ${:_profile_base_location_}/${:_update_hash_filename_} + +[template-lte-enb-epc] +<= download-base + +[template-lte-gnb-epc] +<= download-base + 
+[template-lte-enb] +<= download-base + +[template-lte-gnb] +<= download-base + +[template-lte-epc] +<= download-base + +[amarisoft] +recipe = slapos.recipe.build +path = /opt/amarisoft/lte +init = + import os + options['lte-version'] = os.readlink(options["path"])[:-1] + +[copy-to-instance] +recipe = slapos.recipe.build:download +url = ${:_profile_base_location_}/${:_buildout_section_name_} + +[copy-config-to-instance] +recipe = slapos.recipe.build:download +url = ${:_profile_base_location_}/config/${:_buildout_section_name_} + +[unpack-to-instance] +recipe = slapos.recipe.build:download-unpacked +url = ${amarisoft:path}/lte${:_buildout_section_name_}-linux-${amarisoft:lte-version}.tar.gz +destination = ${buildout:directory}/${:_buildout_section_name_} +strip-top-level-dir = true + +[enb.jinja2.cfg] +<= copy-config-to-instance +filename = enb.jinja2.cfg +[gnb.jinja2.cfg] +<= copy-config-to-instance +filename = gnb.jinja2.cfg +[ltelogs.jinja2.sh] +<= copy-to-instance +filename = ltelogs.jinja2.sh +[ue_db.jinja2.cfg] +<= copy-config-to-instance +filename = ue_db.jinja2.cfg +[mme.jinja2.cfg] +<= copy-config-to-instance +filename = mme.jinja2.cfg +[ims.jinja2.cfg] +<= copy-config-to-instance +filename = ims.jinja2.cfg + +[sdr] +<= unpack-to-instance +url = ${amarisoft:path}/trx_${:_buildout_section_name_}-linux-${amarisoft:lte-version}.tar.gz +destination = ${enb:destination}/x86_64 +md5sum = ${trx_sdr-linux:md5sum} + +[sdr-driver] +# move trx_sdr.so next to lteenb binary +recipe = plone.recipe.command +command = cp -p ${sdr:destination}/trx_sdr.so ${enb:destination} + +[enb] +<= unpack-to-instance +md5sum = ${lteenb-linux:md5sum} +[mme] +<= unpack-to-instance +md5sum = ${ltemme-linux:md5sum} + +[lteenb-linux] +filename = ${amarisoft:lte-version}/lteenb-linux-${amarisoft:lte-version}.tar.gz +md5sum = 842b1526073472a30cb0b286d3b1528c +[ltemme-linux] +filename = ${amarisoft:lte-version}/ltemme-linux-${amarisoft:lte-version}.tar.gz +md5sum = 9d7917f90c7c7b2a8ba624d874595351 +[ltewww-linux] +filename = ${amarisoft:lte-version}/ltewww-linux-${amarisoft:lte-version}.tar.gz +md5sum = 416b6167f70b12910fbbb9293038554c +[trx_sdr-linux] +filename = ${amarisoft:lte-version}/trx_sdr-linux-${amarisoft:lte-version}.tar.gz +md5sum = e6960e3460f1a32c2436f36b2082995d + +[base-lteenb-cap-sys-nice] +recipe = plone.recipe.command +command = + getcap ${amarisoft:path}/${:binary} | grep cap_sys_nice+ep && exit 0; + # Make a copy or restore the copy, as patchelf will irreversibly change the md5sum + stat ${enb:destination}/${:binary}-unpriviledged || + cp ${enb:destination}/${:binary} ${enb:destination}/${:binary}-unpriviledged && + cp ${enb:destination}/${:binary}-unpriviledged ${enb:destination}/${:binary} + # ORS are pre-configured to have sudo allow slapsoft to run give-cap-sys-nice-lteenb script + # with root permissions + sudo -n ${amarisoft:path}/../give-cap-sys-nice-lteenb ${enb:destination}/${:binary} || true; +update-command = ${:command} + +[lteenb-avx2-cap-sys-nice] +<= base-lteenb-cap-sys-nice +binary=lteenb + +[lteenb-cap-sys-nice] +<= base-lteenb-cap-sys-nice +binary=lteenb-avx2 diff --git a/software/ors-amarisoft/software.cfg.jinja2.json b/software/ors-amarisoft/software.cfg.jinja2.json new file mode 100644 index 0000000000000000000000000000000000000000..defcc0da36b7a9c4b8cfd50de9175bbb48db80f5 --- /dev/null +++ b/software/ors-amarisoft/software.cfg.jinja2.json @@ -0,0 +1,56 @@ +{ + "name": "ORS Amarisoft", + "description": "4G and 5G amarisoft stack for ORS", + "serialisation": "xml", + 
"software-type": { + "enb": { + "title": "eNB", + "software-type": "enb", + "description": "eNodeB Configuration", + "request": "instance-{{ rf_mode }}-enb-input-schema.json", + "response": "instance-{{ rf_mode }}-enb-schema.json", + "index": 0 + }, + "enb-epc": { + "title": "eNB and EPC", + "software-type": "enb-epc", + "description": "eNodeB and EPC Configuration", + "request": "instance-{{ rf_mode }}-enb-epc-input-schema.json", + "response": "instance-{{ rf_mode }}-enb-epc-schema.json", + "index": 1 + }, + "gnb": { + "title": "gNB", + "software-type": "gnb", + "description": "gNodeB Configuration", + "request": "instance-{{ rf_mode }}-gnb-input-schema.json", + "response": "instance-{{ rf_mode }}-gnb-schema.json", + "index": 2 + }, + "gnb-epc": { + "title": "gNB and EPC", + "software-type": "gnb-epc", + "description": "gNodeB and EPC Configuration", + "request": "instance-{{ rf_mode }}-gnb-epc-input-schema.json", + "response": "instance-{{ rf_mode }}-gnb-epc-schema.json", + "index": 3 + }, + "epc": { + "title": "EPC", + "software-type": "epc", + "description": "EPC Configuration", + "request": "instance-epc-input-schema.json", + "response": "instance-epc-schema.json", + "index": 4 + }, + "epc-slave": { + "title": "Sim Card", + "description": "EPC Configuration", + "software-type": "epc", + "request": "instance-epc-slave-input-schema.json", + "response": "instance-epc-slave-schema.json", + "shared": true, + "index": 5 + } + } +} diff --git a/software/ors-amarisoft/software.jinja2.cfg b/software/ors-amarisoft/software.jinja2.cfg new file mode 100644 index 0000000000000000000000000000000000000000..332cdff3a28991f4661e3fed9e4dc59e6d7674e7 --- /dev/null +++ b/software/ors-amarisoft/software.jinja2.cfg @@ -0,0 +1,22 @@ +# {{ generated_file_message }} +[buildout] +extends = + software.cfg + +[enb] +default-dl-earfcn = {{ default_dl_earfcn }} +default-lte-dl-freq = {{ default_lte_dl_freq }} +default-lte-band = {{ default_lte_band }} +default-lte-n-rb-dl = {{ default_lte_n_rb_dl }} +default-lte-tx-gain = {{ default_lte_tx_gain }} +default-lte-rx-gain = {{ default_lte_rx_gain }} + +default-dl-nr-arfcn = {{ default_dl_nr_arfcn }} +default-nr-band = {{ default_nr_band }} +default-nr-dl-freq = {{ default_nr_dl_freq }} +default-nr-bandwidth = {{ default_nr_bandwidth }} +default-nr-tx-gain = {{ default_nr_tx_gain }} +default-nr-rx-gain = {{ default_nr_rx_gain }} + +min-frequency = {{ min_frequency }} +max-frequency = {{ max_frequency }} diff --git a/software/powerdns/software.cfg b/software/powerdns/software.cfg index 769ca9fb3bfd5f74afdbc45e566ed0a4c9809891..5aa9ddfa5603e4c57314b2db701aed6682dc2850 100644 --- a/software/powerdns/software.cfg +++ b/software/powerdns/software.cfg @@ -26,33 +26,27 @@ eggs = recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} output = ${buildout:directory}/template.cfg -mode = 0644 [template-powerdns] recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} output = ${buildout:directory}/template-powerdns.cfg -mode = 0644 [template-pdns-configuration] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 640 [template-dns-replicate] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [iso-list] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [template-zones-file] recipe = slapos.recipe.build:download url = 
${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 [versions] PyRSS2Gen = 1.1 diff --git a/software/proftpd/buildout.hash.cfg b/software/proftpd/buildout.hash.cfg index 701a028b2494f4c6bf2cf3ac5f52f0c0961de044..c441b72b630a833245b2aed37f4efc2b42136290 100644 --- a/software/proftpd/buildout.hash.cfg +++ b/software/proftpd/buildout.hash.cfg @@ -15,11 +15,11 @@ [instance-profile] filename = instance.cfg.in -md5sum = efb4238229681447aa7fe73898dffad4 +md5sum = b62dd8dd89ef6627ebac20987a89a069 [instance-default] filename = instance-default.cfg.in -md5sum = 4df64032e14c19363ad3dfe9aecf8e0c +md5sum = 16d592805c6c1756e54924242148ba85 [proftpd-config-file] filename = proftpd-config-file.cfg.in diff --git a/software/proftpd/instance-default.cfg.in b/software/proftpd/instance-default.cfg.in index ea88be4037f78dd31c6fd5ad732e8fdde1da4200..bb1076554a9c97747b397c83d7c8f2b9e91699b0 100644 --- a/software/proftpd/instance-default.cfg.in +++ b/software/proftpd/instance-default.cfg.in @@ -102,7 +102,7 @@ config-port = ${proftpd:sftp-port} recipe = slapos.cookbook:wrapper wrapper-path =${buildout:bin-directory}/${:_buildout_section_name_} command-line = - {{ perl_bin }} {{ ftpasswd_bin }} --passwd --home=${proftpd:data-dir} --shell=/bin/false --uid=${proftpd-userinfo:pw-uid} --gid=${proftpd-userinfo:gr-gid} --file ${auth-user-file:output} + {{ ftpasswd_bin }} --passwd --home=${proftpd:data-dir} --shell=/bin/false --uid=${proftpd-userinfo:pw-uid} --gid=${proftpd-userinfo:gr-gid} --file ${auth-user-file:output} [auth-user-file] recipe = plone.recipe.command diff --git a/software/proftpd/instance.cfg.in b/software/proftpd/instance.cfg.in index c93595426d1379e041e8cc6bcc75bb246e09398c..7e9927914968b3cf9073f9f2755f6c269dff7b95 100644 --- a/software/proftpd/instance.cfg.in +++ b/software/proftpd/instance.cfg.in @@ -24,7 +24,6 @@ context = raw ftpasswd_bin {{ ftpasswd_bin }} raw ftpdctl_bin {{Â ftpdctl_bin }} raw ssh_keygen_bin {{Â ssh_keygen_bin }} - raw perl_bin {{Â perl_bin }} raw template_monitor {{Â template_monitor }} [instance-default] diff --git a/software/proftpd/software.cfg b/software/proftpd/software.cfg index ea0ebbf29005678fe27803710ba9266f39982ad8..b6e5df4df7da7291948bf4736c7c5efeebae6937 100644 --- a/software/proftpd/software.cfg +++ b/software/proftpd/software.cfg @@ -18,8 +18,6 @@ part = python3 [download-file-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -download-only = true -mode = 0644 [proftpd-config-file] <= download-file-base @@ -31,7 +29,6 @@ mode = 0644 recipe = slapos.recipe.template:jinja2 template = ${:_profile_base_location_}/${:filename} rendered = ${buildout:directory}/instance.cfg -mode = 0644 extensions = jinja2.ext.do context = section buildout buildout @@ -40,10 +37,7 @@ context = key ftpasswd_bin proftpd-output:ftpasswd key ftpdctl_bin proftpd-output:ftpdctl key ssh_keygen_bin openssh-output:keygen - key perl_bin proftpd-output:perl raw template_monitor ${monitor2-template:rendered} [versions] -collective.recipe.environment = 1.1.0 -collective.recipe.grp = 1.1.0 plone.recipe.command = 1.1 diff --git a/software/proftpd/test/test.py b/software/proftpd/test/test.py index 68982bcd01822fa5d214f7d7355515edd554dcba..8b0a4551ec258127f81654e8d863366bb15e8b8e 100644 --- a/software/proftpd/test/test.py +++ b/software/proftpd/test/test.py @@ -476,6 +476,5 @@ class TestBanLog(ProFTPdTestCase, LogRotationMixin): expected_logged_text = 'denied due to host ban' def _access(self) -> None: for _ in range(6): - with 
self.assertRaisesRegex( - Exception, '(Authentication failed|Connection reset by peer)'): + with self.assertRaises(Exception): self._getConnection(password='wrong') diff --git a/software/re6stnet/software.cfg b/software/re6stnet/software.cfg index c6d388b299189998a5457f18bba0ab8938c9e748..ad8342fae60292992627de32f1c215bfa2dc2ccb 100644 --- a/software/re6stnet/software.cfg +++ b/software/re6stnet/software.cfg @@ -72,7 +72,6 @@ context = [download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 644 [template] recipe = slapos.recipe.template:jinja2 diff --git a/software/slapos-master/software.cfg b/software/slapos-master/software.cfg index 37865286c48589b7ffdff51f8bea27b22118f48a..b25d07df97207439a5bb4721172ba74fe54b3ffa 100644 --- a/software/slapos-master/software.cfg +++ b/software/slapos-master/software.cfg @@ -62,7 +62,6 @@ extra-paths += [download-base-part] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 644 [template-erp5] < = download-base-part diff --git a/software/slapos-sr-testing/software.cfg b/software/slapos-sr-testing/software.cfg index f7a2dfe3dd2de2e280eb83de7ebb2a155f69f917..cbad13b13e4afe5c829192c1f606beaac95b00af 100644 --- a/software/slapos-sr-testing/software.cfg +++ b/software/slapos-sr-testing/software.cfg @@ -234,6 +234,7 @@ setup = ${slapos.core-repository:location} eggs += ${:extra-eggs} extra-eggs = ${lxml-python:egg} + ${python-PyYAML:egg} ${slapos.core-setup:egg} ${pillow-python:egg} ${pycurl:egg} diff --git a/software/slapos-testing/software.cfg b/software/slapos-testing/software.cfg index f5de29c54ad22b30cc173104e82364e52f275d34..7322933142482cd92d90b249f55b629896dd14ac 100644 --- a/software/slapos-testing/software.cfg +++ b/software/slapos-testing/software.cfg @@ -115,6 +115,7 @@ setup = ${rubygemsrecipe-repository:location} <= python-interpreter eggs += ${lxml-python:egg} + ${python-PyYAML:egg} ${python-cryptography:egg} ${backports.lzma:egg} ${pycurl:egg} diff --git a/software/slaprunner/buildout.hash.cfg b/software/slaprunner/buildout.hash.cfg index f24bbe4c0ae8a5505de80135f65a0939b62a4ca1..2b7beac10d610a77835757457c6e66abe9df06b3 100644 --- a/software/slaprunner/buildout.hash.cfg +++ b/software/slaprunner/buildout.hash.cfg @@ -14,11 +14,11 @@ # not need these here). 
[template] filename = instance.cfg -md5sum = 8d6878ff1d2e75010c50a1a2b0c13b24 +md5sum = ba3539d959143a3be76fcb54196a3aa8 [template-runner] filename = instance-runner.cfg -md5sum = 384285ab789396b6e674a8125ce2d030 +md5sum = efa0d1647dee2689485264daf256c76d [template-runner-import-script] filename = template/runner-import.sh.jinja2 diff --git a/software/slaprunner/instance-runner.cfg b/software/slaprunner/instance-runner.cfg index bfc3de7691f940dc4c1463c72fcd99b43e422ff3..da3f68575c5ebad5c05c19db9308d3310894799e 100644 --- a/software/slaprunner/instance-runner.cfg +++ b/software/slaprunner/instance-runner.cfg @@ -362,7 +362,7 @@ work_dir = $${slaprunner:working-directory} [nginx_conf] recipe = slapos.recipe.template:jinja2 -template = ${template_nginx_conf:location}/${template_nginx_conf:filename} +template = ${template_nginx_conf:target} rendered = $${nginx-frontend:path_nginx_conf} context = key shellinabox_socket shellinabox:socket @@ -372,7 +372,7 @@ context = [nginx-launcher] recipe = slapos.recipe.template:jinja2 -template = ${template_launcher:location}/${template_launcher:filename} +template = ${template_launcher:target} rendered = $${nginx-frontend:bin_launcher} mode = 700 context = @@ -404,7 +404,7 @@ httpd_cors_file = $${slaprunner-httpd-cors:location} [httpd-conf] recipe = slapos.recipe.template:jinja2 -template = ${template_httpd_conf:location}/${template_httpd_conf:filename} +template = ${template_httpd_conf:target} rendered = $${directory:etc}/httpd.conf context = section parameters httpd-parameters @@ -663,7 +663,7 @@ buildout-shared-folder = $${runnerdirectory:home}/shared [slapos-cfg] recipe = slapos.recipe.template:jinja2 -template = ${template-slapos-cfg:location}/${template-slapos-cfg:filename} +template = ${template-slapos-cfg:target} rendered = $${slaprunner:slapos.cfg} mode = 700 context = @@ -673,7 +673,7 @@ context = [slapos-test-cfg] recipe = slapos.recipe.template:jinja2 -template = ${template-slapos-cfg:location}/${template-slapos-cfg:filename} +template = ${template-slapos-cfg:target} rendered = $${test-runner:slapos.cfg} mode = 700 context = @@ -681,7 +681,7 @@ context = [slapformat-definition.cfg] recipe = slapos.recipe.template:jinja2 -template = ${template-slapformat-definition.cfg:location}/${template-slapformat-definition.cfg:filename} +template = ${template-slapformat-definition.cfg:target} rendered = $${slaprunner:slapformat-definition.cfg} mode = 700 context = @@ -727,7 +727,7 @@ command = $${prepare-software:wrapper-path} [instance-parameters] recipe = slapos.recipe.template:jinja2 extensions = jinja2.ext.do -template = ${template-parameters:location}/${template-parameters:filename} +template = ${template-parameters:target} rendered = $${directory:etc}/.parameter.xml.default mode = 0644 context = @@ -759,7 +759,7 @@ stop-on-error = true [bash-profile] recipe = slapos.recipe.template:jinja2 -template = ${template-bash-profile:location}/${template-bash-profile:filename} +template = ${template-bash-profile:target} rendered = $${buildout:directory}/.bash_profile context = raw path $${shell-environment:path} @@ -826,7 +826,7 @@ slapformat-definition.cfg = $${slaprunner:slapformat-definition.cfg} [supervisord-conf] recipe = slapos.recipe.template:jinja2 -template = ${template-supervisord:location}/${template-supervisord:filename} +template = ${template-supervisord:target} rendered = $${directory:etc}/supervisord.conf context = import multiprocessing multiprocessing @@ -836,7 +836,7 @@ context = [listener-slapgrid-bin] recipe = slapos.recipe.template:jinja2 
-template = ${template-listener-slapgrid:location}/${template-listener-slapgrid:filename} +template = ${template-listener-slapgrid:target} rendered = $${directory:bin}/listener_slapgrid.py mode = 0744 context = @@ -905,7 +905,7 @@ private-path-list += [monitor-check-webrunner-internal-instance] recipe = slapos.recipe.template:jinja2 -template = ${monitor-check-webrunner-internal-instance:location}/${monitor-check-webrunner-internal-instance:filename} +template = ${monitor-check-webrunner-internal-instance:target} rendered = $${monitor-directory:bin}/$${:filename} filename = monitor-check-webrunner-internal-instance mode = 0744 @@ -913,7 +913,7 @@ mode = 0744 ## Slapuser slapos command script [template-slapuser-script] recipe = slapos.recipe.template:jinja2 -template = ${template-slapuser-script:location}/${template-slapuser-script:filename} +template = ${template-slapuser-script:target} rendered = $${buildout:bin-directory}/slapos mode = 0744 context = diff --git a/software/slaprunner/instance.cfg b/software/slaprunner/instance.cfg index baab39005a12b0c41186eed57e2f41b932fb30a0..4d39f614cb2c23aacbebb88cd376dfcc00f76723 100644 --- a/software/slaprunner/instance.cfg +++ b/software/slaprunner/instance.cfg @@ -63,7 +63,7 @@ context = key slapparameter_dict slap-configuration:configuration raw software_release_bin ${buildout:bin-directory} raw backup_wait_time ${exporter-default-configuration:backup_wait_time} - raw monitor_check_resilient_feed_template_path ${template-monitor-check-resilient-feed:location}/${template-monitor-check-resilient-feed:filename} + raw monitor_check_resilient_feed_template_path ${template-monitor-check-resilient-feed:target} raw buildout_executable_location ${buildout:executable} raw bash_executable_location ${bash:location}/bin/bash raw rsync_bin_folder ${rsync:location}/bin @@ -78,12 +78,12 @@ context = key template_runner_path instance-base-runner:rendered key slapparameter_dict slap-configuration:configuration raw software_release_bin ${buildout:bin-directory} - raw importer_script_path ${template-runner-import-script:location}/${template-runner-import-script:filename} + raw importer_script_path ${template-runner-import-script:target} raw buildout_executable_location ${buildout:executable} raw bash_executable_location ${bash:location}/bin/bash raw sqlite3_executable_location ${sqlite3:location}/bin/sqlite3 raw rsync_executable_location ${rsync:location}/bin/rsync - raw software_release_information_template ${template-resilient-software-release-information:destination}/${template-resilient-software-release-information:filename} + raw software_release_information_template ${template-resilient-software-release-information:target} [slap-configuration] recipe = slapos.cookbook:slapconfiguration diff --git a/software/slaprunner/software.cfg b/software/slaprunner/software.cfg index 31b7ae9d7c9de9b7665ca7544c7b583332c84b15..ee75b2846e21e73c06d7ece60196bf0cfb9b3ffd 100644 --- a/software/slaprunner/software.cfg +++ b/software/slaprunner/software.cfg @@ -54,23 +54,10 @@ parts = [template-base] recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename} -mode = 0644 [download-base] -recipe = hexagonit.recipe.download +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 - -[download-only-base] -< = download-base -ignore-existing = true -download-only = true - -[template-download-base] -# Downloads from template directory into current directory -< = download-only-base -url = 
${:_profile_base_location_}/template/${:filename} -location = ${buildout:parts-directory}/${:_buildout_section_name_} [template] < = template-base @@ -81,66 +68,52 @@ output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template-runner.cfg.in [template-runner-import-script] -< = template-download-base -filename = runner-import.sh.jinja2 +< = download-base [instance-runner-import] < = download-base -recipe = slapos.recipe.build:download [instance-runner-export] < = download-base -recipe = slapos.recipe.build:download [template-resilient] < = download-base -recipe = slapos.recipe.build:download [template_nginx_conf] -< = download-only-base +< = download-base [template_httpd_conf] -< = download-only-base +< = download-base [template_launcher] < = download-base -recipe = slapos.recipe.build:download [template-slapos-cfg] -< = template-download-base -filename = slapos.cfg.in +< = download-base [template-slapformat-definition.cfg] -< = template-download-base -filename = slapformat-definition.cfg.in +< = download-base [template-parameters] -< = download-only-base +< = download-base [template-bash-profile] -< = template-download-base -filename = bash_profile.in +< = download-base [template-supervisord] -< = template-download-base -filename = supervisord.conf.in +< = download-base [template-listener-slapgrid] -< = template-download-base -filename = listener_slapgrid.py.in +< = download-base [monitor-check-webrunner-internal-instance] -< = template-download-base -destination = ${:location}/${:filename} -filename = monitor-check-webrunner-internal-instances.py +< = download-base [template-resilient-software-release-information] -< = template-download-base -filename = resilient_software_release_information.py.in +< = download-base [template-slapuser-script] -< = template-download-base -filename = slapos-slapuser-script.in +< = download-base [template-buildout-shared-part-list] < = template-base @@ -175,5 +148,4 @@ gitdb = 0.6.4 gunicorn = 19.10.0 prettytable = 0.7.2 pycurl = 7.43.0 -collective.recipe.environment = 0.2.0 smmap = 0.9.0 diff --git a/software/theia/buildout.hash.cfg b/software/theia/buildout.hash.cfg index b2f2331d24e3eef3363ab6316f3a5c6e50ea4227..051754deb611b0ddf80d3990c0411bd1f43cf5fa 100644 --- a/software/theia/buildout.hash.cfg +++ b/software/theia/buildout.hash.cfg @@ -15,48 +15,40 @@ [instance-theia] _update_hash_filename_ = instance-theia.cfg.jinja.in -md5sum = 8e4f43e603a5dd57752758c987465d41 +md5sum = 4df9f0d76a134a8abec9060a0c1be50b [instance] _update_hash_filename_ = instance.cfg.in -md5sum = a7d78b4002266c69ece05a476df82791 +md5sum = f2f01a47d98a980177dc1755e618bbb7 [instance-import] _update_hash_filename_ = instance-import.cfg.jinja.in -md5sum = 57b707cf0ed83be1959d26a88c131906 +md5sum = b0a2c2b3d59fd6c8ba76c634b83a1ba2 [instance-export] _update_hash_filename_ = instance-export.cfg.jinja.in -md5sum = 190a736471f0e0cffcb2838968e01d84 +md5sum = b3f1dd83033d6a45def0bd26e70d5a9c [instance-resilient] _update_hash_filename_ = instance-resilient.cfg.jinja -md5sum = d78a9f885bdebf6720197209e0c21aa0 +md5sum = b1e338973bc9cfe1bb4e16d46b3c6da9 [theia-common] _update_hash_filename_ = theia_common.py -md5sum = e57396473b4b6a17d26a747f0030293c +md5sum = 6a25c6a7f1beb27232a3c9acd8a76500 [theia-export] _update_hash_filename_ = theia_export.py -md5sum = b5f5ac1924b27d3f2be2e5ea291c119e +md5sum = e2f6c483cce09f87ab1e63ae8be0daf4 [theia-import] _update_hash_filename_ = theia_import.py -md5sum = 9e8c17a4b2d802695caf0c2c052f0d11 - -[yarn.lock] -_update_hash_filename_ = 
yarn.lock -md5sum = 067d2db611b21f77885f3adfd7f81453 +md5sum = 1a668d6203d42b4d46d56e24c7606cb2 [python-language-server-requirements.txt] _update_hash_filename_ = python-language-server-requirements.txt md5sum = 9f478fd1b03b7738f3de549cb899bf54 -[preloadTemplate.html] -_update_hash_filename_ = preloadTemplate.html -md5sum = 8157c22134200bd862a07c6521ebf799 - [slapos.css.in] _update_hash_filename_ = slapos.css.in md5sum = d2930ec3ef973b7908f0fa896033fd64 diff --git a/software/theia/instance-export.cfg.jinja.in b/software/theia/instance-export.cfg.jinja.in index 5f7f03e53d09a881736270bccadfe22ab378c304..56fbcbcbd6ddc4de0a03792eb8785aca754d0436 100644 --- a/software/theia/instance-export.cfg.jinja.in +++ b/software/theia/instance-export.cfg.jinja.in @@ -33,7 +33,7 @@ mode = 0700 exitcode-file = $${directory:srv}/export-exitcode-file error-file = $${directory:srv}/export-errormessage-file context = - raw python ${software-info:python-with-eggs} + raw python ${software-info:python-for-resiliency} raw theia_export ${software-info:theia-export} raw bash ${software-info:bash} raw rsync ${software-info:rsync} diff --git a/software/theia/instance-import.cfg.jinja.in b/software/theia/instance-import.cfg.jinja.in index 578ae9138e92d67f62378b2c7a516653355f4157..3ed8d607343528b8aec2af3620f79a09bf4e1298 100644 --- a/software/theia/instance-import.cfg.jinja.in +++ b/software/theia/instance-import.cfg.jinja.in @@ -29,6 +29,11 @@ name = Import {{ parameter_dict['additional-frontend-name'] }} {%- endif %} +# Change standalone socket path to avoid collisions +[slapos-standalone-config] +abstract-socket-path = $${directory:home}/standalone-import-ready + + # Change port ranges to avoid race conditions on port allocation [frontend-instance-port] minimum = 3200 @@ -79,7 +84,7 @@ mode = 0700 exitcode-file = $${directory:srv}/import-exitcode-file error-file = $${directory:srv}/import-errormessage-file context = - raw python ${software-info:python-with-eggs} + raw python ${software-info:python-for-resiliency} raw theia_import ${software-info:theia-import} raw bash ${software-info:bash} raw rsync ${software-info:rsync} diff --git a/software/theia/instance-resilient.cfg.jinja b/software/theia/instance-resilient.cfg.jinja index 671a505ae7305f614357a8c8031b76def40edb6b..29007c6827b71a8feeaaf9e095cd0ac24d5f728a 100644 --- a/software/theia/instance-resilient.cfg.jinja +++ b/software/theia/instance-resilient.cfg.jinja @@ -1,7 +1,7 @@ -{% import 'parts' as parts %} -{% import 'replicated' as replicated with context %} +{% import 'parts' as parts -%} +{% import 'replicated' as replicated with context -%} -{% set number_of_instances = slapparameter_dict.get('resilient-clone-number', 1)|int %} +{% set clones_amount = slapparameter_dict.get('resilient-clone-number', 1)|int + 1 -%} [buildout] eggs-directory = {{ eggs_directory }} @@ -11,59 +11,38 @@ extends = {{ monitor_template }} parts += -# Generate the parts to request theia-export, pull-backup and theia-import -# See stack/resilient/template-parts.cfg.in and stack/resilient/template-replicated.cfg.in -# See below for the generation of the sections corresponding to the parts generated here - {{ parts.replicate("theia", number_of_instances + 1) }} -# Also publish some connection parameters - publish-connection-parameter + publish +{#- Generate the parts to request the main theia, the clones and the PBS. 
#} +{#- See ../../stack/resilient/template-parts.cfg.in #} + {{ parts.replicate("theia", clones_amount) }} -[ArgLeader] -[ArgBackup] +{#- Prepare monitoring information to transmit to and request from the main theia, the clones and the PBS #} +{%- set monitor_cors_domains = slapparameter_dict.pop('monitor-cors-domains', 'monitor.app.officejs.com') %} +{%- set monitor_username = slapparameter_dict.get('monitor-username', '${monitor-instance-parameter:username}') %} +{%- set monitor_password = slapparameter_dict.get('monitor-password', '${monitor-htpasswd:passwd}') %} +{%- set monitor_return = ['monitor-base-url'] %} +{%- set monitor_parameter = {'monitor-cors-domains': monitor_cors_domains, 'monitor-username' : monitor_username, 'monitor-password': monitor_password} %} +{%- set monitor_dict = {'parameter': monitor_parameter, 'return': monitor_return} %} -# Generate sections to request theia-export, pull-backup and theia-import -# See stack/resilient/template-replicated.cfg.in -# In particular: -# -# [request-theia] -# <= ArgLeader -# software-type = export -# ... -# -# [request-theia-pseudo-replicating-1] -# <= ArgBackup -# software-type = import -# ... -# -# [request-pbs-theia-1] -# software-type = pull-backup -# ... -# -{{ replicated.replicate("theia", number_of_instances + 1, - "export", "import", - "ArgLeader", "ArgBackup", - slapparameter_dict=slapparameter_dict) }} -# Extend the list of return parameters for the export request -# The monitor parameters are only there to assert they are -# actually published by the export instance +{# Generate the sections to request the main theia, the clones and the PBS. #} +{#- See ../../stack/resilient/template-replicated.cfg.in #} +{{ replicated.replicate("theia", clones_amount, "export", "import", slapparameter_dict=slapparameter_dict, monitor_parameter_dict=monitor_dict) }} + + +# Ask for the connection parameters of the main theia [request-theia] -return += url username password backend-url monitor-base-url monitor-setup-url +return += url username password backend-url -# Extend the list of return parameters for the import request -# with the monitor parameters to assert they are actually published -[request-theia-pseudo-replicating-1] -return += monitor-base-url monitor-setup-url -# Publish some parameters from the export instance -[publish-connection-parameter] +# Publish connection parameters of the main theia and resiliency parameters +[publish] recipe = slapos.cookbook:publish url = ${request-theia:connection-url} username = ${request-theia:connection-username} password = ${request-theia:connection-password} backend-url = ${request-theia:connection-backend-url} - -# Publish resiliency parameters fetched by the resilient stack -[publish-connection-parameter] +monitor-base-url = ${monitor-publish:monitor-base-url} +monitor-setup-url = ${monitor-publish:monitor-setup-url} <= publish-connection-information diff --git a/software/theia/instance-theia.cfg.jinja.in b/software/theia/instance-theia.cfg.jinja.in index 5cec5291000672921542243366bf5a37850ad690..ad293536175bd87967823d10ecd887ef8885079d 100644 --- a/software/theia/instance-theia.cfg.jinja.in +++ b/software/theia/instance-theia.cfg.jinja.in @@ -63,6 +63,25 @@ bash-completions = $${:home}/.local/share/bash-completion/completions/ fish-completions = $${:home}/.config/fish/completions/ +# Monitor +# ------- + +[monitor-instance-parameter] +monitor-httpd-port = {{ parameter_dict['monitor-httpd-port'] }} +{%- for k in ('monitor-cors-domains', 'monitor-username', 'monitor-password') %} +{%- set 
v = parameter_dict.get(k) %} +{%- if v %} +{{ k[8:] }} = {{ v }} +{%- endif %} +{%- endfor %} +{%- for k in ('monitor-url-list', ) %} +{%- set v = parameter_dict.get(k) %} +{%- if v %} +{{ k }} = {{ v }} +{%- endif %} +{%- endfor %} + + # Promises # -------- @@ -132,7 +151,7 @@ config-port = $${slapos-standalone-instance:port} <= monitor-promise-base promise = check_socket_listening name = standalone-ready-promise.py -config-abstract = $${directory:runner}/standalone_ready +config-abstract = $${slapos-standalone-config:abstract-socket-path} [slapos-autorun-promise] <= monitor-promise-base @@ -183,7 +202,6 @@ sla-instance_guid = {{ parameter_dict['additional-frontend-guid'] }} [frontend-instance-password] recipe = slapos.cookbook:generate.password username = admin -bytes = 12 storage-path = $${buildout:parts-directory}/.$${:_buildout_section_name_} [frontend-instance-port] @@ -378,7 +396,7 @@ recipe = slapos.cookbook:wrapper wrapper-path = $${directory:services}/$${:_buildout_section_name_} command-line = $${theia-service:rendered} --hostname=$${:hostname} --port=$${:port} $${directory:project} hash-existing-files = - ${yarn.lock:output} + ${yarn.lock:target} ${theia-wrapper:rendered} ip = {{ ipv4_random }} hostname = $${:ip} @@ -426,6 +444,42 @@ command = ${buildout:bin-directory}/slapos complete --shell fish > $${directory:fish-completions}/slapos.fish +# Embedded Instance +# ----------------- + +{%- set embedded_sr = parameter_dict['embedded-sr'] %} +{%- set embedded_sr_type = parameter_dict['embedded-sr-type'] %} +{%- set embedded_instance_parameters = parameter_dict['embedded-instance-parameters'] %} +{%- if embedded_sr %} +{%- if embedded_sr.startswith('~/') %} +{%- set embedded_sr = os_module.path.join(partition_root_path, embedded_sr[2:]) %} +{%- set embedded_sr = os_module.path.normpath(embedded_sr) %} +{%- endif %} +[request-embedded-instance-script] +recipe = slapos.recipe.template:jinja2 +rendered = $${directory:project}/request_embedded.sh +mode = 0700 +template = + inline:#!/bin/sh + + slapos supply {{ embedded_sr }} slaprunner + + slapos request "embedded_instance" {{ embedded_sr }} +{%- if embedded_sr_type %} --type {{ embedded_sr_type }} {%- endif %} +{%- if embedded_instance_parameters %} --parameters-file $${embedded-instance-parameters:rendered} + +[embedded-instance-parameters] +recipe = slapos.recipe.template:jinja2 +rendered = $${directory:project}/$${:_buildout_section_name_}.json +template = + inline:{{ embedded_instance_parameters | indent(2) }} +{%- endif %} +{%- endif %} + +{%- set embedded_digest = str(embedded_sr) + str(embedded_sr_type) + str(embedded_instance_parameters) %} +{%- set embedded_digest_hash = hashlib_module.md5(embedded_digest.encode()).hexdigest() %} + + # SlapOS Standalone # ----------------- @@ -442,6 +496,7 @@ port = $${slapos-standalone-port:port} local-software-release-root = $${directory:home} slapos-configuration = $${directory:runner}/etc/slapos.cfg computer-id = slaprunner +abstract-socket-path = $${directory:home}/standalone-{{ embedded_digest_hash[:16] }} [slapos-standalone-activate] recipe = slapos.recipe.template:jinja2 @@ -453,6 +508,100 @@ template = export SLAPOS_CLIENT_CONFIGURATION=$SLAPOS_CONFIGURATION echo 'Standalone SlapOS for computer `$${slapos-standalone-config:computer-id}` activated' +[slapos-standalone-script] +recipe = slapos.recipe.template:jinja2 +rendered = $${directory:bin}/$${:_buildout_section_name_} +template = + inline:#!${python-for-standalone:executable} + import glob + import json + import os + import 
signal + import socket + import subprocess + import sys + import time + import traceback + + import slapos.slap.standalone + + # Include this hash, so that if it changes the standalone service will be restarted + # {{ embedded_digest_hash }} + + shared_parts = """{{ '''${buildout:shared-part-list}''' | indent(2) }}""" + shared_part_list = [x.strip() for x in shared_parts.splitlines() if x.strip()] + partition_forward_configuration = ( + slapos.slap.standalone.PartitionForwardAsPartitionConfiguration( + master_url="$${slap-connection:server-url}", + computer="$${slap-connection:computer-id}", + partition="$${slap-connection:partition-id}", + cert="$${slap-connection:cert-file}", + key="$${slap-connection:key-file}", + software_release_list=( + 'http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg', + ), + ), + ) + standalone = slapos.slap.standalone.StandaloneSlapOS( + "$${directory:runner}", + "$${slapos-standalone-config:ipv4}", + $${slapos-standalone-config:port}, + computer_id="$${slapos-standalone-config:computer-id}", + shared_part_list=shared_part_list, + software_root="$${directory:runner}/software", + instance_root="$${directory:runner}/instance", + partition_forward_configuration=partition_forward_configuration, + slapos_bin="${buildout:bin-directory}/slapos", + local_software_release_root="$${slapos-standalone-config:local-software-release-root}", + ) + + def signal_handler(signum, frame): + print("Signal {signum} received".format(signum=signum)) + sys.exit() + signal.signal(signal.SIGTERM, signal_handler) + + standalone.start() + try: + partition_count = 20 + print("Standalone SlapOS: Formatting %d partitions" % partition_count) + standalone.format(partition_count, '$${slapos-standalone-config:ipv4}', '$${slapos-standalone-config:ipv6}') + print("Standalone SlapOS for computer `$${slapos-standalone-config:computer-id}` started") + # Run instance at least once, to start the supervisor managing instances. + try: + standalone.waitForInstance(max_retry=0) + except slapos.slap.standalone.SlapOSNodeCommandError as e: + print("Error instanciating: {}".format(e)) +{%- if embedded_sr %} + # Compatibility layer + try: + for cp in standalone.computer.getComputerPartitionList(): + if cp.getInstanceParameterDict().get("instance_title") == "Embedded Instance": + print("Renaming 'Embedded Instance' into 'embedded_instance'") + cp.rename(new_name="embedded_instance") + break + except Exception: + print("Exception in compatibility layer, printing and moving on") + traceback.print_exc() + # Run request script + print("Running SlapOS script $${request-embedded-instance-script:rendered}") + slapos_env = { + 'PATH': os.path.dirname(standalone._slapos_bin), + 'SLAPOS_CONFIGURATION': standalone._slapos_config, + 'SLAPOS_CLIENT_CONFIGURATION': standalone._slapos_config + } + subprocess.call(("$${request-embedded-instance-script:rendered}",), env=slapos_env) +{%- endif %} + s = socket.socket(socket.AF_UNIX) + s.bind("\0$${slapos-standalone-config:abstract-socket-path}") + s.listen(5) + print("Standalone SlapOS ready") + while True: + s.accept()[0].close() + finally: + print("Stopping standalone subsystem") + standalone.stop() + print("Exiting") + [slapos-standalone] recipe = slapos.recipe.template:jinja2 rendered = $${directory:bin}/$${:_buildout_section_name_} @@ -463,25 +612,7 @@ template = #XXX find out where the extra nodejs in theia's PATH comes from export PATH=${nodejs:location}/bin/:$PATH . 
$${slapos-standalone-activate:rendered} - exec ${slapos-standalone:script-path} \ - $${directory:runner} \ - $${slapos-standalone-config:ipv4} \ - $${slapos-standalone-config:ipv6} \ - $${slapos-standalone-config:port} \ - $${slapos-standalone-config:local-software-release-root} \ - $${slapos-standalone-config:computer-id} \ - {%- if parameter_dict.get('embedded-sr') %} - --sr='{{ parameter_dict['embedded-sr'] }}' \ - {%- if parameter_dict.get('embedded-sr-type') %} - --srtype='{{ parameter_dict['embedded-sr-type'] }}' \ - {%- endif %} - --srparams='$${embedded-instance-parameters:rendered}' \ - {%- endif %} - $${slap-connection:server-url} \ - $${slap-connection:computer-id} \ - $${slap-connection:partition-id} \ - --key='$${slap-connection:key-file}' \ - --cert='$${slap-connection:cert-file}' + exec $${slapos-standalone-script:rendered} [slapos-standalone-instance] recipe = slapos.cookbook:wrapper @@ -489,6 +620,7 @@ wrapper-path = $${directory:services}/$${:_buildout_section_name_} command-line = $${slapos-standalone:rendered} hash-files = $${slapos-standalone:rendered} + $${slapos-standalone-script:rendered} hostname = $${slapos-standalone-config:ipv4} port = $${slapos-standalone-config:port} @@ -604,14 +736,6 @@ recipe = slapos.cookbook:symbolic.link target-directory = $${directory:project} link-binary = $${directory:runner} -{% if parameter_dict.get('embedded-sr') -%} -[embedded-instance-parameters] -recipe = slapos.recipe.template:jinja2 -rendered = $${directory:etc}/$${:_buildout_section_name_}.json -template = - inline:{{ parameter_dict['embedded-instance-parameters'] | indent(2) }} -{%- endif %} - [request-script-template] recipe = slapos.recipe.template:jinja2 rendered = $${directory:project}/$${:_buildout_section_name_}.sh diff --git a/software/theia/instance.cfg.in b/software/theia/instance.cfg.in index d88c1c822663339020a9ec36384ea33740ac1d41..17cdba73febd5fd6a6126fb41b76179a39141464 100644 --- a/software/theia/instance.cfg.in +++ b/software/theia/instance.cfg.in @@ -36,14 +36,17 @@ context = jsonkey default_parameter_dict :default-parameters key parameter_dict slap-configuration:configuration key root_title slap-configuration:root-instance-title + key partition_root_path buildout:directory key ipv6_random slap-configuration:ipv6-random key ipv4_random slap-configuration:ipv4-random + import os_module os + import hashlib_module hashlib default-parameters = { "autorun": "running", "embedded-sr": null, "embedded-sr-type": null, - "embedded-instance-parameters": "null", + "embedded-instance-parameters": null, "frontend-name": "Theia Frontend", "frontend-sr": "$${:frontend-sr}", "frontend-sr-type": "RootSoftwareInstance", @@ -51,7 +54,8 @@ default-parameters = "additional-frontend-name":"Theia Additional Frontend", "additional-frontend-sr": "$${:frontend-sr}", "additional-frontend-sr-type": "RootSoftwareInstance", - "additional-frontend-guid": null + "additional-frontend-guid": null, + "monitor-httpd-port": 8386 } frontend-sr = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg diff --git a/software/theia/software.cfg b/software/theia/software.cfg index 6210b964707a914886849133abfea64dc206954d..daee67ff9bb8efe0710e3331e8290769a8629e38 100644 --- a/software/theia/software.cfg +++ b/software/theia/software.cfg @@ -1,31 +1,28 @@ [buildout] extends = - ../../component/caddy/buildout.cfg - ../../component/git/buildout.cfg - ../../component/bash/buildout.cfg - ../../component/bash-completion/buildout.cfg - 
../../component/fish-shell/buildout.cfg - ../../component/tmux/buildout.cfg - ../../component/tig/buildout.cfg - ../../component/vim/buildout.cfg - ../../component/curl/buildout.cfg - ../../component/coreutils/buildout.cfg - ../../component/java-jdk/buildout.cfg - ../../component/fonts/buildout.cfg - ../../component/libsecret/buildout.cfg - ../../component/pkgconfig/buildout.cfg - ../../stack/nodejs.cfg - ../../stack/slapos.cfg - ../../stack/monitor/buildout.cfg - ../../stack/resilient/buildout.cfg - ../../component/defaults.cfg - ./download-plugins.cfg - ./buildout.hash.cfg + ../../component/caddy/buildout.cfg + ../../component/git/buildout.cfg + ../../component/bash/buildout.cfg + ../../component/bash-completion/buildout.cfg + ../../component/fish-shell/buildout.cfg + ../../component/tmux/buildout.cfg + ../../component/tig/buildout.cfg + ../../component/vim/buildout.cfg + ../../component/curl/buildout.cfg + ../../component/coreutils/buildout.cfg + ../../component/fonts/buildout.cfg + ../../component/theia/buildout.cfg + ../../stack/slapos.cfg + ../../stack/monitor/buildout.cfg + ../../stack/resilient/buildout.cfg + ../../component/defaults.cfg + ./buildout.hash.cfg parts = theia-wrapper slapos-cookbook - python-with-eggs + python-for-resiliency + python-for-standalone instance-theia instance instance-import @@ -37,149 +34,28 @@ parts = # default for slapos-standalone shared-part-list = + +# Versions +# -------- + +[gcc] # We keep the gcc part in sync with the one from erp5 software, so that when we install # erp5 inside theia's slapos parts can be shared. -[gcc] max_version = 0 - [nodejs] -<= nodejs-12.18.3 +<= nodejs-14.16.0 [yarn] <= yarn-1.17.3 -[slapos-standalone] -recipe = zc.recipe.egg -eggs = - slapos.core -scripts = ${:_buildout_section_name_} -script-path = ${buildout:bin-directory}/${:scripts} -# XXX generate a fake entry point for a non existant module, that will not -# be used because we exit in initialization step -entry-points = - ${:scripts}=not_used:main -initialization = - import argparse - import glob - import json - import os - import signal - import socket - import sys - import time - - import slapos.slap.standalone - - parser = argparse.ArgumentParser() - parser.add_argument('base_directory') - parser.add_argument('ipv4') - parser.add_argument('ipv6') - parser.add_argument('server_port', type=int) - parser.add_argument('local_software_release_root') - parser.add_argument('computer_id') - parser.add_argument('--sr') - parser.add_argument('--srtype') - parser.add_argument('--srparams') - forwarded_arguments = parser.add_argument_group('forwarded') - forwarded_arguments.add_argument('master_url') - forwarded_arguments.add_argument('computer') - forwarded_arguments.add_argument('partition') - # cert and key are optional - forwarded_arguments.add_argument('--cert') - forwarded_arguments.add_argument('--key') - args = parser.parse_args() - shared_part_list = [x.strip() for x in '''${buildout:shared-part-list}'''.splitlines() if x.strip()] - partition_forward_configuration = ( - slapos.slap.standalone.PartitionForwardAsPartitionConfiguration( - master_url=args.master_url, - computer=args.computer, - partition=args.partition, - cert=args.cert, - key=args.key, - software_release_list=( - 'http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg', - ), - ), - ) - standalone = slapos.slap.standalone.StandaloneSlapOS( - args.base_directory, - args.ipv4, - args.server_port, - computer_id=args.computer_id, - shared_part_list=shared_part_list, - 
software_root="%s/software" % args.base_directory, - instance_root="%s/instance" % args.base_directory, - partition_forward_configuration=partition_forward_configuration, - slapos_bin="${buildout:bin-directory}/slapos", - local_software_release_root=args.local_software_release_root, - ) - - def signal_handler(signum, frame): - print("Signal {signum} received".format(signum=signum)) - sys.exit() - signal.signal(signal.SIGTERM, signal_handler) - - standalone.start() - try: - partition_count = 20 - if len(glob.glob(os.path.join(standalone.instance_directory, '*'))) < partition_count: - print("Standalone SlapOS: Formatting {partition_count} partitions".format( - partition_count=partition_count)) - standalone.format( - partition_count, - args.ipv4, - args.ipv6, - ) - print("Standalone SlapOS for computer `{}` started".format(args.computer_id)) - # Run instance at least once, to start the supervisor managing instances. - try: - standalone.waitForInstance(max_retry=0) - except slapos.slap.standalone.SlapOSNodeCommandError as e: - print("Error instanciating: {}".format(e)) - - if args.sr: - try: - with open(args.srparams) as f: - params = json.load(f) - except Exception: - params = None - if not isinstance(params, dict): - params = None - print("Supplying and Requesting Embedded Software {sr} with type {srtype}".format( - sr=args.sr, srtype=args.srtype)) - print("With parameters {param_dict} parsed from '{srparams}'".format( - param_dict=params, srparams=args.srparams)) - standalone.supply(args.sr) - standalone.request( - args.sr, - "Embedded Instance", - args.srtype, - partition_parameter_kw=params, - ) - - s = socket.socket(socket.AF_UNIX) - s.bind('\0' + os.path.join(args.base_directory, 'standalone_ready')) - s.listen(5) - print("Standalone SlapOS ready") - while True: - s.accept()[0].close() - - finally: - print("Stopping standalone subsystem") - standalone.stop() - print("Exiting") - +[gowork] +install += + golang.org/x/tools/gopls@v0.6.6 -needs-these-eggs-scripts-in-path = - ${supervisor:recipe} - ${slapos-command:recipe} -[supervisor] -recipe = zc.recipe.egg -eggs = - supervisor - setuptools +# Downloads and templates +# ----------------------- [template-base] recipe = slapos.recipe.template @@ -192,199 +68,15 @@ url = ${:_profile_base_location_}/${:_update_hash_filename_} destination = ${buildout:directory}/${:_buildout_section_name_} output = ${:destination} -[python-language-server] -version = 0.19.0 -recipe = plone.recipe.command -command = - PATH=${git:location}/bin/:$PATH bash -c "${python3:executable} -m venv --clear ${:location} && \ - . ${:location}/bin/activate && \ - pip install -r ${python-language-server-requirements.txt:output}" -location = ${buildout:parts-directory}/${:_buildout_section_name_} -stop-on-error = true - [python-language-server-requirements.txt] <= download-base -[theia] -recipe = plone.recipe.command -command = ${bash:location}/bin/bash -c " - export \ - TMPDIR=${:location}/tmp \ - PATH=${nodejs:location}/bin:${pkgconfig:location}/bin:$PATH \ - PKG_CONFIG_PATH=${libsecret:pkg-config-path} \ - LDFLAGS='-Wl,-rpath=${libsecret:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${glib:location}/lib' && \ - mkdir -p ${:location} && \ - mkdir -p \$TMPDIR && \ - cd ${:location} && \ - cp ${package.json:rendered} . && - cp ${yarn.lock:output} . 
&& - ${yarn:location}/bin/yarn && \ - ${yarn:location}/bin/yarn theia build" -location = ${buildout:parts-directory}/${:_buildout_section_name_} -stop-on-error = true -uses = ${yarn.lock:recipe} - -[theia-plugins] -recipe = slapos.recipe.build -urls = ${theia-download-plugins:urls} -install = - import os - for line in options['urls'].splitlines(): - extension_name, url, md5sum = line.split() - extract_dir = self.extract(self.download(url, md5sum)) - destination_dir = os.path.join(options['location'], extension_name) - self.copyTree(guessworkdir(extract_dir), destination_dir) - os.chmod(destination_dir, 0o750) - -[yarn.lock] -<= download-base - -[preloadTemplate.html] -<= download-base - [slapos.css.in] <= download-base [logo.png] <= download-base -[package.json] -recipe = slapos.recipe.template:jinja2 -template = - inline:{ - "private": true, - "theia": { - "backend": { - "config": { - "warnOnPotentiallyInsecureHostPattern": false - } - }, - "frontend": { - "config": { - "applicationName": "Theia SlapOS", - "preferences": { - "application.confirmExit": "always", - "files.associations": { - "*.cfg": "zc-buildout" - }, - "files.enableTrash": false, - "files.exclude": { - "**.pyc": true, - "**.egg-info": true, - "__pycache__": true, - ".git": true, - ".env": true, - "**/node_modules/**": true - }, - "files.watcherExclude": { - "**/.eggs/**": true, - "**/.env/**": true, - "**/.git/**": true, - "**/node_modules/**": true - }, - "editor.multiCursorModifier": "ctrlCmd", - "editor.tabSize": 2, - "plantuml.server": "https://plantuml.host.vifib.net/svg/", - "plantuml.render": "PlantUMLServer", - "gitlens.remotes": [{ "domain": "lab.nexedi.com", "type": "GitLab" }], - "java.home": "${java-jdk:location}" - }, - "warnOnPotentiallyInsecureHostPattern": false - } - }, - "generator": { - "config": { - "preloadTemplate": "${preloadTemplate.html:output}" - } - } - }, - "dependencies": { - "@theia/bulk-edit": "latest", - "@theia/callhierarchy": "latest", - "@theia/console": "latest", - "@theia/core": "latest", - "@theia/debug": "latest", - "@theia/editor": "latest", - "@theia/editor-preview": "latest", - "@theia/file-search": "latest", - "@theia/filesystem": "latest", - "@theia/getting-started": "latest", - "@theia/keymaps": "latest", - "@theia/markers": "latest", - "@theia/messages": "latest", - "@theia/metrics": "latest", - "@theia/mini-browser": "latest", - "@theia/monaco": "latest", - "@theia/navigator": "latest", - "@theia/outline-view": "latest", - "@theia/output": "latest", - "@theia/plugin-dev": "latest", - "@theia/plugin-ext": "latest", - "@theia/plugin-ext-vscode": "latest", - "@theia/preferences": "latest", - "@theia/preview": "latest", - "@theia/process": "latest", - "@theia/property-view": "latest", - "@theia/scm": "latest", - "@theia/scm-extra": "latest", - "@theia/search-in-workspace": "latest", - "@theia/task": "latest", - "@theia/terminal": "latest", - "@theia/timeline": "latest", - "@theia/typehierarchy": "latest", - "@theia/userstorage": "latest", - "@theia/variable-resolver": "latest", - "@theia/vsx-registry": "latest", - "@theia/workspace": "latest", - "@perrinjerome/theia-open": "latest", - "@perrinjerome/theia-open-cli": "latest" - }, - "devDependencies": { - "@theia/cli": "latest" - } - } -rendered = ${buildout:directory}/${:_buildout_section_name_} - - -[gowork] -install += - golang.org/x/tools/gopls@v0.6.6 - - -[cli-utilities] -PATH = 
${nodejs:location}/bin:${bash:location}/bin:${fish-shell:location}/bin:${tig:location}/bin:${vim:location}/bin:${tmux:location}/bin:${git:location}/bin:${curl:location}/bin:${python2.7:location}/bin:${buildout:bin-directory} - - -[theia-wrapper] -recipe = slapos.recipe.template:jinja2 -rendered = ${buildout:bin-directory}/${:_buildout_section_name_} -template = - inline: - #!/bin/sh - cd ${theia:location} - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libsecret:location}/lib:${gettext:location}/lib:${glib:location}/lib" - exec ${yarn:location}/bin/yarn theia start "$@" - -[theia-open] -recipe = slapos.recipe.template:jinja2 -rendered = ${buildout:bin-directory}/${:_buildout_section_name_} -template = - inline: - #!/bin/sh - exec ${nodejs:location}/bin/node ${theia:location}/node_modules/.bin/theia-open "$@" - -[python-with-eggs] -recipe = zc.recipe.egg -interpreter = ${:_buildout_section_name_} -eggs = - ${slapos-toolbox:eggs} - six - zc.buildout -# Only generate the interpreter script to avoid conflicts with scripts -# for eggs that are also generated by another section, like slapos.toolbox -scripts = ${:interpreter} - - [instance-theia] <= template-base output = ${buildout:directory}/instance-theia.cfg.jinja @@ -416,9 +108,57 @@ destination = ${buildout:directory}/theia_export.py <= download-base destination = ${buildout:directory}/theia_import.py + +# Utilities +# --------- + +[supervisor] +recipe = zc.recipe.egg +eggs = + supervisor + setuptools + +[python-language-server] +version = 0.19.0 +recipe = plone.recipe.command +command = + PATH=${git:location}/bin/:$PATH bash -c "${python3:executable} -m venv --clear ${:location} && \ + . ${:location}/bin/activate && \ + pip install -r ${python-language-server-requirements.txt:output}" +location = ${buildout:parts-directory}/${:_buildout_section_name_} +stop-on-error = true + +[cli-utilities] +PATH = ${nodejs:location}/bin:${bash:location}/bin:${fish-shell:location}/bin:${tig:location}/bin:${vim:location}/bin:${tmux:location}/bin:${git:location}/bin:${curl:location}/bin:${python:location}/bin:${buildout:bin-directory} + + +[python-with-eggs] +recipe = zc.recipe.egg +interpreter = ${:_buildout_section_name_} +# Only generate the interpreter script to avoid conflicts with scripts +# for eggs that are also generated by another section, like slapos.toolbox +scripts = ${:interpreter} +executable = ${buildout:bin-directory}/${:interpreter} + +[python-for-resiliency] +<= python-with-eggs +eggs = + ${slapos-toolbox:eggs} + six + zc.buildout + +[python-for-standalone] +<= python-with-eggs +eggs = + slapos.core +needs-these-eggs-scripts-in-path = + ${supervisor:recipe} + ${slapos-command:recipe} + + [software-info] slapos = ${buildout:bin-directory}/slapos -python-with-eggs = ${buildout:bin-directory}/${python-with-eggs:interpreter} +python-for-resiliency = ${python-for-resiliency:executable} python = ${python:location}/bin/python rsync = ${rsync:location}/bin/rsync sqlite3 = ${sqlite3:location}/bin/sqlite3 diff --git a/software/theia/test/project_tests.py b/software/theia/test/project_tests.py index 89aaaae7195cba92316a3d8d049fcc3e372fbd51..441ba207cb2f7e2dc321fe5a55f1c110fbd68e95 100644 --- a/software/theia/test/project_tests.py +++ b/software/theia/test/project_tests.py @@ -60,7 +60,7 @@ def setUpModule(): class ERP5Mixin(object): _test_software_url = erp5_software_release_url - _connexion_parameters_regex = re.compile(r"{\s*'_'\s*:\s*'(.*)'\s*}") + _connexion_parameters_regex = re.compile(r"{.*}", re.DOTALL) def _getERP5ConnexionParameters(self, 
software_type='export'): slapos = self._getSlapos(software_type) @@ -69,7 +69,7 @@ class ERP5Mixin(object): stderr=subprocess.STDOUT, ) print(out) - return json.loads(self._connexion_parameters_regex.search(out).group(1)) + return json.loads(self._connexion_parameters_regex.search(out).group(0).replace("'", '"')) def _getERP5Url(self, connexion_parameters, path=''): return urljoin(connexion_parameters['family-default-v6'], path) @@ -121,6 +121,10 @@ class TestTheiaResilienceERP5(ERP5Mixin, test_resiliency.TestTheiaResilience): backup_max_tries = 480 backup_wait_interval = 60 + def test_twice(self): + # do nothing + pass + def _prepareExport(self): super(TestTheiaResilienceERP5, self)._prepareExport() diff --git a/software/theia/test/test.py b/software/theia/test/test.py index bd6c4087200be3bf3ec4bd9cd4e1cf7543e44cb3..7808625115fb557323a4431e5183362cefe0a8cf 100644 --- a/software/theia/test/test.py +++ b/software/theia/test/test.py @@ -40,7 +40,7 @@ import six from six.moves.urllib.parse import urlparse, urljoin -from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass +from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath @@ -229,29 +229,82 @@ class TestTheiaEmbeddedSlapOSShutdown(TheiaTestCase): self.assertFalse(embedded_slapos_process.is_running()) -class TestTheiaWithSR(TheiaTestCase): - sr_url = 'bogus/software.cfg' +class ReRequestMixin(object): + def rerequest(self, parameter_dict=None, state='started'): + software_url = self.getSoftwareURL() + software_type = self.getInstanceSoftwareType() + name = self.default_partition_reference + self.slap.request( + software_release=software_url, + software_type=software_type, + partition_reference=name, + partition_parameter_kw=parameter_dict, + state=state) + + def reinstantiate(self): + # Process at least twice to propagate parameter changes + try: + self.slap.waitForInstance() + except SlapOSNodeCommandError: + pass + self.slap.waitForInstance(self.instance_max_retry) + + +class TestTheiaWithSR(TheiaTestCase, ReRequestMixin): + sr_url = '~/bogus/software.cfg' sr_type = 'bogus_type' - instance_parameters = '{\n"bogus_param": "bogus_value"\n}' + instance_parameters = '{\n"bogus_param": "bogus_value",\n"bogus_param2": "bogus_value2"\n}' - @classmethod - def getInstanceParameterDict(cls): - return { - 'embedded-sr': cls.sr_url, - 'embedded-sr-type': cls.sr_type, - 'embedded-instance-parameters': cls.instance_parameters - } + def proxy_show(self, slapos): + return subprocess.check_output((slapos, 'proxy', 'show'), universal_newlines=True) def test(self): slapos = self._getSlapos() - info = subprocess.check_output((slapos, 'proxy', 'show'), universal_newlines=True) - instance_name = "Embedded Instance" + home = self.computer_partition_root_path + + # Check that no request script was generated + request_script = os.path.join(home, 'srv', 'project', 'request_embedded.sh') + self.assertFalse(os.path.exists(request_script)) + + # Manually request old-name 'Embedded Instance' + old_instance_name = "Embedded Instance" + subprocess.check_call((slapos, 'request', old_instance_name, 'bogus_url')) + self.assertIn(old_instance_name, self.proxy_show(slapos)) + + # Update Theia instance parameters + embedded_request_parameters = { + 'embedded-sr': self.sr_url, + 'embedded-sr-type': self.sr_type, + 'embedded-instance-parameters': self.instance_parameters + } + self.rerequest(embedded_request_parameters) + self.reinstantiate() + + # 
Check that embedded instance was requested + instance_name = "embedded_instance" + info = self.proxy_show(slapos) + try: + self.assertIn(instance_name, info) + except AssertionError: + for filename in os.listdir(home): + if 'standalone' in filename and '.log' in filename: + filepath = os.path.join(home, filename) + with open(filepath) as f: + print("Contents of filepath: " + filepath) + print(f.read()) + raise + + # Check that old-name instance was renamed + self.assertNotIn(old_instance_name, info) + + # Check embedded instance parameters + bogus_sr = os.path.join(home, self.sr_url[2:]) - self.assertIsNotNone(re.search(r"%s\s+slaprunner\s+available" % (self.sr_url,), info), info) - self.assertIsNotNone(re.search(r"%s\s+%s\s+%s" % (self.sr_url, self.sr_type, instance_name), info), info) + self.assertIsNotNone(re.search(r"%s\s+slaprunner\s+available" % (bogus_sr,), info), info) + self.assertIsNotNone(re.search(r"%s\s+%s\s+%s" % (bogus_sr, self.sr_type, instance_name), info), info) service_info = subprocess.check_output((slapos, 'service', 'info', instance_name), universal_newlines=True) - self.assertIn("{'bogus_param': 'bogus_value'}", service_info) + self.assertIn("{'bogus_param': 'bogus_value', 'bogus_param2': 'bogus_value2'}", service_info) class TestTheiaFrontend(TheiaTestCase): diff --git a/software/theia/test/test_resiliency.py b/software/theia/test/test_resiliency.py index b42d460054ca2bff3dd53ab9401b1fad2426732a..949e83708a9fadf960df3977f74ae02bb321a361 100644 --- a/software/theia/test/test_resiliency.py +++ b/software/theia/test/test_resiliency.py @@ -225,6 +225,8 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC script_relpath = os.path.join( 'srv', 'runner', 'instance', 'slappart0', 'srv', '.backup_identity_script') + signature_relpath = os.path.join( + 'srv', 'backup', 'theia', 'backup.signature') def assertPromiseFailure(self, *msg): # Force promises to recompute regardless of periodicity @@ -291,6 +293,10 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC self.customSignatureScript(content=None) self.customRestoreScript(content=None) self.cleanupExitfiles() + try: + os.remove(self._getPartitionPath('import', self.signature_relpath)) + except OSError: + pass def test_export_promise(self): self.writeFile(self.getExportExitfile(), '1') @@ -303,17 +309,14 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC def test_custom_hash_script(self): errmsg = 'Bye bye' self.customSignatureScript(content='>&2 echo "%s"\nexit 1' % errmsg) - backup_script = self._getPartitionPath( - 'export', 'srv', 'backup', 'theia', self.script_relpath) - self.assertExportFailure('Compute backup signature\n ... ERROR !', - 'Custom signature script %s failed' % os.path.abspath(backup_script), + custom_script = self._getPartitionPath('export', self.script_relpath) + self.assertExportFailure('Compute partitions backup signatures\n ... 
ERROR !', + 'Custom signature script %s failed' % os.path.abspath(custom_script), 'and stderr:\n%s' % errmsg) def test_signature_mismatch(self): - signature_file = self._getPartitionPath('import', 'srv', 'backup', 'theia', 'backup.signature') - moved_file = self._getPartitionPath('import', 'srv', 'backup', 'backup.signature.moved') - self.writeFile(moved_file, 'Bogus Hash\n', mode='a') - os.rename(moved_file, signature_file) + signature_file = self._getPartitionPath('import', self.signature_relpath) + self.writeFile(signature_file, 'Bogus Hash\n', mode='a') self.assertImportFailure('ERROR the backup signatures do not match') def test_restore_script_error(self): @@ -363,12 +366,15 @@ class TestTheiaExportAndImport(ResilienceMixin, ExportAndImportMixin, ResilientT self.writeFile(os.path.join(dummy_root, 'exclude', 'excluded'), 'This file should be excluded from resilient backup') - # Check that ~/srv/exporter.exclude and ~/srv/runner-import-restore + # Check that ~/srv/exporter.exclude and ~/srv/runner-import-restore exist # As well as ~/srv/.backup_identity_script self.assertTrue(os.path.exists(os.path.join(dummy_root, 'srv', 'exporter.exclude'))) self.assertTrue(os.path.exists(os.path.join(dummy_root, 'srv', 'runner-import-restore'))) self.assertTrue(os.path.exists(os.path.join(dummy_root, 'srv', '.backup_identity_script'))) + # Remember content of ~/etc in the import theia + self.etc_listdir = os.listdir(self._getPartitionPath('import', 'etc')) + def _doSync(self): self._doExport() self._doTransfer() @@ -384,14 +390,20 @@ class TestTheiaExportAndImport(ResilienceMixin, ExportAndImportMixin, ResilientT self.assertIn(adapted_test_url, proxy_content) self.assertNotIn(self._test_software_url, proxy_content) + # Check that ~/etc still contains everything it did before + etc_listdir = os.listdir(self._getPartitionPath('import', 'etc')) + self.assertTrue(set(self.etc_listdir).issubset(etc_listdir)) + # Check that ~/srv/project was exported self.assertTrue(os.path.exists(adapted_test_url)) # Check that the dummy instance is not yet started self.checkLog(os.path.join(dummy_root, 'log.log'), self.initial_log, newline=None) - # Check that ~/srv/.backup_identity_script was called - signature = self._getPartitionPath('import', 'srv', 'backup', 'backup.signature.proof') + # Check that ~/srv/.backup_identity_script was detected and called + signature = self._getPartitionPath( + 'import', 'srv', 'backup', 'theia', 'slappart0.backup.signature.custom') + self.assertTrue(os.path.exists(signature)) with open(signature) as f: self.assertIn('Custom script', f.read()) @@ -477,6 +489,14 @@ class TestTheiaResilience(ResilienceMixin, TakeoverMixin, ResilientTheiaTestCase _test_software_url = dummy_software_url + def test_twice(self): + # Run two synchronisations on the same instances + # to make sure everything still works the second time + # Check ~/etc in import theia again + self.etc_listdir = os.listdir(self._getPartitionPath('import', 'etc')) + self._doSync() + self._checkSync() + def _prepareExport(self): # Deploy test instance self._deployEmbeddedSoftware(self._test_software_url, 'test_instance', self.test_instance_max_retries) @@ -485,6 +505,9 @@ class TestTheiaResilience(ResilienceMixin, TakeoverMixin, ResilientTheiaTestCase self.export_id = self._getPartitionId('export') self.import_id = self._getPartitionId('import') + # Remember content of ~/etc in the import theia + self.etc_listdir = os.listdir(self._getPartitionPath('import', 'etc')) + def _doSync(self): start = time.time() @@ -499,6 +522,11 @@ 
class TestTheiaResilience(ResilienceMixin, TakeoverMixin, ResilientTheiaTestCase # Wait for takoever to be ready self._waitTakeoverReady(takeover_url, start, self.backup_max_tries, self.backup_wait_interval) + def _checkSync(self): + # Check that ~/etc still contains everything it did before + etc_listdir = os.listdir(self._getPartitionPath('import', 'etc')) + self.assertTrue(set(self.etc_listdir).issubset(etc_listdir)) + def _doTakeover(self): # Takeover takeover_url, takeover_password = self._getTakeoverUrlAndPassword() diff --git a/software/theia/theia_common.py b/software/theia/theia_common.py index 49f12f29b8fd005a29e4496ed055b50eb965c1cc..74975a70ce56f75bcc1840ed9c484637efde8a63 100644 --- a/software/theia/theia_common.py +++ b/software/theia/theia_common.py @@ -4,6 +4,7 @@ import glob import hashlib import os import re +import shutil import subprocess as sp import sqlite3 @@ -21,13 +22,19 @@ EXCLUDE_FLAGS = ['--exclude={}'.format(x) for x in sorted(EXCLUDE_PATTERNS)] def makedirs(path): try: - os.makedirs(path if os.path.isdir(path) else os.path.dirname(path)) + os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise -def copytree(rsyncbin, src, dst, exclude=[], extrargs=[], verbosity='-v'): +def copyfile(src, dst): + dst = os.path.abspath(dst) + makedirs(os.path.dirname(dst)) + shutil.copy2(src, dst) + + +def copytree(rsyncbin, src, dst, exclude=(), extrargs=(), verbosity='-v'): # Ensure there is a trailing slash in the source directory # to avoid creating an additional directory level at the destination src = os.path.join(src, '') @@ -60,21 +67,20 @@ def copytree(rsyncbin, src, dst, exclude=[], extrargs=[], verbosity='-v'): def copydb(sqlite3bin, src_db, dst_db): - makedirs(dst_db) + makedirs(os.path.dirname(dst_db)) sp.check_output((sqlite3bin, src_db, '.backup ' + dst_db)) def remove(path): try: os.remove(path) - except OSError: - if os.path.exists(path): + except OSError as e: + if e.errno != errno.ENOENT: raise def parse_installed(partition): paths = [] - custom_script = os.path.join(partition, 'srv', '.backup_identity_script') for cfg in glob.glob(os.path.join(partition, '.installed*.cfg')): try: with open(cfg) as f: @@ -86,7 +92,7 @@ def parse_installed(partition): for section in six.itervalues(installed_cfg): for p in section.get('__buildout_installed__', '').splitlines(): p = p.strip() - if p and p != custom_script: + if p: paths.append(p) return paths @@ -101,31 +107,44 @@ def sha256sum(file_path, chunk_size=1024 * 1024): return sha256.hexdigest() -def hashwalk(backup_dir, mirror_partitions): - scripts = {} - for p in mirror_partitions: - script_path = os.path.join(p, 'srv', '.backup_identity_script') - if os.path.exists(script_path): - scripts[os.path.abspath(p)] = script_path - for dirpath, dirnames, filenames in os.walk(backup_dir): - filenames.sort() +def fast_hashwalk(root_dir): + for dirpath, dirnames, filenames in os.walk(root_dir): + for f in filenames: + filepath = os.path.join(dirpath, f) + if os.path.isfile(filepath): + displaypath = os.path.relpath(filepath, start=root_dir) + yield '%s %s' % (sha256sum(filepath), displaypath) + + +def exclude_hashwalk(root_dir, instance_dir): + root_dir = os.path.abspath(root_dir) + instance_dir = os.path.abspath(instance_dir) + for dirpath, dirnames, filenames in os.walk(root_dir): for f in filenames: filepath = os.path.join(dirpath, f) if os.path.isfile(filepath): - displaypath = os.path.relpath(filepath, start=backup_dir) + displaypath = os.path.relpath(filepath, start=root_dir) yield '%s %s' % 
(sha256sum(filepath), displaypath) - remaining_dirnames = [] - for subdir in dirnames: - subdirpath = os.path.abspath(os.path.join(dirpath, subdir)) - custom_hashscript = scripts.get(subdirpath) - if custom_hashscript: - print('Using custom signature script %s' % custom_hashscript) - for s in hashcustom(subdirpath, backup_dir, custom_hashscript): - yield s - else: - remaining_dirnames.append(subdir) - remaining_dirnames.sort() - dirnames[:] = remaining_dirnames + if dirpath == instance_dir: + remaining_dirs = [] + for d in dirnames: + if not d.startswith('slappart'): + remaining_dirs.append(d) + dirnames[:] = remaining_dirs + + +def hashwalk(root_dir, instance_dir=None): + if instance_dir and not os.path.relpath( + instance_dir, start=root_dir).startswith(os.pardir): + return exclude_hashwalk(root_dir, instance_dir) + return fast_hashwalk(root_dir) + + +def hashscript(partition): + script = os.path.join(partition, 'srv', '.backup_identity_script') + if os.path.exists(script): + return script + return None @contextlib.contextmanager @@ -138,10 +157,11 @@ def cwd(path): os.chdir(old_path) -def hashcustom(mirrordir, backup_dir, custom_hashscript): - workingdir = os.path.join(mirrordir, os.pardir, os.pardir, os.pardir) +def hashcustom(partition, script): + workingdir = os.path.join(partition, os.pardir, os.pardir, os.pardir) with cwd(os.path.abspath(workingdir)): - for dirpath, _, filenames in os.walk(mirrordir): + for dirpath, dirnames, filenames in os.walk(partition): + dirnames.sort() filepaths = [] for f in filenames: path = os.path.join(dirpath, f) @@ -150,16 +170,16 @@ def hashcustom(mirrordir, backup_dir, custom_hashscript): if not filepaths: continue hashprocess = sp.Popen( - custom_hashscript, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE) + script, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE) out, err = hashprocess.communicate(str2bytes('\0'.join(filepaths))) if hashprocess.returncode != 0: template = "Custom signature script %s failed on inputs:\n%s" - msg = template % (custom_hashscript, '\n'.join(filepaths)) + msg = template % (script, '\n'.join(filepaths)) msg += "\nwith stdout:\n%s" % bytes2str(out) msg += "\nand stderr:\n%s" % bytes2str(err) raise Exception(msg) signatures = bytes2str(out).strip('\n').split('\n') signatures.sort() - displaypath = os.path.relpath(dirpath, start=backup_dir) + displaypath = os.path.relpath(dirpath, start=partition) for s in signatures: - yield '%s %s/ (custom)' % (s, displaypath) + yield '%s %s' % (s, displaypath) diff --git a/software/theia/theia_export.py b/software/theia/theia_export.py index d4be966d37adc41312c979f8f619fffd6bc914d5..e5a52778a197cceb0aaa67e9428b3b00e0b7c6d2 100644 --- a/software/theia/theia_export.py +++ b/software/theia/theia_export.py @@ -9,8 +9,8 @@ import traceback import six from six.moves import configparser -sys.path.append(os.path.dirname(__file__)) -from theia_common import copytree, copydb, hashwalk, parse_installed, remove +sys.path.insert(0, os.path.dirname(__file__)) +from theia_common import * os.environ['LC_ALL'] = 'C' @@ -55,45 +55,74 @@ class TheiaExport(object): self.copytree_partitions_args = {} self.logs = [] - def mirrorpath(self, src): + def mirror_path(self, src): return os.path.abspath(os.path.join( self.backup_dir, os.path.relpath(src, start=self.root_dir))) - def backuptree(self, src, exclude=[], extrargs=[], verbosity='-v'): - dst = self.mirrorpath(src) - return copytree(self.rsync_bin, src, dst, exclude, extrargs, verbosity) + def backup_tree(self, src): + return copytree(self.rsync_bin, src, 
self.mirror_path(src)) - def backupdb(self): - copydb(self.sqlite3_bin, self.proxy_db, self.mirrorpath(self.proxy_db)) + def backup_file(self, src): + return copyfile(src, self.mirror_path(src)) - def backuppartition(self, partition): + def backup_db(self): + copydb(self.sqlite3_bin, self.proxy_db, self.mirror_path(self.proxy_db)) + + def backup_partition(self, partition): installed = parse_installed(partition) rules = os.path.join(partition, 'srv', 'exporter.exclude') extrargs = ('--filter=.-/ ' + rules,) if os.path.exists(rules) else () - self.backuptree(partition, exclude=installed, extrargs=extrargs) - self.copytree_partitions_args[partition] = (installed, extrargs) + dst = self.mirror_path(partition) + copytree(self.rsync_bin, partition, dst, installed, extrargs) + self.copytree_partitions_args[partition] = (dst, installed, extrargs) - def sign(self, signaturefile): + def sign(self, signaturefile, signatures): remove(signaturefile) pardir = os.path.abspath(os.path.join(self.backup_dir, os.pardir)) - tmpfile = os.path.join(pardir, 'backup.signature.tmp') - mirror_partitions = [self.mirrorpath(p) for p in self.partition_dirs] + tmpfile = os.path.join(pardir, os.path.basename(signaturefile) + '.tmp') with open(tmpfile, 'w') as f: - for s in hashwalk(self.backup_dir, mirror_partitions): + for s in signatures: f.write(s + '\n') os.rename(tmpfile, signaturefile) - def checkpartition(self, partition, pattern='/srv/backup/'): - installed, extrargs = self.copytree_partitions_args[partition] - output = self.backuptree( + def sign_root(self): + signaturefile = os.path.join(self.backup_dir, 'backup.signature') + signatures = hashwalk(self.backup_dir, self.mirror_path(self.instance_dir)) + self.sign(signaturefile, signatures) + + def sign_partition(self, partition): + dst = self.mirror_path(partition) + filename = os.path.basename(partition) + '.backup.signature' + signaturefile = os.path.join(self.backup_dir, filename) + script = hashscript(partition) + if script: + signaturefile += '.custom' + self.sign(signaturefile, hashcustom(dst, script)) + else: + self.sign(signaturefile, hashwalk(dst)) + + def remove_signatures(self): + pattern = os.path.join(self.backup_dir, '*backup.signature*') + signature_files = glob.glob(pattern) + for f in signature_files: + try: + os.remove(f) + except OSError: + pass + + def check_partition(self, partition, pattern='/srv/backup/'): + dst, installed, extrargs = self.copytree_partitions_args[partition] + output = copytree( + self.rsync_bin, partition, + dst, exclude=installed, extrargs=extrargs + ('--dry-run', '--update'), verbosity='--out-format=%n', ) return [path for path in output.splitlines() if pattern in path] - def loginfo(self, msg): + def log(self, msg): print(msg) self.logs.append(msg) @@ -118,40 +147,46 @@ class TheiaExport(object): def export(self): export_start_date = int(time.time()) - etc_dir = os.path.join(self.root_dir, 'etc') - with open(os.path.join(etc_dir, '.resilient_timestamp'), 'w') as f: + timestamp = os.path.join(self.root_dir, 'etc', '.resilient_timestamp') + with open(timestamp, 'w') as f: f.write(str(export_start_date)) - self.loginfo('Backup directory ' + etc_dir) - self.backuptree(etc_dir, extrargs=('--filter=- */', '--filter=-! 
.*')) + self.remove_signatures() + + self.log('Backup resilient timestamp ' + timestamp) + self.backup_file(timestamp) for d in self.dirs: - self.loginfo('Backup directory ' + d) - self.backuptree(d) + self.log('Backup directory ' + d) + self.backup_tree(d) - self.loginfo('Backup slapproxy database') - self.backupdb() + self.log('Backup slapproxy database') + self.backup_db() - self.loginfo('Backup partitions') + self.log('Backup partitions') for p in self.partition_dirs: - self.backuppartition(p) + self.backup_partition(p) - self.loginfo('Compute backup signature') - self.sign(os.path.join(self.backup_dir, 'backup.signature')) + self.log('Compute root backup signature') + self.sign_root() + + self.log('Compute partitions backup signatures') + for p in self.partition_dirs: + self.sign_partition(p) time.sleep(10) - self.loginfo('Check partitions') + self.log('Check partitions') modified = list(itertools.chain.from_iterable( - self.checkpartition(p) for p in self.partition_dirs)) + self.check_partition(p) for p in self.partition_dirs)) if modified: msg = 'Some files have been modified since the backup started' - self.loginfo(msg + ':') - self.loginfo('\n'.join(modified)) - self.loginfo("Let's wait %d minutes and try again" % BACKUP_WAIT) + self.log(msg + ':') + self.log('\n'.join(modified)) + self.log("Let's wait %d minutes and try again" % BACKUP_WAIT) time.sleep(BACKUP_WAIT * 60) raise Exception(msg) - self.loginfo('Done') + self.log('Done') if __name__ == '__main__': diff --git a/software/theia/theia_import.py b/software/theia/theia_import.py index 382dc550d6f94b10cc970d31cd2fe75827f3078c..9948ff5177040fc4c4b51ba9dfd59ecb92e9bce7 100644 --- a/software/theia/theia_import.py +++ b/software/theia/theia_import.py @@ -10,7 +10,7 @@ import six from six.moves import configparser sys.path.append(os.path.dirname(__file__)) -from theia_common import copytree, copydb, hashwalk, parse_installed, remove +from theia_common import * os.environ['LC_ALL'] = 'C' @@ -57,28 +57,32 @@ class TheiaImport(object): configp.read(cfg) self.proxy_db = configp.get('slapproxy', 'database_uri') self.instance_dir = configp.get('slapos', 'instance_root') - mirror_dir = self.mirrorpath(self.instance_dir) + mirror_dir = self.mirror_path(self.instance_dir) partitions = glob.glob(os.path.join(mirror_dir, 'slappart*')) self.mirror_partition_dirs = [p for p in partitions if os.path.isdir(p)] self.logs = [] - def mirrorpath(self, dst): + def mirror_path(self, dst): return os.path.abspath(os.path.join( self.backup_dir, os.path.relpath(dst, start=self.root_dir))) - def dstpath(self, src): + def dst_path(self, src): return os.path.abspath(os.path.join( self.root_dir, os.path.relpath(src, start=self.backup_dir))) - def restoretree(self, dst, exclude=[], extrargs=[], verbosity='-v'): - src = self.mirrorpath(dst) + def restore_tree(self, dst, exclude=(), extrargs=(), verbosity='-v'): + src = self.mirror_path(dst) return copytree(self.rsync_bin, src, dst, exclude, extrargs, verbosity) - def restoredb(self): - copydb(self.sqlite3_bin, self.mirrorpath(self.proxy_db), self.proxy_db) + def restore_file(self, dst): + src = self.mirror_path(dst) + return copyfile(src, dst) - def restorepartition(self, mirror_partition): - p = self.dstpath(mirror_partition) + def restore_db(self): + copydb(self.sqlite3_bin, self.mirror_path(self.proxy_db), self.proxy_db) + + def restore_partition(self, mirror_partition): + p = self.dst_path(mirror_partition) installed = parse_installed(p) if os.path.exists(p) else [] copytree(self.rsync_bin, 
mirror_partition, p, exclude=installed) @@ -86,38 +90,67 @@ class TheiaImport(object): supervisor_command = (self.supervisorctl_bin, '-c', self.supervisord_conf) command = supervisor_command + args print(' '.join(command)) - sp.check_call(command) + print(sp.check_output(command, stderr=sp.STDOUT, universal_newlines=True)) def slapos(self, *args): command = (self.slapos_bin,) + args + ('--cfg', self.slapos_cfg) print(' '.join(command)) - sp.check_call(command) + print(sp.check_output(command, stderr=sp.STDOUT, universal_newlines=True)) + + def sign(self, signaturefile, root_dir): + with open(signaturefile, 'r') as f: + for line in f: + try: + _, relpath = line.strip().split(None, 1) + except ValueError: + yield 'Could not parse: %s' % line + continue + filepath = os.path.join(root_dir, relpath) + try: + signature = sha256sum(filepath) + except IOError: + yield 'Could not read: %s' % filepath + continue + yield '%s %s' % (signature, relpath) + + def sign_custom(self, root_dir): + partition = self.dst_path(root_dir) + script = hashscript(partition) + if not script: + msg = 'ERROR: missing custom signature script for partition ' + partition + raise Exception(msg) + return hashcustom(root_dir, script) - def verify(self, signaturefile): - pardir = os.path.abspath(os.path.join(self.backup_dir, os.pardir)) - moved = os.path.join(pardir, 'backup.signature.moved') - proof = os.path.join(pardir, 'backup.signature.proof') + def find_signature_file(self, partition): + filename = os.path.basename(partition) + '.backup.signature' + signaturefile = os.path.join(self.backup_dir, filename) if os.path.exists(signaturefile): - os.rename(signaturefile, moved) - if not os.path.exists(moved): - msg = 'ERROR the backup signature file is missing' - print(msg) - raise Exception(msg) + return signaturefile, False + signaturefile += '.custom' + if os.path.exists(signaturefile): + return signaturefile, True + raise Exception('ERROR: missing signature file for partition ' + partition) + + def verify(self, signaturefile, root_dir, custom=False): + proof = signaturefile + '.proof' + if custom: + signatures = self.sign_custom(root_dir) + else: + signatures = self.sign(signaturefile, root_dir) with open(proof, 'w') as f: - for s in hashwalk(self.backup_dir, self.mirror_partition_dirs): + for s in signatures: f.write(s + '\n') - diffcommand = ('diff', moved, proof) - print(' '.join(diffcommand)) + diffcommand = ('diff', signaturefile, proof) try: sp.check_output( diffcommand, stderr=sp.STDOUT, universal_newlines=True) except sp.CalledProcessError as e: - template = 'ERROR the backup signatures do not match\n\n%s' - msg = template % e.output + template = 'ERROR the backup signatures do not match\n\n%s\n%s' + msg = template % (' '.join(diffcommand), e.output) print(msg) raise Exception(msg) - def loginfo(self, msg): + def log(self, msg): print(msg) self.logs.append(msg) @@ -126,9 +159,11 @@ class TheiaImport(object): exitcode = 0 try: self.restore() - except Exception: + except Exception as e: exitcode = 1 exc = traceback.format_exc() + if isinstance(e, sp.CalledProcessError) and e.output: + exc = "%s\n\n%s" % (exc, e.output) with open(self.error_file, 'w') as f: f.write('\n ... OK\n\n'.join(self.logs)) f.write('\n ... 
ERROR !\n\n') @@ -140,44 +175,54 @@ class TheiaImport(object): sys.exit(exitcode) def restore(self): - self.loginfo('Verify backup signature') - self.verify(os.path.join(self.backup_dir, 'backup.signature')) + self.log('Verify main backup signature') + signaturefile = os.path.join(self.backup_dir, 'backup.signature') + self.verify(signaturefile, self.backup_dir) - self.loginfo('Stop slapproxy') + custom_partition_signatures = [] + for m in self.mirror_partition_dirs: + signaturefile, custom = self.find_signature_file(m) + if custom: + custom_partition_signatures.append((signaturefile, m)) + else: + self.log('Verify backup signature for ' + m) + self.verify(signaturefile, m) + + self.log('Stop slapproxy') self.supervisorctl('stop', 'slapos-proxy') - self.loginfo('Restore partitions') + self.log('Restore partitions') for m in self.mirror_partition_dirs: - self.restorepartition(m) + self.restore_partition(m) for d in self.dirs: - self.loginfo('Restore directory ' + d) - self.restoretree(d) + self.log('Restore directory ' + d) + self.restore_tree(d) - self.loginfo('Restore slapproxy database') - self.restoredb() + self.log('Restore slapproxy database') + self.restore_db() - etc_dir = os.path.join(self.root_dir, 'etc') - self.loginfo('Restore directory ' + etc_dir) - self.restoretree(etc_dir, extrargs=('--filter=- */', '--filter=-! .*')) + timestamp = os.path.join(self.root_dir, 'etc', '.resilient_timestamp') + self.log('Restore resilient timestamp ' + timestamp) + self.restore_file(timestamp) custom_script = os.path.join(self.root_dir, 'srv', 'runner-import-restore') if os.path.exists(custom_script): - self.loginfo('Run custom restore script %s' % custom_script) - sp.check_call(custom_script) + self.log('Run custom restore script %s' % custom_script) + print(sp.check_output(custom_script)) - self.loginfo('Start slapproxy again') + self.log('Start slapproxy again') self.supervisorctl('start', 'slapos-proxy') - self.loginfo('Reformat partitions') + self.log('Reformat partitions') self.slapos('node', 'format', '--now') - self.loginfo('Remove old supervisord configuration files') + self.log('Remove old supervisord configuration files') conf_dir = os.path.join(self.instance_dir, 'etc', 'supervisor.conf.d') for f in glob.glob(os.path.join(conf_dir, '*')): os.remove(f) - self.loginfo('Build Software Releases') + self.log('Build Software Releases') for i in range(3): try: self.slapos('node', 'software', '--all', '--logfile', self.sr_log) @@ -187,18 +232,18 @@ class TheiaImport(object): else: break - self.loginfo('Remove old custom instance scripts') + self.log('Remove old custom instance scripts') partitions_glob = os.path.join(self.instance_dir, 'slappart*') scripts = os.path.join(partitions_glob, 'srv', 'runner-import-restore') for f in glob.glob(scripts): remove(f) - self.loginfo('Remove partition timestamps') + self.log('Remove partition timestamps') timestamps = os.path.join(partitions_glob, '.timestamp') for f in glob.glob(timestamps): remove(f) - self.loginfo('Build Instances') + self.log('Build Instances') cp_log = self.cp_log for i in range(3): try: @@ -209,11 +254,15 @@ class TheiaImport(object): else: break + self.log('Verify custom backup signatures') + for signaturefile, m in custom_partition_signatures: + self.verify(signaturefile, m, True) + for custom_script in glob.glob(scripts): - self.loginfo('Running custom instance script %s' % custom_script) - sp.check_call(custom_script) + self.log('Running custom instance script %s' % custom_script) + print(sp.check_output(custom_script)) - 
self.loginfo('Done') + self.log('Done') if __name__ == '__main__': diff --git a/software/turnserver/software.cfg b/software/turnserver/software.cfg index 51bd5076e9663eed06581f4ca7d73b87398c65dd..b21688dba94ff7a81afe10ab56a410f7c8603751 100644 --- a/software/turnserver/software.cfg +++ b/software/turnserver/software.cfg @@ -18,7 +18,6 @@ parts = [download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 644 [instance-cfg] recipe = slapos.recipe.template diff --git a/software/wordpress/software.cfg b/software/wordpress/software.cfg index ee90787f879ee591c51904ac24d5a7fd002d1b67..f952d126b3356febbd200243a1b0e47db6ed8027 100644 --- a/software/wordpress/software.cfg +++ b/software/wordpress/software.cfg @@ -10,9 +10,7 @@ md5sum = 409889c98b13cbdbb9fd121df859ae3e [application-template] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -download-only = True filename = template.in -mode = 0644 location = ${buildout:parts-directory}/${:_buildout_section_name_} [application-configuration] diff --git a/stack/caucase/buildout.cfg b/stack/caucase/buildout.cfg index 898ca2b618741bcc9b2c474dacd5ce0542e39e87..fdf7dd2b6d7e79b5ae5f8a55f602281150b68eae 100644 --- a/stack/caucase/buildout.cfg +++ b/stack/caucase/buildout.cfg @@ -32,7 +32,6 @@ scripts = [caucase-jinja2-library] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 depends = ${caucase-jinja2-library-eggs:eggs} [versions] diff --git a/stack/erp5/buildout.cfg b/stack/erp5/buildout.cfg index e1bc9b4734c6ff461a71537fb3e50728f48eadc9..43b49632b83d84e3f57bd46d792db979db82f959 100644 --- a/stack/erp5/buildout.cfg +++ b/stack/erp5/buildout.cfg @@ -98,15 +98,12 @@ url = ${:_profile_base_location_}/${:filename} [mariadb-start-clone-from-backup] <= download-base -mode = 755 [mariadb-resiliency-after-import-script] <= download-base -mode = 755 [mariadb-slow-query-report-script] <= download-base -mode = 755 [template-mariadb] <= download-base diff --git a/stack/erp5/buildout.hash.cfg b/stack/erp5/buildout.hash.cfg index ba7bddef4425197e1a64528df7846b3e4499b187..7d30be3fb5e82a4f1e53ebcd74ec0d1986e4702b 100644 --- a/stack/erp5/buildout.hash.cfg +++ b/stack/erp5/buildout.hash.cfg @@ -74,7 +74,7 @@ md5sum = bbef65b4edeb342f08309604ca3717d5 [template-erp5] filename = instance-erp5.cfg.in -md5sum = c10634353841bb09a847168b4add8d2f +md5sum = cb41e15a1585973896a7645dc2ae3a5c [template-zeo] filename = instance-zeo.cfg.in diff --git a/stack/erp5/instance-erp5.cfg.in b/stack/erp5/instance-erp5.cfg.in index 15ae741318902ef8d2c2297474803baca9efd14c..ffdf4ff15822eda4a1fe4ecd49e5c34c13643ed5 100644 --- a/stack/erp5/instance-erp5.cfg.in +++ b/stack/erp5/instance-erp5.cfg.in @@ -297,7 +297,7 @@ config-name = {{ dumps(custom_name) }} config-instance-count = {{ dumps(zope_parameter_dict['instance-count']) }} config-private-dev-shm = {{ zope_parameter_dict.get('private-dev-shm', '') }} config-thread-amount = {{ dumps(zope_parameter_dict['thread-amount']) }} -config-timerserver-interval = {{ dumps(zope_parameter_dict.get('timerserver-interval', 5)) }} +config-timerserver-interval = {{ dumps(zope_parameter_dict.get('timerserver-interval', 1)) }} config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longrequest-logger-interval', -1)) }} config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }} config-large-file-threshold = {{ 
dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }} diff --git a/stack/lamp/buildout.cfg b/stack/lamp/buildout.cfg index 9f612dedb984c1afb6e97a5772459bb20f9e2330..672934ffe38cac780023d2e01161471ec99f0b1a 100644 --- a/stack/lamp/buildout.cfg +++ b/stack/lamp/buildout.cfg @@ -32,17 +32,11 @@ extends = [template-download-base] -recipe = hexagonit.recipe.download -ignore-existing = true -download-only = true +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -mode = 0644 [application] -recipe = hexagonit.recipe.download -ignore-existing = true -# If the provided tarball does not contain top directory, this option should be changed to false -strip-top-level-dir = true +recipe = slapos.recipe.build:download-unpacked #---------------- #-- @@ -93,10 +87,10 @@ context = key groonga_plugin_dir groonga:groonga-plugin-dir key groonga_mysql_normalizer_plugin_dir groonga-normalizer-mysql:groonga-plugin-dir key percona_toolkit_location percona-toolkit:location - key template_php_ini template-php.ini:output - key template_apache_conf template-apache.conf:output - key template_apache_php instance-apache-php:output - key template_lamp instance-lamp:output + key template_php_ini template-php.ini:target + key template_apache_conf template-apache.conf:target + key template_apache_php instance-apache-php:target + key template_lamp instance-lamp:target key template_mariadb template-mariadb:target key template_mariadb_initial_setup template-mariadb-initial-setup:target key template_mysqld_wrapper template-mysqld-wrapper:rendered @@ -107,19 +101,15 @@ context = [instance-apache-php] <= template-download-base -output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename} [instance-lamp] <= template-download-base -output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename} [template-apache.conf] <= template-download-base -output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename} [template-php.ini] <= template-download-base -output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename} [erp5] # lamp stack reuses erp5 stack to have mariadb, but we don't need to checkout erp5 here. 
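The [template-download-base] and [application] changes above replace hexagonit.recipe.download with slapos.recipe.build:download and drop the output, mode and download-only options; consuming sections now read the downloaded file through the :target key instead of :output. A minimal sketch of the resulting pattern, using a hypothetical section name and filename that are not part of this patch:

[template-example]
# hypothetical download section in the new style: no mode =, no download-only
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
filename = instance-example.cfg.in

[example-profile]
# the fetched file is exposed as :target (the old recipe exposed :output)
recipe = slapos.recipe.template:jinja2
template = ${template-example:target}
rendered = ${buildout:directory}/instance-example.cfg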
diff --git a/stack/logrotate/buildout.cfg b/stack/logrotate/buildout.cfg index b8648f99251ff462d4290223a1340018e0e88e90..e91a22b91d641b750c8fe8388954b12f141b61cc 100644 --- a/stack/logrotate/buildout.cfg +++ b/stack/logrotate/buildout.cfg @@ -10,8 +10,6 @@ parts = [logrotate-download-base] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:filename} -# XXX: following mode should be the default -mode = 644 [logrotate-conf-template] < = logrotate-download-base diff --git a/stack/monitor/buildout.cfg b/stack/monitor/buildout.cfg index ddd11fbf09148b1bcba2e422fd15a6b910e2fec8..acce28e479ad4adb961d71769d54b175cf116365 100644 --- a/stack/monitor/buildout.cfg +++ b/stack/monitor/buildout.cfg @@ -17,17 +17,8 @@ parts = monitor2-template [monitor-download-base] -recipe = hexagonit.recipe.download -ignore-existing = true -download-only = true +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/${:_update_hash_filename_} -mode = 0644 - -[monitor-template-script] -<= monitor-download-base -url = ${:_profile_base_location_}/scripts/${:filename} -destination = ${buildout:parts-directory}/monitor-scripts -on-update = true [monitor-eggs] recipe = zc.recipe.egg diff --git a/stack/resilient/buildout.cfg b/stack/resilient/buildout.cfg index 242fff39ac093dd9dcb86784a759f7557251bead..de7bb472621168ee5345991db3606e4583a69230 100644 --- a/stack/resilient/buildout.cfg +++ b/stack/resilient/buildout.cfg @@ -26,7 +26,6 @@ parts = recipe = zc.recipe.egg eggs = collective.recipe.template - collective.recipe.environment #---------------- #-- @@ -40,7 +39,6 @@ eggs = recipe = slapos.recipe.template url = ${:_profile_base_location_}/pbsready.cfg.in output = ${buildout:directory}/pbsready.cfg -mode = 0644 [pbsready-import] # An import instance has an importer script, which is called @@ -48,7 +46,6 @@ mode = 0644 recipe = slapos.recipe.template url = ${:_profile_base_location_}/pbsready-import.cfg.in output = ${buildout:directory}/pbsready-import.cfg -mode = 0644 [pbsready-export] # An export instance has an exporter script, and communicates @@ -56,29 +53,24 @@ mode = 0644 recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/pbsready-export.cfg.in filename = pbsready-export.cfg.in -mode = 0644 [template-pull-backup] recipe = slapos.recipe.template url = ${:_profile_base_location_}/instance-pull-backup.cfg.in output = ${buildout:directory}/instance-pull-backup.cfg -mode = 0644 [template-replicated] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/template-replicated.cfg.in -mode = 0644 [template-parts] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/template-parts.cfg.in -mode = 0644 [template-resilient-templates] recipe = slapos.recipe.template url = ${:_profile_base_location_}/${:filename}.in output = ${buildout:directory}/${:filename} -mode = 0644 filename = template-resilient-templates.cfg [instance-frozen] @@ -93,32 +85,26 @@ output = ${buildout:directory}/instance-frozen.cfg recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/resilient-web-takeover-cgi-script.py.in filename = resilient-web-takeover-cgi-script.py.in -mode = 0644 # Provide an empty wrapper [template-wrapper] recipe = slapos.recipe.template url = ${:_profile_base_location_}/templates/wrapper.in output = ${buildout:directory}/template-wrapper.cfg -mode = 0644 [notifier-feed-promise-template] recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/templates/notifier-feed-promise.py.in filename = 
notifier-feed-promise.py.in -mode = 0644 ################## # Monitor element # [template-monitor-check-resilient-feed] -recipe = hexagonit.recipe.download -ignore-existing = true +recipe = slapos.recipe.build:download url = ${:_profile_base_location_}/templates/monitor-check-resilient-feed.in -download-only = true filename = monitor-check-resilient-feed.in -mode = 0644 [exporter-default-configuration] # Time in minutes to wait for the backup to finish diff --git a/stack/resilient/buildout.hash.cfg b/stack/resilient/buildout.hash.cfg index 53b3a05ae8114d73fe741bc78654d81e0925f2f9..24a11fc51a0eb7458681b487135a7ffce6066e8c 100644 --- a/stack/resilient/buildout.hash.cfg +++ b/stack/resilient/buildout.hash.cfg @@ -26,7 +26,7 @@ md5sum = 8f15263c4a27ec315eb3a12dbf7a7b34 [template-pull-backup] filename = instance-pull-backup.cfg.in -md5sum = 4425db50d551fb8a974e547308990bac +md5sum = d1af7f8a5d4f0846e4c016253fa31f3d [template-replicated] filename = template-replicated.cfg.in diff --git a/stack/resilient/instance-pull-backup.cfg.in b/stack/resilient/instance-pull-backup.cfg.in index 399bec5f3c0609166edc42fc3ff8f947d17d1248..0511301ef0b34233237e52f18858e76a8063e3b4 100644 --- a/stack/resilient/instance-pull-backup.cfg.in +++ b/stack/resilient/instance-pull-backup.cfg.in @@ -250,6 +250,7 @@ monitor-base-url = $${monitor-publish-parameters:monitor-base-url} monitor-url = $${monitor-publish-parameters:monitor-url} monitor-user = $${monitor-publish-parameters:monitor-user} monitor-password = $${monitor-publish-parameters:monitor-password} +monitor-setup-url = $${monitor-publish:monitor-setup-url} #---------------- #-- @@ -269,7 +270,7 @@ private-path-list += [monitor-check-resilient-feed-file] recipe = slapos.recipe.template:jinja2 -template = ${template-monitor-check-resilient-feed:location}/${template-monitor-check-resilient-feed:filename} +template = ${template-monitor-check-resilient-feed:target} rendered = $${monitor-directory:bin}/check-create-resilient-feed-files mode = 700 context = diff --git a/stack/slapos.cfg b/stack/slapos.cfg index 4dee6439754f220fc99a07466e81ddfc51ba2872..e8c222ba6443fe39bfd773c5bda9ba95a29bccec 100644 --- a/stack/slapos.cfg +++ b/stack/slapos.cfg @@ -38,7 +38,6 @@ exec-sitecustomize = false # Add location for modified non-official slapos.buildout find-links += http://www.nexedi.org/static/packages/source/ - http://www.nexedi.org/static/packages/source/hexagonit.recipe.download/ http://www.nexedi.org/static/packages/source/slapos.buildout/ # Use only quite well working sites. 
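The versions hunk below bumps Werkzeug to 2.0.2 while a new [versions:python2] section keeps 1.0.1 for Python 2 builds. A sketch of that conditional-pin pattern, with the values taken from this patch and the surrounding layout purely illustrative:

[versions]
# default pin, used when building with Python 3
Werkzeug = 2.0.2

[versions:python2]
# override applied only for Python 2 builds
Werkzeug = 1.0.1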
@@ -126,6 +125,7 @@ eggs = recipe = zc.recipe.egg eggs = ${lxml-python:egg} + ${python-PyYAML:egg} slapos.core slapos.libnetworkcache @@ -135,15 +135,13 @@ setuptools = 44.1.1 zc.buildout = 2.7.1+slapos014 # Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2) zc.recipe.egg = 2.0.3+slapos003 -# Use own version of h.r.download to be able to open .xz and .lz archives -hexagonit.recipe.download = 1.7.post4 traitlets = 4.3.3 Jinja2 = 2.11.3 Importing = 1.10 MarkupSafe = 1.0 PyYAML = 5.4.1 -Werkzeug = 1.0.1 +Werkzeug = 2.0.2 ZConfig = 2.9.3 asn1crypto = 1.3.0 atomicwrites = 1.4.0 @@ -171,6 +169,7 @@ itsdangerous = 0.24 lock-file = 2.0 lxml = 4.6.3 meld3 = 1.0.2 +mock = 3.0.5 more-itertools = 5.0.0 netaddr = 0.7.19 pathlib2 = 2.3.5 @@ -190,13 +189,13 @@ setproctitle = 1.1.10 setuptools-dso = 1.7 rubygemsrecipe = 0.4.3 six = 1.12.0 -slapos.cookbook = 1.0.214 -slapos.core = 1.6.19 +slapos.cookbook = 1.0.220 +slapos.core = 1.7.2 slapos.extension.strip = 0.4 slapos.extension.shared = 1.0 -slapos.libnetworkcache = 0.20 +slapos.libnetworkcache = 0.22 slapos.rebootstrap = 4.5 -slapos.recipe.build = 0.50 +slapos.recipe.build = 0.52 slapos.recipe.cmmi = 0.18 slapos.recipe.template = 4.6 slapos.toolbox = 0.126 @@ -251,6 +250,11 @@ chardet = 3.0.4 urllib3 = 1.25.9 pkgconfig = 1.5.1 + +[versions:python2] +Werkzeug = 1.0.1 + + [networkcache] download-cache-url = http://shacache.nxdcdn.com download-dir-url = http://shadir.nxdcdn.com diff --git a/stack/supervisord/buildout.cfg b/stack/supervisord/buildout.cfg index 6d75608b1cd8b92f2d7d3763058a2ee144132e4f..0fd4d182290b3d5c979688bd6fa4603923a94ffe 100644 --- a/stack/supervisord/buildout.cfg +++ b/stack/supervisord/buildout.cfg @@ -10,7 +10,6 @@ parts = [supervisord-download-base] recipe = slapos.recipe.build:download -mode = 0644 url = ${:_profile_base_location_}/${:_update_hash_filename_} diff --git a/stack/hadoop/buildout.cfg b/stack/unstable/hadoop/buildout.cfg similarity index 100% rename from stack/hadoop/buildout.cfg rename to stack/unstable/hadoop/buildout.cfg diff --git a/stack/hadoop/buildout.hash.cfg b/stack/unstable/hadoop/buildout.hash.cfg similarity index 100% rename from stack/hadoop/buildout.hash.cfg rename to stack/unstable/hadoop/buildout.hash.cfg diff --git a/stack/hadoop/instance-stack.cfg.in b/stack/unstable/hadoop/instance-stack.cfg.in similarity index 100% rename from stack/hadoop/instance-stack.cfg.in rename to stack/unstable/hadoop/instance-stack.cfg.in diff --git a/stack/hadoop/template/start-daemons.sh.in b/stack/unstable/hadoop/template/start-daemons.sh.in similarity index 100% rename from stack/hadoop/template/start-daemons.sh.in rename to stack/unstable/hadoop/template/start-daemons.sh.in diff --git a/update-hash b/update-hash index d1e19db0db54db94a60daa0b12cd2022c8c7476c..0aa1d16b30a413e92a0fbc33c9dee656ae777b64 100755 --- a/update-hash +++ b/update-hash @@ -1,6 +1,14 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python """ -Suggested installation: +Helper script to update buildout.hash.cfg + +Usage: +$ ./update-hash [BUILDOUT_HASH_CFG] + +Automatic installation using husky, from the root of the repository: +$ npm install + +Manual installation: $ cp update-hash .git/hooks/ $ $EDITOR .git/hooks/pre-commit @@ -36,7 +44,6 @@ import hashlib import os import shutil import sys -import tempfile # Note: this is an intentionally very restrictive and primitive # ConfigParser-ish parser. 
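The update-hash hunk below makes the pre-commit helper run on both Python 2 and 3: the line iterator falls back from .next to __next__, and the referenced file is opened in 'rb' so the digest is computed over bytes. A minimal sketch of that hashing step, assuming MD5 as the md5sum fields in buildout.hash.cfg suggest; the helper name is made up for illustration:

import hashlib
import os


def md5_of(hash_file_path, infile_dirname):
    # Hypothetical helper mirroring the hashing done by update-hash:
    # resolve the path referenced in buildout.hash.cfg relative to the
    # cfg file, read raw bytes, and return the hex digest, so the result
    # is identical under Python 2 and Python 3.
    path = os.path.join(infile_dirname, *hash_file_path.split('/'))
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()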
@@ -62,7 +69,11 @@ def main(): try: outfile = os.fdopen(outfile_fd, 'w') write = outfile.write - nextLine = iter(infile).next + if sys.version_info <= (3,): + nextLine = iter(infile).next + else: + nextLine = iter(infile).__next__ + while True: try: line = nextLine() @@ -80,7 +91,8 @@ def main(): os.path.join( infile_dirname, *hash_file_path.split('/') - ) + ), + 'rb', ).read() ).hexdigest(), eol,