Commit 9333d26a authored by Ivan Tyagov

Merge branch 'master' into ivan

parents cdc4009e f0518706
Changes
=======
0.102 (2015-05-22)
------------------
* kvm-recipe: a VM in a kvm-cluster can get the ipv4/hostname of all other VMs in the same cluster
* simplehttpserver-recipe: a simple HTTP server to serve files
0.101 (2015-04-29)
------------------
* kvm recipe: new parameters: external-disk-format, numa and cpu-options.
* kvm recipe: allow guest VM to connect to host http service via a local predefined ipv4 address (guestfwd).
0.100 (2015-04-20)
------------------
* re6stnet recipe: re6st-registry log can now be reopened with SIGUSR1
* re6stnet recipe: re6st certificate generation is improved.
0.99 (2015-04-10)
-----------------
* re6stnet: new recipe to deploy re6st registry (re6st master) with slapos.
0.98 (2015-04-09)
-----------------
* shellinabox: do not run in debug mode, it is much slower!
0.97 (2015-03-26)
-----------------
* switch softwaretype recipe: the recipe is backward compatible with old slapos node packages.
* kvm recipe: Avoid getting wrong storage path when creating kvm external disk
0.96 (2015-03-20)
-----------------
* slap configuration: recipe can read network information related to a tap interface from the master
* slap configuration: recipe will set up a data folder in the DATA directory of the computer partition if a disk is mounted
* switch softwaretype recipe: also generate tap network information when it exists
* switch softwaretype recipe: generate configuration for the DATA directory when a disk is mounted
0.95 (2015-02-14)
-----------------
* resiliency stack: allow web takeover to work inside of webrunner/erp5testnode.
* resiliency takeover script: create lock file stating that takeover has been done.
0.94 (2015-02-06)
-----------------
* kvm: allow configuring tap and nat interfaces at the same time with use-nat and use-tap [d3d65916]
* kvm: use -netdev to configure network interfaces, as -net is now obsolete [27baa9d4]
0.85 (2013-12-03)
-----------------
...
@@ -30,9 +30,9 @@ md5sum = 2202b18f269ad606d70e1864857ed93c
[apache]
# inspired on http://old.aclark.net/team/aclark/blog/a-lamp-buildout-for-wordpress-and-other-php-apps/
recipe = slapos.recipe.cmmi
-version = 2.4.10
+version = 2.4.12
url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
-md5sum = 44543dff14a4ebc1e9e2d86780507156
+md5sum = b8dc8367a57a8d548a9b4ce16d264a13
configure-command = cp -ar ${apr:location}/apr-${apr:version} srclib/apr/; cp -ar ${apr-util:location}/apr-util-${apr-util:version} srclib/apr-util; ./configure
configure-options = --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--disable-static
...
@@ -8,7 +8,7 @@ parts =
[automake]
recipe = slapos.recipe.cmmi
-md5sum = 140e084223bf463a24c1a28427c6aec7
+md5sum = 9a1ddb0e053474d9d1105cfe39b0c48d
-url = http://ftp.gnu.org/gnu/automake/automake-1.12.6.tar.xz
+url = http://ftp.gnu.org/gnu/automake/automake-1.15.tar.xz
environment =
PATH =${autoconf:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
[buildout]
extends =
../../stack/slapos.cfg
parts = babeld
[babeld]
recipe = slapos.recipe.cmmi
url = https://lab.nexedi.cn/rafael/babeld/repository/archive.tar.gz?ref=v1.6.0-nxd1
md5sum = 1f269d01321103873b8d245df19984f0
configure-command =
echo "No configure.."
make-targets =
install PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
parts =
binutils
extends =
../bison/buildout.cfg
../zlib/buildout.cfg
[binutils]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/binutils/binutils-2.21.1.tar.bz2
md5sum = bde820eac53fa3a8d8696667418557ad
environment =
PATH=${bison:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[buildout]
extends =
../m4/buildout.cfg
../xz-utils/buildout.cfg
parts =
bison
[bison]
patch-options = -p1
patches =
${:_profile_base_location_}/bison-drop.gets.patch#ac06cbaa298ac686d0b0c04bc03e6ad8
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/bison/bison-2.5.tar.bz2
+url = http://ftp.gnu.org/gnu/bison/bison-3.0.2.tar.xz
-md5sum = 9dba20116b13fc61a0846b0058fbe004
+md5sum = 146be9ff9fbd27497f0bf2286a5a2082
environment =
M4=${m4:location}/bin/m4
PATH=${xz-utils:location}/bin:%(PATH)s
[buildout]
extends =
../gmp/buildout.cfg
../xz-utils/buildout.cfg
parts =
coreutils-output
@@ -12,7 +13,7 @@ configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_} --enable-install-program=tr,basename,uname,cat,cp,ls
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
-LDFLAGS =-Wl,--as-needed
+LDFLAGS=-Wl,--as-needed -L${gmp:location}/lib -Wl,-rpath=${gmp:location}/lib
[coreutils-output]
# Shared binary location to ease migration
...
@@ -12,8 +12,8 @@ parts =
[curl]
recipe = slapos.recipe.cmmi
-url = http://curl.haxx.se/download/curl-7.39.0.tar.bz2
+url = http://curl.haxx.se/download/curl-7.42.1.tar.bz2
-md5sum = 1efecb5b0e43c17d968f0d228bbbbbbd
+md5sum = 296945012ce647b94083ed427c1877a8
configure-options =
--disable-static
--disable-ldap
...
@@ -6,8 +6,8 @@ parts = dash-output
[dash]
recipe = slapos.recipe.cmmi
-url = http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.7.tar.gz
+url = http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.8.tar.gz
-md5sum = f6cedb10ae7258adb5ab17a10ae80d51
+md5sum = 5c152209680dab3c319e8923f6c51378
configure-options =
--disable-static
--disable-fnmatch
...
@@ -10,8 +10,8 @@ parts =
[dbus]
recipe = slapos.recipe.cmmi
-url = http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz
+url = http://dbus.freedesktop.org/releases/dbus/dbus-1.8.16.tar.gz
-md5sum = b9f4a18ee3faa1e07c04aa1d83239c43
+md5sum = 020824a38850501e7d6ba8307a7c5ac3
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-options =
--disable-static
@@ -25,8 +25,8 @@ environment =
[dbus-glib]
recipe = slapos.recipe.cmmi
-url = http://dbus.freedesktop.org/releases/dbus-glib/dbus-glib-0.102.tar.gz
+url = http://dbus.freedesktop.org/releases/dbus-glib/dbus-glib-0.104.tar.gz
-md5sum = f76b8558fd575d0106c3a556eaa49184
+md5sum = 5497d2070709cf796f1878c75a72a039
configure-options =
--disable-static
--disable-gtk-doc-html
...
[buildout]
parts = dtach
[dtach-build]
recipe = slapos.recipe.cmmi
version = 0.8
url = http://freefr.dl.sourceforge.net/project/dtach/dtach/0.8/dtach-${:version}.tar.gz
md5sum = ec5999f3b6bb67da19754fcb2e5221f3
keep-compile-dir = true
make-targets =
[dtach]
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
dtach-bin = dtach
command =
mkdir -p ${:location}/bin
cp -rp ${dtach-build:compile-directory}/dtach-${dtach-build:version}/${:dtach-bin} ${:location}/bin
\ No newline at end of file
@@ -8,8 +8,8 @@ extends =
[file]
recipe = slapos.recipe.cmmi
-url = ftp://ftp.astron.com/pub/file/file-5.20.tar.gz
+url = ftp://ftp.astron.com/pub/file/file-5.22.tar.gz
-md5sum = 5d5e13eb3e0e13839da869a31790faf2
+md5sum = 8fb13e5259fe447e02c4a37bc7225add
configure-options =
--disable-static
environment =
...
@@ -26,12 +26,12 @@ depends =
${liberation-fonts:location}
${ipaex-fonts:location}
-version = 33.1
+version = 36.0.4
# MD5SUMs are available at :
# https://ftp.mozilla.org/pub/mozilla.org/firefox/releases/${:version}/MD5SUMS
-x86 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 1106b9c56da48a3e5c5ef965fea70078
+x86 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 6f50b82a6c020d0f29a04cfcd9fe8282
-x86-64 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 4201d94f32b6b778456e5681cdcacd76
+x86-64 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 119d9369a3b1be282936c35bf036414f
script =
if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[guessPlatform()].split(' ')
...
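The firefox script above picks the url/md5sum pair matching the build host. A minimal sketch of that kind of platform guess (the helper name, mapping and values below are assumptions for illustration, not the profile's actual guessPlatform implementation):

import platform

def guess_platform():
    # Map the host architecture to the option keys used above ("x86" / "x86-64").
    return 'x86-64' if platform.machine() == 'x86_64' else 'x86'

# Hypothetical stand-in for the [firefox] section: values are "<url> <md5sum>" pairs.
options = {
    'x86': 'https://example.invalid/firefox-i686.tar.bz2 00000000000000000000000000000000',
    'x86-64': 'https://example.invalid/firefox-x86_64.tar.bz2 11111111111111111111111111111111',
}
url, md5sum = options[guess_platform()].split(' ')
print(url, md5sum)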
[buildout]
extends =
../../component/git/buildout.cfg
../../component/ruby/buildout.cfg
../../component/icu/buildout.cfg
[fluentd]
recipe = rubygemsrecipe
ruby-executable = ${ruby:location}/bin/ruby
gems =
fluentd
gem-options = --with-icu-lib=${icu:location}/lib/ --with-icu-dir=${icu:location}/
environment =
LDFLAGS = -L${icu:location}/lib -Wl,-rpath=${icu:location}/lib
CFLAGS = -I${icu:location}/include
@@ -26,12 +26,12 @@ md5sum = 5c781723a0d9ed6188960defba8e91cf
# http://ipafont.ipa.go.jp/
[ipaex-fonts]
<= fonts-base
-url = http://ipafont.ipa.go.jp/ipaexfont/IPAexfont00201.php
+url = http://dl.sourceforge.jp/ipafonts/57330/IPAexfont00201.zip
md5sum = 7bf84182a04a9632268dbcb03f100d05
[ipa-fonts]
<= fonts-base
-url = http://ipafont.ipa.go.jp/ipafont/IPAfont00303.php
+url = http://dl.sourceforge.jp/ipafonts/51868/IPAfont00303.zip
md5sum = 39a828acf27790adbe4944dfb4d94bb1
[ocrb-fonts]
@@ -43,3 +43,36 @@ md5sum = 9f2acd83291a31dbe053912f4115db75
<= fonts-base
url = ftp://ftp.free.fr/mirrors/ftp.debian.org/pool/main/f/fonts-android/fonts-android_4.3.orig.tar.xz
md5sum = 2d41d5342eb5f61591ddeec5b80da74d
# Microsoft's TrueType core fonts
# non-free so not enabled by default
[msttcore-fonts]
location = ${fonts:location}/${:_buildout_section_name_}
recipe = slapos.recipe.build
script =
from zc.buildout.download import Download
d = self.options['location']
fonts = []
download = lambda x, dl=Download(self.buildout['buildout']): (
dl("http://downloads.sf.net/corefonts/%%s32.exe" %% name, md5sum=md5sum)
for md5sum, name in (x.split() for x in x.splitlines() if x))
extract = lambda x, d=d, p7z="${p7zip:location}/bin/7za": any(
subprocess.check_call((p7z, "x", "-ssc-", path, "*.ttf"), cwd=d)
for path, is_temp in x)
try: fonts += download(self.options['fonts']); os.makedirs(d); extract(fonts)
except: shutil.rmtree(d, ignore_errors=True); raise
finally: any(os.remove(path) for path, is_temp in fonts if is_temp)
slapos_promise =
slapos_update_promise = ${:slapos_promise}
fonts =
cbdc2fdd7d2ed0832795e86a8b9ee19a andale
9637df0e91703179f0723ec095a36cb5 arial
c9089ae0c3b3d0d8c4b0a95979bb9ff0 arialb
2b30de40bb5e803a0452c7715fc835d1 comic
4e412c772294403ab62fb2d247d85c60 courie
4d90016026e2da447593b41a8d8fa8bd georgi
7907c7dd6684e9bade91cff82683d9d7 impact
ed39c8ef91b9fb80f76f702568291bd5 times
0d7ea16cac6261f8513a061fbfcdb2b5 trebuc
12d2a75f8156e10607be1eaa8e8ef120 verdan
230a1d13a365b22815f502eb24d9149b webdin
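For readability, here is a rough Python expansion of what the inline [msttcore-fonts] script above does (a sketch only: the real code runs inside slapos.recipe.build with self.options, and the 7za binary comes from the p7zip part):

import os, shutil, subprocess
from zc.buildout.download import Download

def install_msttcore_fonts(location, fonts_spec, p7z):
    # fonts_spec: the "fonts" option above, lines of "<md5sum> <name>".
    download = Download()
    downloaded = []
    try:
        for line in fonts_spec.splitlines():
            if not line.strip():
                continue
            md5sum, name = line.split()
            # Fetch the self-extracting corefonts archive, verifying its md5sum.
            downloaded.append(download(
                "http://downloads.sf.net/corefonts/%s32.exe" % name, md5sum=md5sum))
        os.makedirs(location)
        for path, _ in downloaded:
            # Extract only the .ttf files into the target font directory.
            subprocess.check_call((p7z, "x", "-ssc-", path, "*.ttf"), cwd=location)
    except Exception:
        shutil.rmtree(location, ignore_errors=True)
        raise
    finally:
        for path, is_temp in downloaded:
            if is_temp:
                os.remove(path)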
@@ -13,8 +13,8 @@ parts =
[freetype]
recipe = slapos.recipe.cmmi
-url = http://download.savannah.gnu.org/releases/freetype/freetype-2.5.3.tar.bz2
+url = http://download.savannah.gnu.org/releases/freetype/freetype-2.5.5.tar.bz2
-md5sum = d6b60f06bfc046e43ab2a6cbfd171d65
+md5sum = 2a7a314927011d5030903179cf183be0
pkg_config_depends = ${zlib:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-options =
...
[buildout]
extends =
../../stack/slapos.cfg
../python-kerberos/buildout.cfg
../git/buildout.cfg
develop =
gateone-repository
parts =
gateone-develop
gateone
[gateone-repository]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/liftoff/GateOne.git
branch = master
git-executable = ${git:location}/bin/git
[gateone-develop]
recipe = zc.recipe.egg:develop
setup = ${gateone-repository:location}
[gateone]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
${python-kerberos:egg}
tornado
setuptools
pyOpenSSL
futures
PIL
gateone
@@ -2,10 +2,8 @@
# Mostly required to support languages different than C or C++
[buildout]
extends =
../m4/buildout.cfg
../gmp/buildout.cfg
../xz-utils/buildout.cfg
../zip/buildout.cfg
parts =
gcc
@@ -23,8 +21,8 @@ environment =
[mpc]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz
+url = http://ftp.gnu.org/gnu/mpc/mpc-1.0.3.tar.gz
-md5sum = 68fadff3358fb3e7976c7a398a0af4c3
+md5sum = d6a1d5f8ddea3abd2cc3e98f58352d26
configure-options =
--with-gmp=${gmp:location}
--with-mpfr=${mpfr:location}
@@ -34,29 +32,18 @@ environment =
[isl]
recipe = slapos.recipe.cmmi
-url = ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.12.2.tar.bz2
+url = ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.14.tar.bz2
-md5sum = e039bfcfb6c2ab039b8ee69bf883e824
+md5sum = acd347243fca5609e3df37dba47fd0bb
configure-options =
--with-gmp-prefix=${gmp:location}
--disable-static
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib
[cloog]
recipe = slapos.recipe.cmmi
url = ftp://gcc.gnu.org/pub/gcc/infrastructure/cloog-0.18.1.tar.gz
md5sum = e34fca0540d840e5d0f6427e98c92252
configure-options =
--with-gmp-prefix=${gmp:location}
--with-isl-prefix=${isl:location}
--disable-static
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib
[gcc-common]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2
+url = http://ftp.gnu.org/gnu/gcc/gcc-5.1.0/gcc-5.1.0.tar.bz2
-md5sum = 4df8ee253b7f3863ad0b86359cd39c43
+md5sum = d5525b1127d07d215960e6051c5da35e
# make install does not work when several core are used
make-targets = install -j1
@@ -70,9 +57,8 @@ configure-options =
--with-mpc=${mpc:location}
--enable-languages="c,c++"
--with-isl=${isl:location}
--with-cloog=${cloog:location}
environment =
-LDFLAGS=-Wl,-rpath=${cloog:location}/lib -Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
+LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
[gcc-minimal]
<= gcc-common
@@ -98,6 +84,5 @@ configure-options =
--with-mpc=${mpc:location}
--enable-languages="c,c++,fortran"
--with-isl=${isl:location}
--with-cloog=${cloog:location}
environment =
-LDFLAGS=-Wl,-rpath=${cloog:location}/lib -Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
+LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
[buildout]
extends =
../numpy/buildout.cfg
../curl/buildout.cfg
../geos/buildout.cfg
../giflib/buildout.cfg
../jasper/buildout.cfg
../libexpat/buildout.cfg
../pcre/buildout.cfg
../proj4/buildout.cfg
../sqlite3/buildout.cfg
../webp/buildout.cfg
parts =
gdal-python
[gdal]
recipe = slapos.recipe.cmmi
version = 1.11.1
url = http://download.osgeo.org/gdal/${:version}/gdal-${:version}.tar.xz
md5sum = 2e126d7c6605691d38f3e71b945f5c73
configure-options =
--with-curl=${curl:location}/bin/curl-config
--with-expat=${libexpat:location}
--with-geos=${geos:location}/bin/geos-config
--with-gif=${giflib:location}
--with-jasper=${jasper:location}
--with-jpeg=${libjpeg:location}
--with-libtiff=${libtiff:location}
--with-libz=${zlib:location}
--with-png=${libpng:location}
--with-static-proj4=${proj4:location}
--with-sqlite3=${sqlite3:location}
--with-wepb=${webp:location}
--with-xml2=${libxml2:location}/bin/xml2-config
environment =
CPPFLAGS=-I${pcre:location}/include
LDFLAGS=-L${pcre:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib -Wl,-rpath=${curl:location}/lib -Wl,-rpath=${geos:location}/lib -Wl,-rpath=${giflib:location}/lib -Wl,-rpath=${jasper:location}/lib -Wl,-rpath=${jbigkit:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libjpeg:location}/lib -Wl,-rpath=${libpng:location}/lib -Wl,-rpath=${libtiff:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${webp:location}/lib -Wl,-rpath=${zlib:location}/lib
[gdal-python]
recipe = zc.recipe.egg:custom
egg = GDAL ==${gdal:version}
setup-eggs = ${numpy:egg}
rpath = ${:library-dirs}
include-dirs =
${gdal:location}/include
library-dirs =
${gdal:location}/lib
environment = gdal-python-env
[gdal-python-env]
PATH = ${gdal:location}/bin:%(PATH)s
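As a quick check that the GDAL egg built above resolves its C libraries through the baked-in rpath, something like the following can be run with the buildout's Python (a sketch; the exact version string depends on the gdal part):

# Run with the buildout Python; imports come from the GDAL egg built above.
from osgeo import gdal, ogr
print(gdal.VersionInfo("RELEASE_NAME"))   # expected to match ${gdal:version}, e.g. 1.11.1
print(ogr.GetDriverCount(), "OGR drivers available")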
[buildout]
parts =
geos
[geos]
recipe = slapos.recipe.cmmi
version = 3.4.2
url = http://download.osgeo.org/geos/geos-${:version}.tar.bz2
md5sum = fc5df2d926eb7e67f988a43a92683bae
configure-options =
--disable-dependency-tracking
--disable-static
@@ -2,16 +2,16 @@
parts =
gettext
extends =
../ncurses/buildout.cfg
../libxml2/buildout.cfg
../lunzip/buildout.cfg
../ncurses/buildout.cfg
../perl/buildout.cfg
../zlib/buildout.cfg
../xz-utils/buildout.cfg
[gettext]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/pub/gnu/gettext/gettext-0.19.3.tar.xz
+url = http://ftp.gnu.org/pub/gnu/gettext/gettext-0.19.4.tar.lz
-md5sum = 092c3f460553ceb4a638ff81d36434c4
+md5sum = 3e2c6f64efbffd97520ec3b998682c6b
configure-options =
--disable-static
@@ -29,6 +29,6 @@ configure-options =
--without-xz
environment =
-PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${perl:location}/bin:${lunzip:location}/bin:%(PATH)s
CPPFLAGS=-I${libxml2:location}/include -I${zlib:location}/include -I${ncurses:location}/include
LDFLAGS=-L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
@@ -2,7 +2,6 @@
extends =
../fontconfig/buildout.cfg
../freetype/buildout.cfg
../libjpeg/buildout.cfg
../libtiff/buildout.cfg
../libxml2/buildout.cfg
../pkgconfig/buildout.cfg
@@ -33,8 +32,8 @@ environment =
[ghostscript-9]
<= ghostscript-common
-url = http://downloads.ghostscript.com/public/ghostscript-9.15.tar.bz2
+url = http://downloads.ghostscript.com/public/ghostscript-9.16.tar.bz2
-md5sum = 0ee21d975c67a4883ba66bcc332418f5
+md5sum = 21732fd6e39acc283bc623b8842cbfbb
[ghostscript-fonts]
recipe = hexagonit.recipe.download
...
@@ -8,6 +8,7 @@ extends =
../libexpat/buildout.cfg
../openssl/buildout.cfg
../pcre/buildout.cfg
../perl/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts =
@@ -15,16 +16,16 @@ parts =
[git]
recipe = slapos.recipe.cmmi
-url = https://www.kernel.org/pub/software/scm/git/git-2.1.0.tar.xz
+url = https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.xz
-md5sum = 47b1f55d9a16be112f7ae2c778a9b30c
+md5sum = 43e01f9d96ba8c11611e0eef0d9f9f28
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
--with-zlib=${zlib:location}
--with-libpcre=${pcre:location}
--with-expat=${libexpat:location}
--with-perl=${perl:location}/bin/perl
environment =
NO_PERL=y
NO_PYTHON=y
NO_TCLTK=y
PATH=${curl:location}/bin:${gettext:location}/bin:${xz-utils:location}/bin:%(PATH)s
...
@@ -10,8 +10,8 @@ parts =
[glib]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/glib-2.40.0.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/glib-2.42.1.tar.xz
-md5sum = 05fb7cb17eacbc718e90366a1eae60d9
+md5sum = 89c4119e50e767d3532158605ee9121a
configure-options =
--disable-static
--disable-selinux
...
@@ -11,8 +11,8 @@ parts =
[glibmm]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/glibmm-2.40.0.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/glibmm-2.42.0.tar.xz
-md5sum = f62754f4f5c9030f8ff43c7ed20556c2
+md5sum = 7c52cc42085d30ac3b73d74c3f2eb22e
pkg_config_depends = ${glib:location}/lib/pkgconfig:${libsigc:location}/lib/pkgconfig
configure-options =
--disable-documentation
...
[buildout]
parts =
glpk
[glpk]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/glpk/glpk-4.55.tar.gz
md5sum = c632a7a631b8aed02e28eafcd99477f7
@@ -9,7 +9,7 @@ parts =
[gmp]
recipe = slapos.recipe.cmmi
version = 6.0.0a
-url = ftp://ftp.gmplib.org/pub/gmp-6.0.0/gmp-${:version}.tar.xz
+url = https://gmplib.org/download/gmp/gmp-${:version}.tar.xz
md5sum = 1e6da4e434553d2811437aa42c7f7c76
configure-options =
--enable-cxx
...
@@ -4,20 +4,20 @@ extends =
../nettle/buildout.cfg
../p11-kit/buildout.cfg
../pkgconfig/buildout.cfg
-../xz-utils/buildout.cfg
+../lunzip/buildout.cfg
../zlib/buildout.cfg
parts = gnutls
[gpg-error]
recipe = slapos.recipe.cmmi
-url = ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.17.tar.bz2
+url = ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.19.tar.bz2
-md5sum = b4f8d8b9ff14aed41f279aa844563539
+md5sum = c04c16245b92829281f43b5bef7d16da
[gcrypt]
recipe = slapos.recipe.cmmi
-url = ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2
+url = ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.3.tar.bz2
-md5sum = b54395a93cb1e57619943c082da09d5f
+md5sum = 4262c3aadf837500756c2051a5c4ae5e
configure-options =
--with-gpg-error-prefix=${gpg-error:location}
environment =
@@ -26,8 +26,8 @@ environment =
[gnutls]
recipe = slapos.recipe.cmmi
-url = ftp://ftp.gnutls.org/gcrypt/gnutls/v3.2/gnutls-3.2.20.tar.xz
+url = ftp://ftp.gnutls.org/gcrypt/gnutls/v3.2/gnutls-3.2.21.tar.lz
-md5sum = 637ef52191bf87a597240a49cc533972
+md5sum = a653d93a5a26434474c603470cc86666
configure-options =
--disable-static
--disable-libdane
@@ -35,7 +35,7 @@ configure-options =
--disable-crywrap
--without-tpm
environment =
-PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${pkgconfig:location}/bin:${lunzip:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${nettle:location}/lib/pkgconfig:${p11-kit:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
CPPFLAGS=-I${gmp:location}/include -I${zlib:location}/include
LDFLAGS=-L${gmp:location}/lib -Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${nettle:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
@@ -6,19 +6,14 @@ parts =
graphviz
extends =
../bzip2/buildout.cfg
../fontconfig/buildout.cfg
../freetype/buildout.cfg
../libpng/buildout.cfg
../glib/buildout.cfg
../gtk-2/buildout.cfg
../pkgconfig/buildout.cfg
../xorg/buildout.cfg
../zlib/buildout.cfg
[graphviz]
recipe = slapos.recipe.cmmi
-url = http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.36.0.tar.gz
+url = http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.38.0.tar.gz
-md5sum = 1f41664dba0c93109ac8b71216bf2b57
+md5sum = 5b6a829b2ac94efcd5fa3c223ed6d3ae
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}
configure-options =
--with-included-ltdl
...
@@ -8,8 +8,8 @@ parts =
[grep]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/grep/grep-2.20.tar.xz
+url = http://ftp.gnu.org/gnu/grep/grep-2.21.tar.xz
-md5sum = 2cbea44a4f1548aee20b9ff2d3076908
+md5sum = 43c48064d6409862b8a850db83c8038a
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${pcre:location}/include
...
@@ -6,13 +6,15 @@ parts =
groonga
extends =
../jemalloc/buildout.cfg
../libstemmer/buildout.cfg
../pkgconfig/buildout.cfg
../zlib/buildout.cfg
[groonga]
recipe = slapos.recipe.cmmi
-url = http://packages.groonga.org/source/groonga/groonga-4.0.7.tar.gz
+url = http://packages.groonga.org/source/groonga/groonga-5.0.4.tar.gz
-md5sum = 5d11546f921377d040f700bb74f1d7e4
+md5sum = 4c83e9b12e9fad96a5b51874ddf66dc4
# temporary patch to respect more tokens in natural language mode.
patches =
${:_profile_base_location_}/groonga.patch#9ed02fbe8400402d3eab47eee149978b
@@ -23,16 +25,20 @@ configure-options =
--disable-benchmark
--disable-document
--disable-groonga-httpd
--with-jemalloc
--with-libstemmer
--with-libstemmer-include=${libstemmer:location}/include
--with-libstemmer-lib=${libstemmer:location}/lib
--without-mecab
--without-kytea
environment =
-CPPFLAGS =-I${zlib:location}/include
+CPPFLAGS=-I${libstemmer:location}/include -I${libstemmer:location}/include -I${zlib:location}/include
-LDFLAGS =-Wl,-rpath=${zlib:location}/lib -L${zlib:location}/lib
+LDFLAGS=-Wl,-rpath=${jemalloc:location}/lib -L${jemalloc:location}/lib -Wl,-rpath=${libstemmer:location}/lib -L${libstemmer:location}/lib -Wl,-rpath=${zlib:location}/lib -L${zlib:location}/lib
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
-version = 1.0.6
-url = http://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-${:version}.tar.gz
-md5sum = 12740a835cfdf4b0dac66c3f2f152d84
+url = http://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.0.tar.gz
+md5sum = 0a0b523547999f90d347982fe9d03c0e
location = ${groonga:location}
configure-options =
--disable-static
...
[buildout]
extends =
../autoconf/buildout.cfg
../libtool/buildout.cfg
../automake/buildout.cfg
../bzip2/buildout.cfg
../fontconfig/buildout.cfg
@@ -12,7 +11,8 @@ extends =
../libjpeg/buildout.cfg
../libpng/buildout.cfg
../libtiff/buildout.cfg
-../libxml2/buildout.cfg
+../libtool/buildout.cfg
../m4/buildout.cfg
../perl/buildout.cfg
../pkgconfig/buildout.cfg
../xorg/buildout.cfg
@@ -43,6 +43,7 @@ configure-options =
--enable-fc
--disable-lto
environment =
M4=${m4:location}/bin/m4
PATH=${freetype:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${zlib:location}/include
@@ -65,8 +66,8 @@ environment =
[pango]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/pango-1.36.3.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/pango-1.36.8.tar.xz
-md5sum = d9532826e95bdb374355deebc42441bb
+md5sum = 217a9a753006275215fa9fa127760ece
pkg_config_depends = ${harfbuzz:location}/lib/pkgconfig:${harfbuzz:pkg_config_depends}
configure-options =
--disable-static
@@ -79,8 +80,8 @@ environment =
[gdk-pixbuf]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/gdk-pixbuf-2.30.7.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/gdk-pixbuf-2.31.1.tar.xz
-md5sum = 1195d26f14adfe99ff0878d37678a70a
+md5sum = 74cde211f5b7ac1015d1a7c9feee037c
pkg_config_depends = ${glib:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libX11:pkg_config_depends}
configure-options =
--disable-static
@@ -96,8 +97,8 @@ environment =
[atk]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/atk-2.12.0.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/atk-2.14.0.tar.xz
-md5sum = 930238dec55fdbf8eda9975b44f07b76
+md5sum = ecb7ca8469a5650581b1227d78051b8b
environment =
PATH=${glib:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig
@@ -106,8 +107,8 @@ environment =
[gtk-2]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/gtk+-2.24.23.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/gtk+-2.24.25.tar.xz
-md5sum = 0be39fbed4ca125645175cd6e22f2548
+md5sum = 612350704dd3aacb95355a4981930c6f
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${atk:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig
configure-options =
--disable-static
...
[buildout]
extends =
../bzip2/buildout.cfg
../fontconfig/buildout.cfg
../freetype/buildout.cfg
../glibmm/buildout.cfg
../gtk-2/buildout.cfg
../libpng/buildout.cfg
../libxml2/buildout.cfg
../perl/buildout.cfg
../xorg/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts =
gtkmm
...
@@ -11,8 +11,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
-url = http://www.haproxy.org/download/1.5/src/haproxy-1.5.8.tar.gz
+url = http://www.haproxy.org/download/1.5/src/haproxy-1.5.11.tar.gz
-md5sum = 7bffa1afa069d90ce03b7cd9aa0557cd
+md5sum = 5500a79d0d2b238d4a1e9749bd0c2cb2
configure-command = true
# If the system is running on Linux 2.6, we use "linux26" as the TARGET,
# otherwise use "generic".
...
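The TARGET selection described by the two comments above could be sketched like this (illustration only; the actual choice is made in the haproxy section's make options, which are not shown in this hunk):

import platform

def haproxy_make_target():
    # "linux26" on Linux 2.6 or later, "generic" otherwise, as the comment says.
    major_minor = tuple(int(x) for x in platform.release().split('.')[:2])
    return "linux26" if major_minor >= (2, 6) else "generic"

print("TARGET=%s" % haproxy_make_target())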
@@ -5,7 +5,6 @@ extends =
../bzip2/buildout.cfg
../freetype/buildout.cfg
../garbage-collector/buildout.cfg
../gcc/buildout.cfg
../gettext/buildout.cfg
../glibmm/buildout.cfg
../gtk-2/buildout.cfg
...
[buildout]
extends =
../numpy/buildout.cfg
../matplotlib/buildout.cfg
parts =
ipython
[ipython-env]
<= numpy-env
[ipython]
recipe = zc.recipe.egg:custom
egg = ipython
environment = ipython-env
setup-eggs = ${numpy:egg}
[ipython_notebook]
recipe = zc.recipe.egg:scripts
eggs = ipython[notebook]
${matplotlib:egg}
environment = ipython-env
setup-eggs = ${numpy:egg}
initialization =
# https://github.com/ipython/ipython/issues/5420
import os
os.environ['PYTHONPATH'] = ':'.join(sys.path)
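The initialization above works around the linked IPython issue by exporting the buildout-managed sys.path through PYTHONPATH, so that notebook kernels started as subprocesses see the same eggs. A minimal illustration of the mechanism (not the recipe's code):

import os, subprocess, sys

# Children inherit only the environment, not the parent's sys.path,
# so the buildout egg directories are passed along via PYTHONPATH.
os.environ['PYTHONPATH'] = ':'.join(sys.path)
subprocess.check_call([sys.executable, '-c', 'import sys; print(sys.path)'])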
Description: CVE-2014-8137: double-free in in jas_iccattrval_destroy()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967283,
https://bugzilla.redhat.com/attachment.cgi?id=967284
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173157
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -1010,7 +1010,6 @@ static int jas_icccurv_input(jas_iccattr
return 0;
error:
- jas_icccurv_destroy(attrval);
return -1;
}
@@ -1128,7 +1127,6 @@ static int jas_icctxtdesc_input(jas_icca
#endif
return 0;
error:
- jas_icctxtdesc_destroy(attrval);
return -1;
}
@@ -1207,8 +1205,6 @@ static int jas_icctxt_input(jas_iccattrv
goto error;
return 0;
error:
- if (txt->string)
- jas_free(txt->string);
return -1;
}
@@ -1329,7 +1325,6 @@ static int jas_icclut8_input(jas_iccattr
goto error;
return 0;
error:
- jas_icclut8_destroy(attrval);
return -1;
}
@@ -1498,7 +1493,6 @@ static int jas_icclut16_input(jas_iccatt
goto error;
return 0;
error:
- jas_icclut16_destroy(attrval);
return -1;
}
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -291,7 +291,10 @@ jas_image_t *jp2_decode(jas_stream_t *in
case JP2_COLR_ICC:
iccprof = jas_iccprof_createfrombuf(dec->colr->data.colr.iccp,
dec->colr->data.colr.iccplen);
- assert(iccprof);
+ if (!iccprof) {
+ jas_eprintf("error: failed to parse ICC profile\n");
+ goto error;
+ }
jas_iccprof_gethdr(iccprof, &icchdr);
jas_eprintf("ICC Profile CS %08x\n", icchdr.colorspc);
jas_image_setclrspc(dec->image, fromiccpcs(icchdr.colorspc));
Description: CVE-2014-8138: heap overflow in jp2_decode()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967280
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173162
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -389,6 +389,11 @@ jas_image_t *jp2_decode(jas_stream_t *in
/* Determine the type of each component. */
if (dec->cdef) {
for (i = 0; i < dec->numchans; ++i) {
+ /* Is the channel number reasonable? */
+ if (dec->cdef->data.cdef.ents[i].channo >= dec->numchans) {
+ jas_eprintf("error: invalid channel number in CDEF box\n");
+ goto error;
+ }
jas_image_setcmpttype(dec->image,
dec->chantocmptlut[dec->cdef->data.cdef.ents[i].channo],
jp2_getct(jas_image_clrspc(dec->image),
Description: CVE-2014-9029: Heap overflows in libjasper
Origin: vendor
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2014-11-28
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -1280,7 +1280,7 @@ static int jpc_dec_process_coc(jpc_dec_t
jpc_coc_t *coc = &ms->parms.coc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, coc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, coc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in COC marker segment\n");
return -1;
}
@@ -1306,7 +1306,7 @@ static int jpc_dec_process_rgn(jpc_dec_t
jpc_rgn_t *rgn = &ms->parms.rgn;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, rgn->compno) > dec->numcomps) {
+ if (JAS_CAST(int, rgn->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in RGN marker segment\n");
return -1;
}
@@ -1355,7 +1355,7 @@ static int jpc_dec_process_qcc(jpc_dec_t
jpc_qcc_t *qcc = &ms->parms.qcc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, qcc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, qcc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in QCC marker segment\n");
return -1;
}
@@ -13,6 +13,9 @@ patches =
${:_profile_base_location_}/misc-fixes.patch#1202be8418907dfe58f819f7b47da24f
${:_profile_base_location_}/fix-filename-buffer-overflow.patch#38403f9c82a18547beca16c9c6f4ce7a
${:_profile_base_location_}/CVE-2011-4516-and-CVE-2011-4517.patch#a9676718ed016f66a3c76acf764c9e72
${:_profile_base_location_}/CVE-2014-9029.patch#d69195cf17878f024cc0b580045ec314
${:_profile_base_location_}/CVE-2014-8137.patch#bc5103b9a33315538106bf6652383a10
${:_profile_base_location_}/CVE-2014-8138.patch#bfb9604fe84b6e686fea29bd760cf34d
# jasper configure script is not executable by default
configure-command =
/bin/sh ./configure --prefix=${buildout:parts-directory}/${:_buildout_section_name_} --disable-static --enable-shared --disable-opengl
...
@@ -6,7 +6,6 @@ extends =
../bzip2/buildout.cfg
../imagemagick/buildout.cfg
../jbigkit/buildout.cfg
../libpng/buildout.cfg
../zlib/buildout.cfg
parts =
libdmtx
...
@@ -4,9 +4,9 @@ parts =
[libffi]
recipe = slapos.recipe.cmmi
-version = 3.1
+version = 3.2.1
url = ftp://sourceware.org/pub/libffi/libffi-${:version}.tar.gz
-md5sum = f5898b29bbfd70502831a212d9249d10
+md5sum = 83b89587607e3eb65c70d361f13bab43
location = ${buildout:parts-directory}/${:_buildout_section_name_}
patch-options = -p1
patches =
...
@@ -18,15 +18,15 @@ environment =
[libpng12]
<= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.2.51.tar.xz
+url = http://download.sourceforge.net/libpng/libpng-1.2.52.tar.xz
-md5sum = 4efba67fa5aa2b785c6fcec2cc3e90c9
+md5sum = 49d5c71929bf69a172147c47b9309fbe
[libpng15]
<= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.5.19.tar.xz
+url = http://download.sourceforge.net/libpng/libpng-1.5.21.tar.xz
-md5sum = 1e2afb372b9979b2133829ee8136f30f
+md5sum = 22eaa2e90352fcd153bed0cdecf542cb
[libpng]
<= libpng-common
-url = http://download.sourceforge.net/libpng/libpng-1.6.14.tar.xz
+url = http://download.sourceforge.net/libpng/libpng-1.6.16.tar.xz
-md5sum = aefd9fda2178acab908707737174a0cd
+md5sum = 23b7286b5d4a86de950fd2ffc5cac742
@@ -9,14 +9,14 @@ parts =
[libreoffice-bin]
recipe = slapos.recipe.build
# here, two %s are used, first one is for directory name (eg. x86_64), and second one is for filename (eg. x86-64).
-version = 4.3.4.1
+version = 4.4.1.2
url = http://downloadarchive.documentfoundation.org/libreoffice/old/${:version}/rpm/%s/LibreOffice_${:version}_Linux_%s_rpm.tar.gz
# supported architectures md5sums
-md5sum_x86 = 6fddbe8508fd2e6d5b942ad360013da2
+md5sum_x86 = 23b740e84bc2b8774d50ac97dd553afd
-md5sum_x86-64 = b3b0eab681d2cd2d22e3c065530b36c2
+md5sum_x86-64 = 8de6f3d1d4af3826a3a1695a47444948
# where office code can be found?
-officedir = libreoffice4.3
+officedir = libreoffice4.4
# script to install
script =
...
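The libreoffice-bin comment above describes a URL template in which two %s placeholders remain after buildout substitutes ${:version}. A small sketch of the substitution the recipe performs (illustrative values):

version = "4.4.1.2"
# First %s: directory name (x86_64); second %s: file-name spelling (x86-64).
url = ("http://downloadarchive.documentfoundation.org/libreoffice/old/"
       + version + "/rpm/%s/LibreOffice_" + version + "_Linux_%s_rpm.tar.gz")
print(url % ("x86_64", "x86-64"))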
@@ -2,13 +2,11 @@
parts = librsvg
extends =
../bzip2/buildout.cfg
../pkgconfig/buildout.cfg
../libpng/buildout.cfg
../libxml2/buildout.cfg
../zlib/buildout.cfg
../gtk-2/buildout.cfg
-../xorg/buildout.cfg
+../libxml2/buildout.cfg
../pkgconfig/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
[libcroco]
recipe = slapos.recipe.cmmi
@@ -23,8 +21,8 @@ environment =
[librsvg]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/librsvg-2.40.2.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/librsvg-2.40.5.tar.xz
-md5sum = acdecdb9f08f3bf662a68bf7dafb8b82
+md5sum = c2b044fccf415902a052d0e978e0ea60
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${zlib:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig:${libcroco:location}/lib/pkgconfig
configure-options =
--disable-static
...
@@ -9,8 +9,8 @@ parts =
[libsigc]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/libsigc++-2.3.1.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.14/3.14.2/sources/libsigc++-2.4.0.tar.xz
-md5sum = 06a0569e8ba161d3428e7daa838682f8
+md5sum = c6cd2259f5ef973e4c8178d0abbdbfa7
configure-options =
--disable-documentation
environment =
...
# libstemmer
# http://snowball.tartarus.org/
[libstemmer]
recipe = slapos.recipe.cmmi
url = http://snowball.tartarus.org/dist/libstemmer_c.tgz
md5sum = 6f32f8f81cd6fa0150333ab540af5e27
patch-options = -p0
patches =
${:_profile_base_location_}/libstemmer.Makefile.patch#f337bfe457ecf00d3297043a153d402a
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = true
make-options = DESTDIR=${:location}
--- Makefile.orig 2014-12-29 15:39:57.433970435 +0100
+++ Makefile 2014-12-29 15:54:42.115248445 +0100
@@ -1,9 +1,19 @@
include mkinc.mak
-CFLAGS=-Iinclude
-all: libstemmer.o stemwords
-libstemmer.o: $(snowball_sources:.c=.o)
- $(AR) -cru $@ $^
-stemwords: examples/stemwords.o libstemmer.o
- $(CC) -o $@ $^
+CFLAGS+=-Iinclude -fPIC
+DESTDIR=/usr/local
+all: libstemmer.so libstemmer.a stemwords
+libstemmer.so: $(snowball_sources:.c=.o)
+ $(CC) $(CFLAGS) -shared $(LDFLAGS) -Wl,-soname,$@.0d -o $@.0d.0.0 $^
+ ln -s $@.0d.0.0 $@.0d
+ ln -s $@.0d.0.0 $@
+libstemmer.a: $(snowball_sources:.c=.o)
+ $(AR) -crs $@ $^
+stemwords: examples/stemwords.o libstemmer.so
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ $^
clean:
- rm -f stemwords *.o src_c/*.o runtime/*.o libstemmer/*.o
+ rm -f stemwords libstemmer.so* libstemmer.a *.o src_c/*.o runtime/*.o libstemmer/*.o
+install: all
+ mkdir -p $(DESTDIR)/include
+ mkdir -p $(DESTDIR)/lib
+ cp -a include/*.h $(DESTDIR)/include/
+ cp -a libstemmer.* $(DESTDIR)/lib/
@@ -4,8 +4,8 @@ parts =
[libtasn1]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/libtasn1/libtasn1-4.2.tar.gz
+url = http://ftp.gnu.org/gnu/libtasn1/libtasn1-4.5.tar.gz
-md5sum = 414df906df421dee0a5cf7548788d153
+md5sum = 81d272697545e82d39f6bd14854b68f0
configure-options =
--disable-static
--disable-gtk-doc-html
Index: tiff-4.0.3/tools/ppm2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/ppm2tiff.c 2013-06-23 10:36:50.779629492 -0400
+++ tiff-4.0.3/tools/ppm2tiff.c 2013-06-23 10:36:50.775629494 -0400
@@ -89,6 +89,7 @@
int c;
extern int optind;
extern char* optarg;
+ tmsize_t scanline_size;
if (argc < 2) {
fprintf(stderr, "%s: Too few arguments\n", argv[0]);
@@ -237,8 +238,16 @@
}
if (TIFFScanlineSize(out) > linebytes)
buf = (unsigned char *)_TIFFmalloc(linebytes);
- else
- buf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
+ else {
+ scanline_size = TIFFScanlineSize(out);
+ if (scanline_size != 0)
+ buf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
+ else {
+ fprintf(stderr, "%s: scanline size overflow\n",infile);
+ (void) TIFFClose(out);
+ exit(-2);
+ }
+ }
if (resolution > 0) {
TIFFSetField(out, TIFFTAG_XRESOLUTION, resolution);
TIFFSetField(out, TIFFTAG_YRESOLUTION, resolution);
Index: tiff-4.0.3/tools/tiff2pdf.c
===================================================================
--- tiff-4.0.3.orig/tools/tiff2pdf.c 2013-06-23 10:36:50.979629486 -0400
+++ tiff-4.0.3/tools/tiff2pdf.c 2013-06-23 10:36:50.975629486 -0400
@@ -3341,33 +3341,56 @@
uint32 height){
tsize_t i=0;
- uint16 ri =0;
- uint16 v_samp=1;
- uint16 h_samp=1;
- int j=0;
-
- i++;
-
- while(i<(*striplength)){
+
+ while (i < *striplength) {
+ tsize_t datalen;
+ uint16 ri;
+ uint16 v_samp;
+ uint16 h_samp;
+ int j;
+ int ncomp;
+
+ /* marker header: one or more FFs */
+ if (strip[i] != 0xff)
+ return(0);
+ i++;
+ while (i < *striplength && strip[i] == 0xff)
+ i++;
+ if (i >= *striplength)
+ return(0);
+ /* SOI is the only pre-SOS marker without a length word */
+ if (strip[i] == 0xd8)
+ datalen = 0;
+ else {
+ if ((*striplength - i) <= 2)
+ return(0);
+ datalen = (strip[i+1] << 8) | strip[i+2];
+ if (datalen < 2 || datalen >= (*striplength - i))
+ return(0);
+ }
switch( strip[i] ){
- case 0xd8:
- /* SOI - start of image */
+ case 0xd8: /* SOI - start of image */
_TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), 2);
*bufferoffset+=2;
- i+=2;
break;
- case 0xc0:
- case 0xc1:
- case 0xc3:
- case 0xc9:
- case 0xca:
+ case 0xc0: /* SOF0 */
+ case 0xc1: /* SOF1 */
+ case 0xc3: /* SOF3 */
+ case 0xc9: /* SOF9 */
+ case 0xca: /* SOF10 */
if(no==0){
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- for(j=0;j<buffer[*bufferoffset+9];j++){
- if( (buffer[*bufferoffset+11+(2*j)]>>4) > h_samp)
- h_samp = (buffer[*bufferoffset+11+(2*j)]>>4);
- if( (buffer[*bufferoffset+11+(2*j)] & 0x0f) > v_samp)
- v_samp = (buffer[*bufferoffset+11+(2*j)] & 0x0f);
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ ncomp = buffer[*bufferoffset+9];
+ if (ncomp < 1 || ncomp > 4)
+ return(0);
+ v_samp=1;
+ h_samp=1;
+ for(j=0;j<ncomp;j++){
+ uint16 samp = buffer[*bufferoffset+11+(3*j)];
+ if( (samp>>4) > h_samp)
+ h_samp = (samp>>4);
+ if( (samp & 0x0f) > v_samp)
+ v_samp = (samp & 0x0f);
}
v_samp*=8;
h_samp*=8;
@@ -3381,45 +3404,43 @@
(unsigned char) ((height>>8) & 0xff);
buffer[*bufferoffset+6]=
(unsigned char) (height & 0xff);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
-
+ *bufferoffset+=datalen+2;
+ /* insert a DRI marker */
buffer[(*bufferoffset)++]=0xff;
buffer[(*bufferoffset)++]=0xdd;
buffer[(*bufferoffset)++]=0x00;
buffer[(*bufferoffset)++]=0x04;
buffer[(*bufferoffset)++]=(ri >> 8) & 0xff;
buffer[(*bufferoffset)++]= ri & 0xff;
- } else {
- i+=strip[i+2]+2;
}
break;
- case 0xc4:
- case 0xdb:
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
+ case 0xc4: /* DHT */
+ case 0xdb: /* DQT */
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ *bufferoffset+=datalen+2;
break;
- case 0xda:
+ case 0xda: /* SOS */
if(no==0){
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ *bufferoffset+=datalen+2;
} else {
buffer[(*bufferoffset)++]=0xff;
buffer[(*bufferoffset)++]=
(unsigned char)(0xd0 | ((no-1)%8));
- i+=strip[i+2]+2;
}
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), (*striplength)-i-1);
- *bufferoffset+=(*striplength)-i-1;
+ i += datalen + 1;
+ /* copy remainder of strip */
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i]), *striplength - i);
+ *bufferoffset+= *striplength - i;
return(1);
default:
- i+=strip[i+2]+2;
+ /* ignore any other marker */
+ break;
}
+ i += datalen + 1;
}
-
+ /* failed to find SOS marker */
return(0);
}
#endif
Description: Buffer overflow in gif2tiff
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2450
Bug-Debian: http://bugs.debian.org/719303
Index: tiff-4.0.3/tools/gif2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/gif2tiff.c 2013-08-22 11:46:11.960846910 -0400
+++ tiff-4.0.3/tools/gif2tiff.c 2013-08-22 11:46:11.956846910 -0400
@@ -333,6 +333,8 @@
int status = 1;
datasize = getc(infile);
+ if (datasize > 12)
+ return 0;
clear = 1 << datasize;
eoi = clear + 1;
avail = clear + 2;
Description: use after free in tiff2pdf
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2449
Bug-Debian: http://bugs.debian.org/719303
Index: tiff-4.0.3/tools/tiff2pdf.c
===================================================================
--- tiff-4.0.3.orig/tools/tiff2pdf.c 2013-08-22 11:46:37.292847242 -0400
+++ tiff-4.0.3/tools/tiff2pdf.c 2013-08-22 11:46:37.292847242 -0400
@@ -2461,7 +2461,8 @@
(unsigned long) t2p->tiff_datasize,
TIFFFileName(input));
t2p->t2p_error = T2P_ERR_ERROR;
- _TIFFfree(buffer);
+ _TIFFfree(buffer);
+ return(0);
} else {
buffer=samplebuffer;
t2p->tiff_datasize *= t2p->tiff_samplesperpixel;
Description: OOB write in gif2tiff
Bug-Redhat: https://bugzilla.redhat.com/show_bug.cgi?id=996468
Index: tiff-4.0.3/tools/gif2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/gif2tiff.c 2013-08-24 11:17:13.546447901 -0400
+++ tiff-4.0.3/tools/gif2tiff.c 2013-08-24 11:17:13.546447901 -0400
@@ -400,6 +400,10 @@
}
if (oldcode == -1) {
+ if (code >= clear) {
+ fprintf(stderr, "bad input: code=%d is larger than clear=%d\n",code, clear);
+ return 0;
+ }
*(*fill)++ = suffix[code];
firstchar = oldcode = code;
return 1;
@@ -14,12 +14,7 @@ url = http://www.imagemagick.org/download/delegates/tiff-${:version}.tar.gz
md5sum = 051c1068e6a0627f461948c365290410
patch-options = -p1
patches =
-${:_profile_base_location_}/CVE-2012-4564.patch#45667ee618dbe78acce1129706556124
+${:_profile_base_location_}/tiff_4.0.3-12.3.debian.patch#c246dc095f57f869b35cf40a32aa8366
${:_profile_base_location_}/CVE-2013-1960.patch#21a3d119cd3eeadd35ccc355fbd748cf
${:_profile_base_location_}/CVE-2013-1961.patch#bb219740a815b9b47698b83d0ae9f82a
${:_profile_base_location_}/CVE-2013-4231.patch#f6ff024c8df861a6dbb5a0ecd8a0f853
${:_profile_base_location_}/CVE-2013-4232.patch#b439184b3a5f434a3e3235f611b54a89
${:_profile_base_location_}/CVE-2013-4244.patch#2acff059c6156953aadb436b475e5acb
configure-options =
--disable-static
--without-x
...
Description: call glFlush() in tiffgt
Author: Micksa (micksa-launchpad)
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2401
Bug-Ubuntu: https://bugs.launchpad.net/ubuntu/+source/tiff/+bug/797166
Index: tiff-4.0.3/tools/tiffgt.c
===================================================================
--- tiff-4.0.3.orig/tools/tiffgt.c 2013-06-23 10:36:50.575629499 -0400
+++ tiff-4.0.3/tools/tiffgt.c 2013-06-23 10:36:50.571629497 -0400
@@ -287,6 +287,7 @@
raster_draw(void)
{
glDrawPixels(img.width, img.height, GL_RGBA, GL_UNSIGNED_BYTE, (const GLvoid *) raster);
+ glFlush();
}
static void
Index: tiff-4.0.3/tools/ppm2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/ppm2tiff.c 2013-06-23 10:36:50.779629492 -0400
+++ tiff-4.0.3/tools/ppm2tiff.c 2013-06-23 10:36:50.775629494 -0400
@@ -89,6 +89,7 @@
int c;
extern int optind;
extern char* optarg;
+ tmsize_t scanline_size;
if (argc < 2) {
fprintf(stderr, "%s: Too few arguments\n", argv[0]);
@@ -237,8 +238,16 @@
}
if (TIFFScanlineSize(out) > linebytes)
buf = (unsigned char *)_TIFFmalloc(linebytes);
- else
- buf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
+ else {
+ scanline_size = TIFFScanlineSize(out);
+ if (scanline_size != 0)
+ buf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
+ else {
+ fprintf(stderr, "%s: scanline size overflow\n",infile);
+ (void) TIFFClose(out);
+ exit(-2);
+ }
+ }
if (resolution > 0) {
TIFFSetField(out, TIFFTAG_XRESOLUTION, resolution);
TIFFSetField(out, TIFFTAG_YRESOLUTION, resolution);
Index: tiff-4.0.3/tools/tiff2pdf.c
===================================================================
--- tiff-4.0.3.orig/tools/tiff2pdf.c 2013-06-23 10:36:50.979629486 -0400
+++ tiff-4.0.3/tools/tiff2pdf.c 2013-06-23 10:36:50.975629486 -0400
@@ -3341,33 +3341,56 @@
uint32 height){
tsize_t i=0;
- uint16 ri =0;
- uint16 v_samp=1;
- uint16 h_samp=1;
- int j=0;
-
- i++;
-
- while(i<(*striplength)){
+
+ while (i < *striplength) {
+ tsize_t datalen;
+ uint16 ri;
+ uint16 v_samp;
+ uint16 h_samp;
+ int j;
+ int ncomp;
+
+ /* marker header: one or more FFs */
+ if (strip[i] != 0xff)
+ return(0);
+ i++;
+ while (i < *striplength && strip[i] == 0xff)
+ i++;
+ if (i >= *striplength)
+ return(0);
+ /* SOI is the only pre-SOS marker without a length word */
+ if (strip[i] == 0xd8)
+ datalen = 0;
+ else {
+ if ((*striplength - i) <= 2)
+ return(0);
+ datalen = (strip[i+1] << 8) | strip[i+2];
+ if (datalen < 2 || datalen >= (*striplength - i))
+ return(0);
+ }
switch( strip[i] ){
- case 0xd8:
- /* SOI - start of image */
+ case 0xd8: /* SOI - start of image */
_TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), 2);
*bufferoffset+=2;
- i+=2;
break;
- case 0xc0:
- case 0xc1:
- case 0xc3:
- case 0xc9:
- case 0xca:
+ case 0xc0: /* SOF0 */
+ case 0xc1: /* SOF1 */
+ case 0xc3: /* SOF3 */
+ case 0xc9: /* SOF9 */
+ case 0xca: /* SOF10 */
if(no==0){
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- for(j=0;j<buffer[*bufferoffset+9];j++){
- if( (buffer[*bufferoffset+11+(2*j)]>>4) > h_samp)
- h_samp = (buffer[*bufferoffset+11+(2*j)]>>4);
- if( (buffer[*bufferoffset+11+(2*j)] & 0x0f) > v_samp)
- v_samp = (buffer[*bufferoffset+11+(2*j)] & 0x0f);
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ ncomp = buffer[*bufferoffset+9];
+ if (ncomp < 1 || ncomp > 4)
+ return(0);
+ v_samp=1;
+ h_samp=1;
+ for(j=0;j<ncomp;j++){
+ uint16 samp = buffer[*bufferoffset+11+(3*j)];
+ if( (samp>>4) > h_samp)
+ h_samp = (samp>>4);
+ if( (samp & 0x0f) > v_samp)
+ v_samp = (samp & 0x0f);
}
v_samp*=8;
h_samp*=8;
@@ -3381,45 +3404,43 @@
(unsigned char) ((height>>8) & 0xff);
buffer[*bufferoffset+6]=
(unsigned char) (height & 0xff);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
-
+ *bufferoffset+=datalen+2;
+ /* insert a DRI marker */
buffer[(*bufferoffset)++]=0xff;
buffer[(*bufferoffset)++]=0xdd;
buffer[(*bufferoffset)++]=0x00;
buffer[(*bufferoffset)++]=0x04;
buffer[(*bufferoffset)++]=(ri >> 8) & 0xff;
buffer[(*bufferoffset)++]= ri & 0xff;
- } else {
- i+=strip[i+2]+2;
}
break;
- case 0xc4:
- case 0xdb:
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
+ case 0xc4: /* DHT */
+ case 0xdb: /* DQT */
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ *bufferoffset+=datalen+2;
break;
- case 0xda:
+ case 0xda: /* SOS */
if(no==0){
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), strip[i+2]+2);
- *bufferoffset+=strip[i+2]+2;
- i+=strip[i+2]+2;
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), datalen+2);
+ *bufferoffset+=datalen+2;
} else {
buffer[(*bufferoffset)++]=0xff;
buffer[(*bufferoffset)++]=
(unsigned char)(0xd0 | ((no-1)%8));
- i+=strip[i+2]+2;
}
- _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i-1]), (*striplength)-i-1);
- *bufferoffset+=(*striplength)-i-1;
+ i += datalen + 1;
+ /* copy remainder of strip */
+ _TIFFmemcpy(&(buffer[*bufferoffset]), &(strip[i]), *striplength - i);
+ *bufferoffset+= *striplength - i;
return(1);
default:
- i+=strip[i+2]+2;
+ /* ignore any other marker */
+ break;
}
+ i += datalen + 1;
}
-
+ /* failed to find SOS marker */
return(0);
}
#endif
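The rewritten loop above walks the JPEG markers embedded in an OJPEG strip: every pre-SOS marker except SOI carries a two-byte big-endian length that counts the length bytes themselves, and the patch refuses to copy a segment whose declared length would run past the end of the strip. A minimal sketch of that validation, reusing the hunk's variable names (the helper itself is hypothetical, not part of libtiff):

#include <stddef.h>

/* Hypothetical helper mirroring the checks added above: return the segment
 * length declared by the marker byte at strip[i], or -1 if it cannot be
 * trusted.  The caller guarantees i < striplength, as in the hunk. */
static long jpeg_segment_length(const unsigned char *strip, size_t striplength, size_t i)
{
    if (strip[i] == 0xd8)                    /* SOI: no length word */
        return 0;
    if (striplength - i <= 2)                /* no room for the length bytes */
        return -1;
    long datalen = (strip[i+1] << 8) | strip[i+2];   /* big-endian, counts itself */
    if (datalen < 2 || (size_t)datalen >= striplength - i)
        return -1;                           /* would read past the strip */
    return datalen;
}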
Index: tiff-4.0.3/contrib/dbs/xtiff/xtiff.c Index: tiff-4.0.3/contrib/dbs/xtiff/xtiff.c
=================================================================== ===================================================================
--- tiff-4.0.3.orig/contrib/dbs/xtiff/xtiff.c 2013-06-23 10:36:51.163629483 -0400 --- tiff-4.0.3.orig/contrib/dbs/xtiff/xtiff.c 2013-06-23 10:36:51.163629483 -0400
...@@ -766,3 +960,941 @@ Index: tiff-4.0.3/tools/tiff2bw.c ...@@ -766,3 +960,941 @@ Index: tiff-4.0.3/tools/tiff2bw.c
TIFFSetField(out, TIFFTAG_IMAGEDESCRIPTION, thing); TIFFSetField(out, TIFFTAG_IMAGEDESCRIPTION, thing);
TIFFSetField(out, TIFFTAG_SOFTWARE, "tiff2bw"); TIFFSetField(out, TIFFTAG_SOFTWARE, "tiff2bw");
outbuf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out)); outbuf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
Description: Buffer overflow in gif2tiff
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2450
Bug-Debian: http://bugs.debian.org/719303
Index: tiff-4.0.3/tools/gif2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/gif2tiff.c 2013-08-22 11:46:11.960846910 -0400
+++ tiff-4.0.3/tools/gif2tiff.c 2013-08-22 11:46:11.956846910 -0400
@@ -333,6 +333,8 @@
int status = 1;
datasize = getc(infile);
+ if (datasize > 12)
+ return 0;
clear = 1 << datasize;
eoi = clear + 1;
avail = clear + 2;
Description: use after free in tiff2pdf
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2449
Bug-Debian: http://bugs.debian.org/719303
Index: tiff-4.0.3/tools/tiff2pdf.c
===================================================================
--- tiff-4.0.3.orig/tools/tiff2pdf.c 2013-08-22 11:46:37.292847242 -0400
+++ tiff-4.0.3/tools/tiff2pdf.c 2013-08-22 11:46:37.292847242 -0400
@@ -2461,7 +2461,8 @@
(unsigned long) t2p->tiff_datasize,
TIFFFileName(input));
t2p->t2p_error = T2P_ERR_ERROR;
- _TIFFfree(buffer);
+ _TIFFfree(buffer);
+ return(0);
} else {
buffer=samplebuffer;
t2p->tiff_datasize *= t2p->tiff_samplesperpixel;
Description: OOB write in gif2tiff
Bug-Redhat: https://bugzilla.redhat.com/show_bug.cgi?id=996468
Index: tiff-4.0.3/tools/gif2tiff.c
===================================================================
--- tiff-4.0.3.orig/tools/gif2tiff.c 2013-08-24 11:17:13.546447901 -0400
+++ tiff-4.0.3/tools/gif2tiff.c 2013-08-24 11:17:13.546447901 -0400
@@ -400,6 +400,10 @@
}
if (oldcode == -1) {
+ if (code >= clear) {
+ fprintf(stderr, "bad input: code=%d is larger than clear=%d\n",code, clear);
+ return 0;
+ }
*(*fill)++ = suffix[code];
firstchar = oldcode = code;
return 1;
Index: tiff/tools/gif2tiff.c
===================================================================
--- tiff.orig/tools/gif2tiff.c
+++ tiff/tools/gif2tiff.c
@@ -280,6 +280,10 @@ readgifimage(char* mode)
fprintf(stderr, "no colormap present for image\n");
return (0);
}
+ if (width == 0 || height == 0) {
+ fprintf(stderr, "Invalid value of width or height\n");
+ return(0);
+ }
if ((raster = (unsigned char*) _TIFFmalloc(width*height+EXTRAFUDGE)) == NULL) {
fprintf(stderr, "not enough memory for image\n");
return (0);
@@ -404,6 +408,10 @@ process(register int code, unsigned char
fprintf(stderr, "bad input: code=%d is larger than clear=%d\n",code, clear);
return 0;
}
+ if (*fill >= raster + width*height) {
+ fprintf(stderr, "raster full before eoi code\n");
+ return 0;
+ }
*(*fill)++ = suffix[code];
firstchar = oldcode = code;
return 1;
@@ -434,6 +442,10 @@ process(register int code, unsigned char
}
oldcode = incode;
do {
+ if (*fill >= raster + width*height) {
+ fprintf(stderr, "raster full before eoi code\n");
+ return 0;
+ }
*(*fill)++ = *--stackp;
} while (stackp > stack);
return 1;
Description: fix for Debian bug #741451
tiffcp crashes when converting JPEG-encoded TIFF to a different
 encoding (like none or lzw). For example, this will probably fail:
.
tiffcp -c none jpeg_encoded_file.tif output.tif
.
The reason is that when the input file contains JPEG data,
the tiffcp code forces conversion to RGB space. However,
the output normally inherits YCbCr subsampling parameters
from the input, which leads to a smaller working buffer
than necessary. The buffer is subsequently overrun inside
cpStripToTile() (called from writeBufferToContigTiles).
 Note that the resulting TIFF file would be scrambled even
 if tiffcp did not crash, since the output file would contain
 RGB data interpreted as subsampled YCbCr values.
 .
 This patch fixes the problem by forcing RGB space on the output
 TIFF if the input is JPEG-encoded and the output is *not* JPEG-encoded.
Origin: upstream, http://bugzilla.maptools.org/show_bug.cgi?id=2480
Author: Tomasz Buchert <tomasz.buchert@inria.fr>
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2480
Bug-Debian: http://bugs.debian.org/741451
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -629,6 +629,12 @@
TIFFSetField(out, TIFFTAG_PHOTOMETRIC,
samplesperpixel == 1 ?
PHOTOMETRIC_LOGL : PHOTOMETRIC_LOGLUV);
+ else if (input_compression == COMPRESSION_JPEG &&
+ samplesperpixel == 3) {
+ /* RGB conversion was forced above
+ hence the output will be of the same type */
+ TIFFSetField(out, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_RGB);
+ }
else
CopyTag(TIFFTAG_PHOTOMETRIC, 1, TIFF_SHORT);
if (fillorder != 0)
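The description above traces the overrun to the output inheriting YCbCr subsampling tags while tiffcp actually writes RGB data. Below is a minimal, hedged sketch (not part of tiffcp; the scratch file names are placeholders, and the warnings libtiff prints when closing the empty files are harmless here) that asks libtiff for the scanline size of a 2x2-subsampled YCbCr layout versus plain RGB at the same width; the first is roughly half the second, which is the "smaller working buffer" the description refers to.

#include <stdio.h>
#include <tiffio.h>

int main(void)
{
    uint32 width = 640;
    TIFF *ycc = TIFFOpen("ycc-scratch.tif", "w");   /* placeholder file names */
    TIFF *rgb = TIFFOpen("rgb-scratch.tif", "w");
    if (!ycc || !rgb)
        return 1;

    /* output that inherited YCbCr + 2x2 subsampling from a JPEG-encoded input */
    TIFFSetField(ycc, TIFFTAG_IMAGEWIDTH, width);
    TIFFSetField(ycc, TIFFTAG_IMAGELENGTH, 1);
    TIFFSetField(ycc, TIFFTAG_SAMPLESPERPIXEL, 3);
    TIFFSetField(ycc, TIFFTAG_BITSPERSAMPLE, 8);
    TIFFSetField(ycc, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_YCBCR);
    TIFFSetField(ycc, TIFFTAG_YCBCRSUBSAMPLING, 2, 2);

    /* output forced to RGB, as the patch does when recompressing */
    TIFFSetField(rgb, TIFFTAG_IMAGEWIDTH, width);
    TIFFSetField(rgb, TIFFTAG_IMAGELENGTH, 1);
    TIFFSetField(rgb, TIFFTAG_SAMPLESPERPIXEL, 3);
    TIFFSetField(rgb, TIFFTAG_BITSPERSAMPLE, 8);
    TIFFSetField(rgb, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_RGB);

    printf("YCbCr 2x2 scanline: %ld bytes\n", (long)TIFFScanlineSize(ycc));
    printf("RGB scanline:       %ld bytes\n", (long)TIFFScanlineSize(rgb));
    TIFFClose(ycc);
    TIFFClose(rgb);
    return 0;
}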
Description: CVE-2014-9330
Integer overflow in bmp2tiff
Origin: upstream, http://bugzilla.maptools.org/show_bug.cgi?id=2494
Bug: http://bugzilla.maptools.org/show_bug.cgi?id=2494
Bug-Debian: http://bugs.debian.org/773987
Index: tiff/tools/bmp2tiff.c
===================================================================
--- tiff.orig/tools/bmp2tiff.c
+++ tiff/tools/bmp2tiff.c
@@ -1,4 +1,4 @@
-/* $Id: bmp2tiff.c,v 1.23 2010-03-10 18:56:49 bfriesen Exp $
+/* $Id: bmp2tiff.c,v 1.24 2014-12-21 15:15:32 erouault Exp $
*
* Project: libtiff tools
* Purpose: Convert Windows BMP files in TIFF.
@@ -403,6 +403,13 @@ main(int argc, char* argv[])
width = info_hdr.iWidth;
length = (info_hdr.iHeight > 0) ? info_hdr.iHeight : -info_hdr.iHeight;
+ if( width <= 0 || length <= 0 )
+ {
+ TIFFError(infilename,
+ "Invalid dimensions of BMP file" );
+ close(fd);
+ return -1;
+ }
switch (info_hdr.iBitCount)
{
@@ -593,6 +600,14 @@ main(int argc, char* argv[])
compr_size = file_hdr.iSize - file_hdr.iOffBits;
uncompr_size = width * length;
+ /* Detect int overflow */
+ if( uncompr_size / width != length )
+ {
+ TIFFError(infilename,
+ "Invalid dimensions of BMP file" );
+ close(fd);
+ return -1;
+ }
comprbuf = (unsigned char *) _TIFFmalloc( compr_size );
if (!comprbuf) {
TIFFError(infilename,
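The check added above detects the width * length overflow by dividing the product back: if the multiplication wrapped, the quotient no longer equals length. A tiny standalone illustration of the idiom (mul_overflows_u32 is made up for this sketch, not a libtiff function):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper: report whether a*b wraps in 32-bit arithmetic. */
static int mul_overflows_u32(uint32_t a, uint32_t b, uint32_t *out)
{
    uint32_t product = a * b;              /* may wrap around */
    if (a != 0 && product / a != b)        /* wrapped: quotient differs */
        return 1;
    *out = product;
    return 0;
}

int main(void)
{
    uint32_t size;
    if (mul_overflows_u32(0x10000, 0x10001, &size))
        printf("overflow detected\n");     /* this branch is taken */
    else
        printf("size = %u\n", size);
    return 0;
}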
From 0782c759084daaf9e4de7ee6be7543081823455e Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 20:58:29 +0000
Subject: [PATCH] * tools/tiff2bw.c: when Photometric=RGB, the utility only
works if SamplesPerPixel = 3. Enforce that
http://bugzilla.maptools.org/show_bug.cgi?id=2485 (CVE-2014-8127)
---
ChangeLog | 6 ++++++
tools/tiff2bw.c | 5 +++++
2 files changed, 11 insertions(+)
diff --git a/tools/tiff2bw.c b/tools/tiff2bw.c
index 22467cd..94b8e31 100644
--- a/tools/tiff2bw.c
+++ b/tools/tiff2bw.c
@@ -171,6 +171,11 @@ main(int argc, char* argv[])
argv[optind], samplesperpixel);
return (-1);
}
+ if( photometric == PHOTOMETRIC_RGB && samplesperpixel != 3) {
+ fprintf(stderr, "%s: Bad samples/pixel %u for PHOTOMETRIC_RGB.\n",
+ argv[optind], samplesperpixel);
+ return (-1);
+ }
TIFFGetField(in, TIFFTAG_BITSPERSAMPLE, &bitspersample);
if (bitspersample != 8) {
fprintf(stderr,
From 3996fa0f84f4a8b7e65fe4b8f0681711022034ea Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 20:04:31 +0000
Subject: [PATCH] * tools/pal2rgb.c, tools/thumbnail.c: fix crash by disabling
TIFFTAG_INKNAMES copying. The right fix would be to properly copy it, but not
worth the burden for those esoteric utilities.
http://bugzilla.maptools.org/show_bug.cgi?id=2484 (CVE-2014-8127)
---
ChangeLog | 7 +++++++
tools/pal2rgb.c | 2 +-
tools/thumbnail.c | 2 +-
3 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/tools/pal2rgb.c b/tools/pal2rgb.c
index bfe7899..3fc3de3 100644
--- a/tools/pal2rgb.c
+++ b/tools/pal2rgb.c
@@ -372,7 +372,7 @@ static struct cpTag {
{ TIFFTAG_CLEANFAXDATA, 1, TIFF_SHORT },
{ TIFFTAG_CONSECUTIVEBADFAXLINES, 1, TIFF_LONG },
{ TIFFTAG_INKSET, 1, TIFF_SHORT },
- { TIFFTAG_INKNAMES, 1, TIFF_ASCII },
+ /*{ TIFFTAG_INKNAMES, 1, TIFF_ASCII },*/ /* Needs much more complicated logic. See tiffcp */
{ TIFFTAG_DOTRANGE, 2, TIFF_SHORT },
{ TIFFTAG_TARGETPRINTER, 1, TIFF_ASCII },
{ TIFFTAG_SAMPLEFORMAT, 1, TIFF_SHORT },
diff --git a/tools/thumbnail.c b/tools/thumbnail.c
index c50bbff..73f9c34 100644
--- a/tools/thumbnail.c
+++ b/tools/thumbnail.c
@@ -257,7 +257,7 @@ static struct cpTag {
{ TIFFTAG_CLEANFAXDATA, 1, TIFF_SHORT },
{ TIFFTAG_CONSECUTIVEBADFAXLINES, 1, TIFF_LONG },
{ TIFFTAG_INKSET, 1, TIFF_SHORT },
- { TIFFTAG_INKNAMES, 1, TIFF_ASCII },
+ /*{ TIFFTAG_INKNAMES, 1, TIFF_ASCII },*/ /* Needs much more complicated logic. See tiffcp */
{ TIFFTAG_DOTRANGE, 2, TIFF_SHORT },
{ TIFFTAG_TARGETPRINTER, 1, TIFF_ASCII },
{ TIFFTAG_SAMPLEFORMAT, 1, TIFF_SHORT },
From 1f7359b00663804d96c3a102bcb6ead9812c1509 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Tue, 23 Dec 2014 10:15:35 +0000
Subject: [PATCH] * libtiff/tif_read.c: fix several invalid comparisons of a
uint64 value with <= 0 by casting it to int64 first. This solves crashing bug
on corrupted images generated by afl.
---
ChangeLog | 6 ++++++
libtiff/tif_read.c | 6 +++---
2 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/libtiff/tif_read.c b/libtiff/tif_read.c
index 2ba822a..dfc5b07 100644
--- a/libtiff/tif_read.c
+++ b/libtiff/tif_read.c
@@ -458,7 +458,7 @@ TIFFReadRawStrip(TIFF* tif, uint32 strip, void* buf, tmsize_t size)
return ((tmsize_t)(-1));
}
bytecount = td->td_stripbytecount[strip];
- if (bytecount <= 0) {
+ if ((int64)bytecount <= 0) {
#if defined(__WIN32__) && (defined(_MSC_VER) || defined(__MINGW32__))
TIFFErrorExt(tif->tif_clientdata, module,
"%I64u: Invalid strip byte count, strip %lu",
@@ -498,7 +498,7 @@ TIFFFillStrip(TIFF* tif, uint32 strip)
if ((tif->tif_flags&TIFF_NOREADRAW)==0)
{
uint64 bytecount = td->td_stripbytecount[strip];
- if (bytecount <= 0) {
+ if ((int64)bytecount <= 0) {
#if defined(__WIN32__) && (defined(_MSC_VER) || defined(__MINGW32__))
TIFFErrorExt(tif->tif_clientdata, module,
"Invalid strip byte count %I64u, strip %lu",
@@ -801,7 +801,7 @@ TIFFFillTile(TIFF* tif, uint32 tile)
if ((tif->tif_flags&TIFF_NOREADRAW)==0)
{
uint64 bytecount = td->td_stripbytecount[tile];
- if (bytecount <= 0) {
+ if ((int64)bytecount <= 0) {
#if defined(__WIN32__) && (defined(_MSC_VER) || defined(__MINGW32__))
TIFFErrorExt(tif->tif_clientdata, module,
"%I64u: Invalid tile byte count, tile %lu",
From 662f74445b2fea2eeb759c6524661118aef567ca Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 15:15:31 +0000
Subject: [PATCH] Fix various crasher bugs on fuzzed images. *
libtiff/tif_dir.c: TIFFSetField(): refuse to set negative values for
TIFFTAG_XRESOLUTION and TIFFTAG_YRESOLUTION that cause asserts when writing
the directory * libtiff/tif_dirread.c: TIFFReadDirectory(): refuse to read
ColorMap or TransferFunction if BitsPerSample has not yet been read,
otherwise reading it later will cause user code to crash if BitsPerSample > 1
* libtiff/tif_getimage.c: TIFFRGBAImageOK(): return FALSE if LOGLUV with
SamplesPerPixel != 3, or if CIELAB with SamplesPerPixel != 3 or BitsPerSample
!= 8 * libtiff/tif_next.c: in the "run mode", use tilewidth for tiled images
instead of imagewidth to avoid crash * tools/bmp2tiff.c: fix crash due to int
overflow related to input BMP dimensions * tools/tiff2pdf.c: fix crash due to
invalid tile count (should likely be checked by libtiff too). Detect invalid
settings of BitsPerSample/SamplesPerPixel for CIELAB / ITULAB *
tools/tiffcrop.c: fix crash due to invalid TileWidth/TileHeight *
tools/tiffdump.c: fix crash due to overflow of entry count.
---
ChangeLog | 19 +++++++++++++++++++
libtiff/tif_dir.c | 21 +++++++++++++++++++--
libtiff/tif_dirread.c | 17 +++++++++++++++++
libtiff/tif_getimage.c | 15 +++++++++++++++
libtiff/tif_next.c | 2 ++
tools/bmp2tiff.c | 15 +++++++++++++++
tools/tiff2pdf.c | 41 +++++++++++++++++++++++++++++++++++++++++
tools/tiffcrop.c | 7 ++++---
tools/tiffdump.c | 9 ++++++---
9 files changed, 138 insertions(+), 8 deletions(-)
diff --git a/libtiff/tif_dir.c b/libtiff/tif_dir.c
index 98cf66d..ab43a28 100644
--- a/libtiff/tif_dir.c
+++ b/libtiff/tif_dir.c
@@ -160,6 +160,7 @@ _TIFFVSetField(TIFF* tif, uint32 tag, va_list ap)
TIFFDirectory* td = &tif->tif_dir;
int status = 1;
uint32 v32, i, v;
+ double dblval;
char* s;
const TIFFField *fip = TIFFFindField(tif, tag, TIFF_ANY);
uint32 standard_tag = tag;
@@ -284,10 +285,16 @@ _TIFFVSetField(TIFF* tif, uint32 tag, va_list ap)
setDoubleArrayOneValue(&td->td_smaxsamplevalue, va_arg(ap, double), td->td_samplesperpixel);
break;
case TIFFTAG_XRESOLUTION:
- td->td_xresolution = (float) va_arg(ap, double);
+ dblval = va_arg(ap, double);
+ if( dblval < 0 )
+ goto badvaluedouble;
+ td->td_xresolution = (float) dblval;
break;
case TIFFTAG_YRESOLUTION:
- td->td_yresolution = (float) va_arg(ap, double);
+ dblval = va_arg(ap, double);
+ if( dblval < 0 )
+ goto badvaluedouble;
+ td->td_yresolution = (float) dblval;
break;
case TIFFTAG_PLANARCONFIG:
v = (uint16) va_arg(ap, uint16_vap);
@@ -694,6 +701,16 @@ _TIFFVSetField(TIFF* tif, uint32 tag, va_list ap)
va_end(ap);
}
return (0);
+badvaluedouble:
+ {
+ const TIFFField* fip=TIFFFieldWithTag(tif,tag);
+ TIFFErrorExt(tif->tif_clientdata, module,
+ "%s: Bad value %f for \"%s\" tag",
+ tif->tif_name, dblval,
+ fip ? fip->field_name : "Unknown");
+ va_end(ap);
+ }
+ return (0);
}
/*
diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
index 391c823..f66c9a7 100644
--- a/libtiff/tif_dirread.c
+++ b/libtiff/tif_dirread.c
@@ -3430,6 +3430,8 @@ TIFFReadDirectory(TIFF* tif)
const TIFFField* fip;
uint32 fii=FAILED_FII;
toff_t nextdiroff;
+ int bitspersample_read = FALSE;
+
tif->tif_diroff=tif->tif_nextdiroff;
if (!TIFFCheckDirOffset(tif,tif->tif_nextdiroff))
return 0; /* last offset or bad offset (IFD looping) */
@@ -3706,6 +3708,8 @@ TIFFReadDirectory(TIFF* tif)
}
if (!TIFFSetField(tif,dp->tdir_tag,value))
goto bad;
+ if( dp->tdir_tag == TIFFTAG_BITSPERSAMPLE )
+ bitspersample_read = TRUE;
}
break;
case TIFFTAG_SMINSAMPLEVALUE:
@@ -3763,6 +3767,19 @@ TIFFReadDirectory(TIFF* tif)
uint32 countrequired;
uint32 incrementpersample;
uint16* value=NULL;
+ /* It would be dangerous to instanciate those tag values */
+ /* since if td_bitspersample has not yet been read (due to */
+ /* unordered tags), it could be read afterwards with a */
+ /* values greater than the default one (1), which may cause */
+ /* crashes in user code */
+ if( !bitspersample_read )
+ {
+ fip = TIFFFieldWithTag(tif,dp->tdir_tag);
+ TIFFWarningExt(tif->tif_clientdata,module,
+ "Ignoring %s since BitsPerSample tag not found",
+ fip ? fip->field_name : "unknown tagname");
+ continue;
+ }
countpersample=(1L<<tif->tif_dir.td_bitspersample);
if ((dp->tdir_tag==TIFFTAG_TRANSFERFUNCTION)&&(dp->tdir_count==(uint64)countpersample))
{
diff --git a/libtiff/tif_getimage.c b/libtiff/tif_getimage.c
index 074d32a..396ad08 100644
--- a/libtiff/tif_getimage.c
+++ b/libtiff/tif_getimage.c
@@ -182,8 +182,23 @@ TIFFRGBAImageOK(TIFF* tif, char emsg[1024])
"Planarconfiguration", td->td_planarconfig);
return (0);
}
+ if( td->td_samplesperpixel != 3 )
+ {
+ sprintf(emsg,
+ "Sorry, can not handle image with %s=%d",
+ "Samples/pixel", td->td_samplesperpixel);
+ return 0;
+ }
break;
case PHOTOMETRIC_CIELAB:
+ if( td->td_samplesperpixel != 3 || td->td_bitspersample != 8 )
+ {
+ sprintf(emsg,
+ "Sorry, can not handle image with %s=%d and %s=%d",
+ "Samples/pixel", td->td_samplesperpixel,
+ "Bits/sample", td->td_bitspersample);
+ return 0;
+ }
break;
default:
sprintf(emsg, "Sorry, can not handle image with %s=%d",
diff --git a/libtiff/tif_next.c b/libtiff/tif_next.c
index 55e2537..a53c716 100644
--- a/libtiff/tif_next.c
+++ b/libtiff/tif_next.c
@@ -102,6 +102,8 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize_t occ, uint16 s)
default: {
uint32 npixels = 0, grey;
uint32 imagewidth = tif->tif_dir.td_imagewidth;
+ if( isTiled(tif) )
+ imagewidth = tif->tif_dir.td_tilewidth;
/*
* The scanline is composed of a sequence of constant
diff --git a/tools/tiff2pdf.c b/tools/tiff2pdf.c
index dfda963..f202b41 100644
--- a/tools/tiff2pdf.c
+++ b/tools/tiff2pdf.c
@@ -1167,6 +1167,15 @@ void t2p_read_tiff_init(T2P* t2p, TIFF* input){
if( (TIFFGetField(input, TIFFTAG_PLANARCONFIG, &xuint16) != 0)
&& (xuint16 == PLANARCONFIG_SEPARATE ) ){
TIFFGetField(input, TIFFTAG_SAMPLESPERPIXEL, &xuint16);
+ if( (t2p->tiff_tiles[i].tiles_tilecount % xuint16) != 0 )
+ {
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Invalid tile count, %s",
+ TIFFFileName(input));
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
t2p->tiff_tiles[i].tiles_tilecount/= xuint16;
}
if( t2p->tiff_tiles[i].tiles_tilecount > 0){
@@ -1552,6 +1561,22 @@ void t2p_read_tiff_data(T2P* t2p, TIFF* input){
#endif
break;
case PHOTOMETRIC_CIELAB:
+ if( t2p->tiff_samplesperpixel != 3){
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Unsupported samplesperpixel = %d for CIELAB",
+ t2p->tiff_samplesperpixel);
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
+ if( t2p->tiff_bitspersample != 8){
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Invalid bitspersample = %d for CIELAB",
+ t2p->tiff_bitspersample);
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
t2p->pdf_labrange[0]= -127;
t2p->pdf_labrange[1]= 127;
t2p->pdf_labrange[2]= -127;
@@ -1567,6 +1592,22 @@ void t2p_read_tiff_data(T2P* t2p, TIFF* input){
t2p->pdf_colorspace=T2P_CS_LAB;
break;
case PHOTOMETRIC_ITULAB:
+ if( t2p->tiff_samplesperpixel != 3){
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Unsupported samplesperpixel = %d for ITULAB",
+ t2p->tiff_samplesperpixel);
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
+ if( t2p->tiff_bitspersample != 8){
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Invalid bitspersample = %d for ITULAB",
+ t2p->tiff_bitspersample);
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
t2p->pdf_labrange[0]=-85;
t2p->pdf_labrange[1]=85;
t2p->pdf_labrange[2]=-75;
diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
index f5530bb..4088463 100644
--- a/tools/tiffcrop.c
+++ b/tools/tiffcrop.c
@@ -1205,9 +1205,10 @@ static int writeBufferToContigTiles (TIFF* out, uint8* buf, uint32 imagelength,
tsize_t tilesize = TIFFTileSize(out);
unsigned char *tilebuf = NULL;
- TIFFGetField(out, TIFFTAG_TILELENGTH, &tl);
- TIFFGetField(out, TIFFTAG_TILEWIDTH, &tw);
- TIFFGetField(out, TIFFTAG_BITSPERSAMPLE, &bps);
+ if( !TIFFGetField(out, TIFFTAG_TILELENGTH, &tl) ||
+ !TIFFGetField(out, TIFFTAG_TILEWIDTH, &tw) ||
+ !TIFFGetField(out, TIFFTAG_BITSPERSAMPLE, &bps) )
+ return 1;
tile_buffsize = tilesize;
if (tilesize < (tsize_t)(tl * tile_rowsize))
diff --git a/tools/tiffdump.c b/tools/tiffdump.c
index cf5d62f..8247765 100644
--- a/tools/tiffdump.c
+++ b/tools/tiffdump.c
@@ -374,6 +374,8 @@ ReadDirectory(int fd, unsigned int ix, uint64 off)
void* datamem;
uint64 dataoffset;
int datatruncated;
+ int datasizeoverflow;
+
tag = *(uint16*)dp;
if (swabflag)
TIFFSwabShort(&tag);
@@ -412,13 +414,14 @@ ReadDirectory(int fd, unsigned int ix, uint64 off)
else
typewidth = datawidth[type];
datasize = count*typewidth;
+ datasizeoverflow = (typewidth > 0 && datasize / typewidth != count);
datafits = 1;
datamem = dp;
dataoffset = 0;
datatruncated = 0;
if (!bigtiff)
{
- if (datasize>4)
+ if (datasizeoverflow || datasize>4)
{
uint32 dataoffset32;
datafits = 0;
@@ -432,7 +435,7 @@ ReadDirectory(int fd, unsigned int ix, uint64 off)
}
else
{
- if (datasize>8)
+ if (datasizeoverflow || datasize>8)
{
datafits = 0;
datamem = NULL;
@@ -442,7 +445,7 @@ ReadDirectory(int fd, unsigned int ix, uint64 off)
}
dp += sizeof(uint64);
}
- if (datasize>0x10000)
+ if (datasizeoverflow || datasize>0x10000)
{
datatruncated = 1;
count = 0x10000/typewidth;
From 3206e0c752a62da1ae606867113ed3bf9bf73306 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 19:53:59 +0000
Subject: [PATCH] * tools/thumbnail.c: fix out-of-buffer write
http://bugzilla.maptools.org/show_bug.cgi?id=2489 (CVE-2014-8128)
---
ChangeLog | 5 +++++
tools/thumbnail.c | 8 +++++++-
2 files changed, 12 insertions(+), 1 deletion(-)
diff --git a/tools/thumbnail.c b/tools/thumbnail.c
index fab63f6..c50bbff 100644
--- a/tools/thumbnail.c
+++ b/tools/thumbnail.c
@@ -568,7 +568,13 @@ setImage1(const uint8* br, uint32 rw, uint32 rh)
err -= limit;
sy++;
if (err >= limit)
- rows[nrows++] = br + bpr*sy;
+ {
+ /* We should perhaps error loudly, but I can't make sense of that */
+ /* code... */
+ if( nrows == 256 )
+ break;
+ rows[nrows++] = br + bpr*sy;
+ }
}
setrow(row, nrows, rows);
row += tnw;
From 8b6e80fca434525497e5a31c3309a3bab5b3c1c8 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 18:52:42 +0000
Subject: [PATCH] * tools/thumbnail.c, tools/tiffcmp.c: only read/write
TIFFTAG_GROUP3OPTIONS or TIFFTAG_GROUP4OPTIONS if compression is
COMPRESSION_CCITTFAX3 or COMPRESSION_CCITTFAX4
http://bugzilla.maptools.org/show_bug.cgi?id=2493 (CVE-2014-8128)
---
ChangeLog | 7 +++++++
tools/thumbnail.c | 21 ++++++++++++++++++++-
tools/tiffcmp.c | 17 +++++++++++++++--
3 files changed, 42 insertions(+), 3 deletions(-)
diff --git a/tools/thumbnail.c b/tools/thumbnail.c
index a98a881..fab63f6 100644
--- a/tools/thumbnail.c
+++ b/tools/thumbnail.c
@@ -274,7 +274,26 @@ cpTags(TIFF* in, TIFF* out)
{
struct cpTag *p;
for (p = tags; p < &tags[NTAGS]; p++)
- cpTag(in, out, p->tag, p->count, p->type);
+ {
+ /* Horrible: but TIFFGetField() expects 2 arguments to be passed */
+ /* if we request a tag that is defined in a codec, but that codec */
+ /* isn't used */
+ if( p->tag == TIFFTAG_GROUP3OPTIONS )
+ {
+ uint16 compression;
+ if( !TIFFGetField(in, TIFFTAG_COMPRESSION, &compression) ||
+ compression != COMPRESSION_CCITTFAX3 )
+ continue;
+ }
+ if( p->tag == TIFFTAG_GROUP4OPTIONS )
+ {
+ uint16 compression;
+ if( !TIFFGetField(in, TIFFTAG_COMPRESSION, &compression) ||
+ compression != COMPRESSION_CCITTFAX4 )
+ continue;
+ }
+ cpTag(in, out, p->tag, p->count, p->type);
+ }
}
#undef NTAGS
diff --git a/tools/tiffcmp.c b/tools/tiffcmp.c
index 508a461..d6392af 100644
--- a/tools/tiffcmp.c
+++ b/tools/tiffcmp.c
@@ -260,6 +260,7 @@ tiffcmp(TIFF* tif1, TIFF* tif2)
static int
cmptags(TIFF* tif1, TIFF* tif2)
{
+ uint16 compression1, compression2;
CmpLongField(TIFFTAG_SUBFILETYPE, "SubFileType");
CmpLongField(TIFFTAG_IMAGEWIDTH, "ImageWidth");
CmpLongField(TIFFTAG_IMAGELENGTH, "ImageLength");
@@ -276,8 +277,20 @@ cmptags(TIFF* tif1, TIFF* tif2)
CmpShortField(TIFFTAG_SAMPLEFORMAT, "SampleFormat");
CmpFloatField(TIFFTAG_XRESOLUTION, "XResolution");
CmpFloatField(TIFFTAG_YRESOLUTION, "YResolution");
- CmpLongField(TIFFTAG_GROUP3OPTIONS, "Group3Options");
- CmpLongField(TIFFTAG_GROUP4OPTIONS, "Group4Options");
+ if( TIFFGetField(tif1, TIFFTAG_COMPRESSION, &compression1) &&
+ compression1 == COMPRESSION_CCITTFAX3 &&
+ TIFFGetField(tif2, TIFFTAG_COMPRESSION, &compression2) &&
+ compression2 == COMPRESSION_CCITTFAX3 )
+ {
+ CmpLongField(TIFFTAG_GROUP3OPTIONS, "Group3Options");
+ }
+ if( TIFFGetField(tif1, TIFFTAG_COMPRESSION, &compression1) &&
+ compression1 == COMPRESSION_CCITTFAX4 &&
+ TIFFGetField(tif2, TIFFTAG_COMPRESSION, &compression2) &&
+ compression2 == COMPRESSION_CCITTFAX4 )
+ {
+ CmpLongField(TIFFTAG_GROUP4OPTIONS, "Group4Options");
+ }
CmpShortField(TIFFTAG_RESOLUTIONUNIT, "ResolutionUnit");
CmpShortField(TIFFTAG_PLANARCONFIG, "PlanarConfiguration");
CmpLongField(TIFFTAG_ROWSPERSTRIP, "RowsPerStrip");
From 266bc48054b018a2f1d74562aa48eb2f509436d5 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 17:36:36 +0000
Subject: [PATCH] * tools/tiff2pdf.c: check return code of TIFFGetField() when
reading TIFFTAG_SAMPLESPERPIXEL
---
ChangeLog | 5 +++++
tools/tiff2pdf.c | 10 +++++++++-
2 files changed, 14 insertions(+), 1 deletion(-)
Index: tiff-4.0.3/tools/tiff2pdf.c
===================================================================
--- tiff-4.0.3.orig/tools/tiff2pdf.c
+++ tiff-4.0.3/tools/tiff2pdf.c
@@ -1164,7 +1164,15 @@ void t2p_read_tiff_init(T2P* t2p, TIFF*
t2p->tiff_pages[i].page_tilecount;
if( (TIFFGetField(input, TIFFTAG_PLANARCONFIG, &xuint16) != 0)
&& (xuint16 == PLANARCONFIG_SEPARATE ) ){
- TIFFGetField(input, TIFFTAG_SAMPLESPERPIXEL, &xuint16);
+ if( !TIFFGetField(input, TIFFTAG_SAMPLESPERPIXEL, &xuint16) )
+ {
+ TIFFError(
+ TIFF2PDF_MODULE,
+ "Missing SamplesPerPixel, %s",
+ TIFFFileName(input));
+ t2p->t2p_error = T2P_ERR_ERROR;
+ return;
+ }
if( (t2p->tiff_tiles[i].tiles_tilecount % xuint16) != 0 )
{
TIFFError(
From cd82b5267ad4c10eb91e4ee8a716a81362cf851c Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Sun, 21 Dec 2014 18:07:48 +0000
Subject: [PATCH] * libtiff/tif_next.c: check that BitsPerSample = 2. Fixes
http://bugzilla.maptools.org/show_bug.cgi?id=2487 (CVE-2014-8129)
---
ChangeLog | 5 +++++
libtiff/tif_next.c | 17 +++++++++++++++++
2 files changed, 22 insertions(+)
diff --git a/libtiff/tif_next.c b/libtiff/tif_next.c
index a53c716..d834196 100644
--- a/libtiff/tif_next.c
+++ b/libtiff/tif_next.c
@@ -141,10 +141,27 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize_t occ, uint16 s)
return (0);
}
+static int
+NeXTPreDecode(TIFF* tif, uint16 s)
+{
+ static const char module[] = "NeXTPreDecode";
+ TIFFDirectory *td = &tif->tif_dir;
+ (void)s;
+
+ if( td->td_bitspersample != 2 )
+ {
+ TIFFErrorExt(tif->tif_clientdata, module, "Unsupported BitsPerSample = %d",
+ td->td_bitspersample);
+ return (0);
+ }
+ return (1);
+}
+
int
TIFFInitNeXT(TIFF* tif, int scheme)
{
(void) scheme;
+ tif->tif_predecode = NeXTPreDecode;
tif->tif_decoderow = NeXTDecode;
tif->tif_decodestrip = NeXTDecode;
tif->tif_decodetile = NeXTDecode;
From 40a5955cbf0df62b1f9e9bd7d9657b0070725d19 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Mon, 29 Dec 2014 12:09:11 +0000
Subject: [PATCH] * libtiff/tif_next.c: add new tests to check that we don't
read outside of the compressed input stream buffer.
* libtiff/tif_getimage.c: in OJPEG case, fix checks on strile width/height
---
ChangeLog | 9 +++++++++
libtiff/tif_getimage.c | 12 +++++++-----
libtiff/tif_next.c | 4 +++-
3 files changed, 19 insertions(+), 6 deletions(-)
diff --git a/libtiff/tif_getimage.c b/libtiff/tif_getimage.c
index a4f46d9..3ad8ee7 100644
--- a/libtiff/tif_getimage.c
+++ b/libtiff/tif_getimage.c
@@ -1871,7 +1871,7 @@ DECLAREContigPutFunc(putcontig8bitYCbCr42tile)
(void) y;
fromskew = (fromskew * 10) / 4;
- if ((h & 3) == 0 && (w & 1) == 0) {
+ if ((w & 3) == 0 && (h & 1) == 0) {
for (; h >= 2; h -= 2) {
x = w>>2;
do {
@@ -1948,7 +1948,7 @@ DECLAREContigPutFunc(putcontig8bitYCbCr41tile)
/* XXX adjust fromskew */
do {
x = w>>2;
- do {
+ while(x>0) {
int32 Cb = pp[4];
int32 Cr = pp[5];
@@ -1959,7 +1959,8 @@ DECLAREContigPutFunc(putcontig8bitYCbCr41tile)
cp += 4;
pp += 6;
- } while (--x);
+ x--;
+ }
if( (w&3) != 0 )
{
@@ -2050,7 +2051,7 @@ DECLAREContigPutFunc(putcontig8bitYCbCr21tile)
fromskew = (fromskew * 4) / 2;
do {
x = w>>1;
- do {
+ while(x>0) {
int32 Cb = pp[2];
int32 Cr = pp[3];
@@ -2059,7 +2060,8 @@ DECLAREContigPutFunc(putcontig8bitYCbCr21tile)
cp += 2;
pp += 4;
- } while (--x);
+ x --;
+ }
if( (w&1) != 0 )
{
diff --git a/libtiff/tif_next.c b/libtiff/tif_next.c
index d834196..dd669cc 100644
--- a/libtiff/tif_next.c
+++ b/libtiff/tif_next.c
@@ -71,7 +71,7 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize_t occ, uint16 s)
TIFFErrorExt(tif->tif_clientdata, module, "Fractional scanlines cannot be read");
return (0);
}
- for (row = buf; occ > 0; occ -= scanline, row += scanline) {
+ for (row = buf; cc > 0 && occ > 0; occ -= scanline, row += scanline) {
n = *bp++, cc--;
switch (n) {
case LITERALROW:
@@ -90,6 +90,8 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize_t occ, uint16 s)
* The scanline has a literal span that begins at some
* offset.
*/
+ if( cc < 4 )
+ goto bad;
off = (bp[0] * 256) + bp[1];
n = (bp[2] * 256) + bp[3];
if (cc < 4+n || off+n > scanline)
Picked from CVS: diff -u -r1.14 -r1.15
http://bugzilla.maptools.org/show_bug.cgi?id=2501
Author: Even Rouault <even.rouault@spatialys.com>
--- tiff-4.0.3.orig/tools/tiffdither.c
+++ tiff-4.0.3/tools/tiffdither.c
@@ -39,6 +39,7 @@
#endif
#include "tiffio.h"
+#include "tiffiop.h"
#define streq(a,b) (strcmp(a,b) == 0)
#define strneq(a,b,n) (strncmp(a,b,n) == 0)
@@ -56,7 +57,7 @@ static void usage(void);
* Floyd-Steinberg error propragation with threshold.
* This code is stolen from tiffmedian.
*/
-static void
+static int
fsdither(TIFF* in, TIFF* out)
{
unsigned char *outline, *inputline, *inptr;
@@ -68,14 +69,19 @@ fsdither(TIFF* in, TIFF* out)
int lastline, lastpixel;
int bit;
tsize_t outlinesize;
+ int errcode = 0;
imax = imagelength - 1;
jmax = imagewidth - 1;
inputline = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(in));
- thisline = (short *)_TIFFmalloc(imagewidth * sizeof (short));
- nextline = (short *)_TIFFmalloc(imagewidth * sizeof (short));
+ thisline = (short *)_TIFFmalloc(TIFFSafeMultiply(tmsize_t, imagewidth, sizeof (short)));
+ nextline = (short *)_TIFFmalloc(TIFFSafeMultiply(tmsize_t, imagewidth, sizeof (short)));
outlinesize = TIFFScanlineSize(out);
outline = (unsigned char *) _TIFFmalloc(outlinesize);
+ if (! (inputline && thisline && nextline && outline)) {
+ fprintf(stderr, "Out of memory.\n");
+ goto skip_on_error;
+ }
/*
* Get first line
@@ -93,7 +99,7 @@ fsdither(TIFF* in, TIFF* out)
nextline = tmpptr;
lastline = (i == imax);
if (TIFFReadScanline(in, inputline, i, 0) <= 0)
- break;
+ goto skip_on_error;
inptr = inputline;
nextptr = nextline;
for (j = 0; j < imagewidth; ++j)
@@ -131,13 +137,18 @@ fsdither(TIFF* in, TIFF* out)
}
}
if (TIFFWriteScanline(out, outline, i-1, 0) < 0)
- break;
+ goto skip_on_error;
}
+ goto exit_label;
+
skip_on_error:
+ errcode = 1;
+ exit_label:
_TIFFfree(inputline);
_TIFFfree(thisline);
_TIFFfree(nextline);
_TIFFfree(outline);
+ return errcode;
}
static uint16 compression = COMPRESSION_PACKBITS;
Patches by Petr Gajdos (pgajdos@suse.cz) from
http://bugzilla.maptools.org/show_bug.cgi?id=2499
--- tiff-4.0.3.orig/libtiff/tif_dirinfo.c
+++ tiff-4.0.3/libtiff/tif_dirinfo.c
@@ -141,6 +141,8 @@ tiffFields[] = {
{ TIFFTAG_FAXDCS, -1, -1, TIFF_ASCII, 0, TIFF_SETGET_ASCII, TIFF_SETGET_ASCII, FIELD_CUSTOM, TRUE, FALSE, "FaxDcs", NULL },
{ TIFFTAG_STONITS, 1, 1, TIFF_DOUBLE, 0, TIFF_SETGET_DOUBLE, TIFF_SETGET_UNDEFINED, FIELD_CUSTOM, 0, 0, "StoNits", NULL },
{ TIFFTAG_INTEROPERABILITYIFD, 1, 1, TIFF_IFD8, 0, TIFF_SETGET_UNDEFINED, TIFF_SETGET_UNDEFINED, FIELD_CUSTOM, 0, 0, "InteroperabilityIFDOffset", NULL },
+ { TIFFTAG_CONSECUTIVEBADFAXLINES, 1, 1, TIFF_LONG, 0, TIFF_SETGET_UINT32, TIFF_SETGET_UINT32, FIELD_CUSTOM, TRUE, FALSE, "ConsecutiveBadFaxLines", NULL },
+ { TIFFTAG_PREDICTOR, 1, 1, TIFF_SHORT, 0, TIFF_SETGET_UINT16, TIFF_SETGET_UINT16, FIELD_CUSTOM, FALSE, FALSE, "Predictor", NULL },
/* begin DNG tags */
{ TIFFTAG_DNGVERSION, 4, 4, TIFF_BYTE, 0, TIFF_SETGET_C0_UINT8, TIFF_SETGET_UNDEFINED, FIELD_CUSTOM, 0, 0, "DNGVersion", NULL },
{ TIFFTAG_DNGBACKWARDVERSION, 4, 4, TIFF_BYTE, 0, TIFF_SETGET_C0_UINT8, TIFF_SETGET_UNDEFINED, FIELD_CUSTOM, 0, 0, "DNGBackwardVersion", NULL },
[buildout] [buildout]
extends =
../xz-utils/buildout.cfg
parts = libtool parts = libtool
[libtool] [libtool]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
md5sum = d2f3b7d4627e69e13514a40e72a24d50 md5sum = ee9c087775aeb98ce53a9c69da865a55
url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.2.tar.gz url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.5.tar.xz
configure-options = configure-options =
--disable-static --disable-static
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
...@@ -11,8 +11,8 @@ parts = ...@@ -11,8 +11,8 @@ parts =
[libxml2] [libxml2]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = ftp://ftp.xmlsoft.org/libxml2/libxml2-2.9.1.tar.gz url = ftp://ftp.xmlsoft.org/libxml2/libxml2-2.9.2.tar.gz
md5sum = 9c0cfef285d5c4a5c80d00904ddab380 md5sum = 9e6a9aca9d155737868b3dc5fd82f788
configure-options = configure-options =
--disable-static --disable-static
--without-python --without-python
......
...@@ -5,5 +5,5 @@ parts = ...@@ -5,5 +5,5 @@ parts =
[libyaml] [libyaml]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://pyyaml.org/download/libyaml/yaml-0.1.4.tar.gz url = http://pyyaml.org/download/libyaml/yaml-0.1.6.tar.gz
md5sum = 36c852831d02cf90508c29852361d01b md5sum = 5fe00cda18ca5daeb43762b80c38e06e
[buildout]
parts =
lunzip
[lunzip]
recipe = slapos.recipe.cmmi
url = http://download.savannah.gnu.org/releases/lzip/lunzip/lunzip-1.6.tar.gz
md5sum = 5e6ad4fe91f235be64227bc9930986fe
configure-options =
--disable-static
[buildout]
parts =
lzo
[lzo]
recipe = slapos.recipe.cmmi
url = http://www.oberhumer.com/opensource/lzo/download/lzo-2.09.tar.gz
md5sum = c7ffc9a103afe2d1bba0b015e7aa887f
[buildout] [buildout]
extends = parts =
../autoconf/buildout.cfg make
../automake/buildout.cfg
[make] [make]
# make 3.82 breaks too many things. Stick with 3.81. # make 3.82 breaks too many things. Stick with 3.81.
......
[buildout]
extends =
../scipy/buildout.cfg
../rpy2/buildout.cfg
../pulp/buildout.cfg
../git/buildout.cfg
../mysql-python/buildout.cfg
parts =
manpy
[dream-repository.git]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/dream.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[dream-build]
; use a develop egg to easily develop and because dream already exists on pypi
; https://pypi.python.org/pypi/dream
recipe = zc.recipe.egg:develop
setup = ${dream-repository.git:location}
[manpy-env]
<= numpy-env
[manpy]
dependency = ${dream-build:setup}
recipe = zc.recipe.egg:script
eggs = dream
environment = manpy-env
setup-eggs =
${scipy:egg}
${mysql-python:egg}
initialization =
${pulp:initialization}
${rpy2:initialization}
...@@ -7,7 +7,6 @@ extends = ...@@ -7,7 +7,6 @@ extends =
../groonga/buildout.cfg ../groonga/buildout.cfg
../jemalloc/buildout.cfg ../jemalloc/buildout.cfg
../libaio/buildout.cfg ../libaio/buildout.cfg
../libevent/buildout.cfg
../libxml2/buildout.cfg ../libxml2/buildout.cfg
../ncurses/buildout.cfg ../ncurses/buildout.cfg
../openssl/buildout.cfg ../openssl/buildout.cfg
...@@ -21,9 +20,9 @@ parts = ...@@ -21,9 +20,9 @@ parts =
[mariadb] [mariadb]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
version = 10.0.14 version = 10.0.19
url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http:/ftp.osuosl.org/pub/mariadb url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http:/ftp.osuosl.org/pub/mariadb
md5sum = 80fea71de54a9cfa7f5508df53d3f06d md5sum = aeaf101c688515dc8f73a5250e6c1df9
patch-options = -p0 patch-options = -p0
patches = patches =
${:_profile_base_location_}/mariadb_10.0.8_create_system_tables__no_test.patch#a176d491cf45fccd53ee397c70393bc4 ${:_profile_base_location_}/mariadb_10.0.8_create_system_tables__no_test.patch#a176d491cf45fccd53ee397c70393bc4
...@@ -41,12 +40,13 @@ configure-options = ...@@ -41,12 +40,13 @@ configure-options =
-DENABLE_DTRACE=0 -DENABLE_DTRACE=0
-DWITH_EXTRA_CHARSETS=complex -DWITH_EXTRA_CHARSETS=complex
-DWITH_EMBEDDED_SERVER=0 -DWITH_EMBEDDED_SERVER=0
-DWITH_JEMALLOC=yes
-DWITHOUT_EXAMPLE_STORAGE_ENGINE=1 -DWITHOUT_EXAMPLE_STORAGE_ENGINE=1
-DWITHOUT_MROONGA_STORAGE_ENGINE=1
-DWITHOUT_DAEMON_EXAMPLE=1 -DWITHOUT_DAEMON_EXAMPLE=1
-DCMAKE_C_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${zlib:location}/include" -DCMAKE_C_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${zlib:location}/include"
-DCMAKE_CXX_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${zlib:location}/include" -DCMAKE_CXX_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${zlib:location}/include"
-DCMAKE_INSTALL_RPATH=${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${zlib:location}/lib -DCMAKE_INSTALL_RPATH=${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${zlib:location}/lib
-DWITHOUT_TOKUDB=true
environment = environment =
CMAKE_PROGRAM_PATH=${cmake:location}/bin CMAKE_PROGRAM_PATH=${cmake:location}/bin
CMAKE_INCLUDE_PATH=${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${zlib:location}/include CMAKE_INCLUDE_PATH=${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${zlib:location}/include
...@@ -57,8 +57,8 @@ environment = ...@@ -57,8 +57,8 @@ environment =
# mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users. # mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users.
# http://mroonga.github.com/ # http://mroonga.github.com/
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://packages.groonga.org/source/mroonga/mroonga-4.07.tar.gz url = http://packages.groonga.org/source/mroonga/mroonga-5.03.tar.gz
md5sum = fcfca7934b1f8df981fca579d3cd41bd md5sum = 4f5413d5c94ebc44caeeb5cb62d346ed
configure-command = mkdir fake_mariadb_source && ln -s ${mariadb:location}/include/mysql/private fake_mariadb_source/sql && ./configure configure-command = mkdir fake_mariadb_source && ln -s ${mariadb:location}/include/mysql/private fake_mariadb_source/sql && ./configure
configure-options = configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_} --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
......
[buildout]
extends =
../numpy/buildout.cfg
../pkgconfig/buildout.cfg
../libpng/buildout.cfg
../freetype/buildout.cfg
parts =
matplotlib
[matplotlib-env]
<= numpy-env
PATH = ${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH = ${libpng:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
CFLAGS = -I${libpng:location}/include -I${freetype:location}/include
[matplotlib]
recipe = zc.recipe.egg:custom
egg = matplotlib
environment = matplotlib-env
setup-eggs = ${numpy:egg}
rpath =
${libpng:location}/lib
${freetype:location}/lib
...@@ -38,6 +38,9 @@ environment = ...@@ -38,6 +38,9 @@ environment =
ACLOCAL=${automake:location}/bin/aclocal -I${libtool:location}/share/aclocal -I${pkgconfig:location}/share/aclocal ACLOCAL=${automake:location}/bin/aclocal -I${libtool:location}/share/aclocal -I${pkgconfig:location}/share/aclocal
AUTOCONF=${autoconf:location}/bin/autoconf AUTOCONF=${autoconf:location}/bin/autoconf
AUTOMAKE=${automake:location}/bin/automake AUTOMAKE=${automake:location}/bin/automake
make-options =
-j1
LIBTOOL=${libtool:location}/bin/libtool
[glu] [glu]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
......
[buildout]
parts =
msgpack-python
[msgpack-python]
recipe = zc.recipe.egg:custom
egg = msgpack-python
[buildout]
parts = netcat
[netcat]
recipe = slapos.recipe.cmmi
url = http://sourceforge.net/projects/netcat/files/netcat/0.7.1/netcat-0.7.1.tar.gz
md5sum = 088def25efe04dcdd1f8369d8926ab34
\ No newline at end of file
...@@ -11,8 +11,8 @@ parts = nginx-output ...@@ -11,8 +11,8 @@ parts = nginx-output
[nginx-common] [nginx-common]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://nginx.org/download/nginx-1.7.6.tar.gz url = http://nginx.org/download/nginx-1.7.8.tar.gz
md5sum = dd444e5333e0d324bec480e2ff67870a md5sum = fd5ab813fc1853cd8efe580ead577c3e
[nginx] [nginx]
<= nginx-common <= nginx-common
......
...@@ -4,7 +4,7 @@ parts = ...@@ -4,7 +4,7 @@ parts =
[noVNC] [noVNC]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
# Post-0.4 release from January 2013 # version-0.5.1 release from 29 Nov 2014
url = http://github.com/kanaka/noVNC/zipball/3b2acc2258d36137a37edfbe0f03a3099189c49d url = https://github.com/kanaka/noVNC/archive/v0.5.1.tar.gz
md5sum = a276be8fa193652bb5de8a271603f11f md5sum = ac55b2316b2164b6e09ae3bd89c37cb6
strip-top-level-dir = true strip-top-level-dir = true
...@@ -9,7 +9,22 @@ parts = ...@@ -9,7 +9,22 @@ parts =
nodejs nodejs
[nodejs] [nodejs]
<= nodejs-0.8 <= nodejs-0.10
[nodejs-0.10]
# Server-side Javascript.
recipe = slapos.recipe.cmmi
url = http://nodejs.org/dist/v0.10.36/node-v0.10.36.tar.gz
md5sum = 4b3527b830f2dacaba01aececd509c6f
configure-options =
--openssl-includes=${openssl:location}/include
--openssl-libpath=${openssl:location}/lib
environment =
HOME=${buildout:parts-directory}/${:_buildout_section_name_}
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig/
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath=${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[nodejs-0.8] [nodejs-0.8]
# Server-side Javascript. # Server-side Javascript.
......
...@@ -2,21 +2,19 @@ ...@@ -2,21 +2,19 @@
extends = extends =
../gcc/buildout.cfg ../gcc/buildout.cfg
../libatlas/buildout.cfg ../openblas/buildout.cfg
version = versions
[numpy-env] [numpy-env]
PATH = ${gcc-fortran:location}/bin:%(PATH)s PATH = ${gcc-fortran:location}/bin:%(PATH)s
BLAS = ${libatlas:location}/lib/libcblas.a OPENBLAS = ${openblas:location}/lib/libopenblas.so
LAPACK = ${libatlas:location}/lib/liblapack.a LAPACK = ${openblas:location}/lib/libopenblas.so
ATLAS = ${libatlas:location}/lib/libatlas.a ATLAS = ${openblas:location}/lib/libopenblas.so
[numpy] [numpy]
recipe = zc.recipe.egg:custom recipe = zc.recipe.egg:custom
egg = numpy egg = numpy
environment = numpy-env environment = numpy-env
# no need to add libatlas location because they are statically linked.
rpath = rpath =
${gcc-fortran:location}/lib ${gcc-fortran:location}/lib
${gcc-fortran:location}/lib64 ${gcc-fortran:location}/lib64
${openblas:location}/lib
[buildout]
parts =
openblas
[openblas]
recipe = slapos.recipe.cmmi
url = http://github.com/xianyi/OpenBLAS/tarball/v0.2.13
md5sum = 74adf4c0d0d82bff4774be5bf2134183
configure-command = true
make-options =
PREFIX="${buildout:parts-directory}/${:_buildout_section_name_}"
BINARY="$(uname -m | grep -q x86_64 && echo 64 || echo 32)"
NO_STATIC=1
USE_OPENMP=1
USE_THREAD=1
# to build generic binary that supports multiple architecture in one binary
# DYNAMIC_ARCH=1
environment =
PATH=${gcc-fortran:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc-fortran:location}/lib:${gcc-fortran:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc-fortran:location}/lib -Wl,-rpath=${gcc-fortran:location}/lib64
...@@ -8,8 +8,8 @@ extends = ...@@ -8,8 +8,8 @@ extends =
[openldap] [openldap]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-2.4.33.tgz url = ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-2.4.40.tgz
md5sum = 5adae44897647c15ce5abbff313bc85a md5sum = 423c1f23d2a0cb96b3e9baf7e9d7dda7
configure-options = configure-options =
--disable-static --disable-static
--disable-slapd --disable-slapd
......
...@@ -16,13 +16,14 @@ parts = ...@@ -16,13 +16,14 @@ parts =
[openssl] [openssl]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.1j.tar.gz url = https://www.openssl.org/source/openssl-1.0.1n.tar.gz
md5sum = f7175c9cd3c39bb1907ac8bba9df8ed3 md5sum = 139568bd5a56fa49b72a290d37113f30
patch-binary = ${patch:location}/bin/patch patch-binary = ${patch:location}/bin/patch
patches = patches =
${:_profile_base_location_}/openssl-nodoc.patch#b4887a7b4e18402447bc6227d2493b92 ${:_profile_base_location_}/openssl-nodoc.patch#dd1345ae7ed18ccf176bc6b77c516f98
${:_profile_base_location_}/openssl-exlibs.patch#dfb8979460d6d75f2d23d1ea83bbb40a ${:_profile_base_location_}/openssl-exlibs.patch#fba5c873cf974ba80a973be41da3c738
patch-options = -p0 ${:_profile_base_location_}/openssl-1.0.1m-parallel-build.patch#a1ddd93b5b296473c7446131deb31f93
patch-options = -p1
configure-command = ./config configure-command = ./config
configure-options = configure-options =
-I${zlib:location}/include -I${zlib:location}/include
...@@ -33,10 +34,6 @@ configure-options = ...@@ -33,10 +34,6 @@ configure-options =
shared no-idea no-mdc2 no-rc5 zlib shared no-idea no-mdc2 no-rc5 zlib
-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib
&& make depend && make depend
# it seems that parallel build sometimes fails for openssl.
make-options =
-j1
make-targets = make-targets =
all install_sw && rm -f ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/* && for i in ${ca-certificates:location}/certs/*/*.crt; do ln -sv $i ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/`${buildout:parts-directory}/${:_buildout_section_name_}/bin/openssl x509 -hash -noout -in $i`.0; done; true all install_sw && rm -f ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/* && for i in ${ca-certificates:location}/certs/*/*.crt; do ln -sv $i ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/`${buildout:parts-directory}/${:_buildout_section_name_}/bin/openssl x509 -hash -noout -in $i`.0; done; true
environment = environment =
......
https://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/dev-libs/openssl/files/
http://rt.openssl.org/Ticket/Display.html?id=2084
--- openssl-1.0.1m/crypto/Makefile
+++ openssl-1.0.1m/crypto/Makefile
@@ -85,11 +85,11 @@
@if [ -z "$(THIS)" ]; then $(MAKE) -f $(TOP)/Makefile reflect THIS=$@; fi
subdirs:
- @target=all; $(RECURSIVE_MAKE)
+ +@target=all; $(RECURSIVE_MAKE)
files:
$(PERL) $(TOP)/util/files.pl Makefile >> $(TOP)/MINFO
- @target=files; $(RECURSIVE_MAKE)
+ +@target=files; $(RECURSIVE_MAKE)
links:
@$(PERL) $(TOP)/util/mklink.pl ../include/openssl $(EXHEADER)
@@ -100,7 +100,7 @@
# lib: $(LIB): are splitted to avoid end-less loop
lib: $(LIB)
@touch lib
-$(LIB): $(LIBOBJ)
+$(LIB): $(LIBOBJ) | subdirs
$(AR) $(LIB) $(LIBOBJ)
[ -z "$(FIPSLIBDIR)" ] || $(AR) $(LIB) $(FIPSLIBDIR)fipscanister.o
$(RANLIB) $(LIB) || echo Never mind.
@@ -111,7 +111,7 @@
fi
libs:
- @target=lib; $(RECURSIVE_MAKE)
+ +@target=lib; $(RECURSIVE_MAKE)
install:
@[ -n "$(INSTALLTOP)" ] # should be set by top Makefile...
@@ -120,7 +120,7 @@
(cp $$i $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i; \
chmod 644 $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i ); \
done;
- @target=install; $(RECURSIVE_MAKE)
+ +@target=install; $(RECURSIVE_MAKE)
lint:
@target=lint; $(RECURSIVE_MAKE)
--- openssl-1.0.1m/crypto/objects/Makefile
+++ openssl-1.0.1m/crypto/objects/Makefile
@@ -44,11 +44,11 @@
# objects.pl both reads and writes obj_mac.num
obj_mac.h: objects.pl objects.txt obj_mac.num
$(PERL) objects.pl objects.txt obj_mac.num obj_mac.h
- @sleep 1; touch obj_mac.h; sleep 1
-obj_xref.h: objxref.pl obj_xref.txt obj_mac.num
+# This doesn't really need obj_mac.h, but since that rule reads & writes
+# obj_mac.num, we can't run in parallel with it.
+obj_xref.h: objxref.pl obj_xref.txt obj_mac.num obj_mac.h
$(PERL) objxref.pl obj_mac.num obj_xref.txt > obj_xref.h
- @sleep 1; touch obj_xref.h; sleep 1
files:
$(PERL) $(TOP)/util/files.pl Makefile >> $(TOP)/MINFO
--- openssl-1.0.1m/engines/Makefile
+++ openssl-1.0.1m/engines/Makefile
@@ -72,7 +72,7 @@
all: lib subdirs
-lib: $(LIBOBJ)
+lib: $(LIBOBJ) | subdirs
@if [ -n "$(SHARED_LIBS)" ]; then \
set -e; \
for l in $(LIBNAMES); do \
@@ -89,7 +89,7 @@
subdirs:
echo $(EDIRS)
- @target=all; $(RECURSIVE_MAKE)
+ +@target=all; $(RECURSIVE_MAKE)
files:
$(PERL) $(TOP)/util/files.pl Makefile >> $(TOP)/MINFO
@@ -128,7 +128,7 @@
mv -f $(INSTALL_PREFIX)$(INSTALLTOP)/$(LIBDIR)/engines/$$pfx$$l$$sfx.new $(INSTALL_PREFIX)$(INSTALLTOP)/$(LIBDIR)/engines/$$pfx$$l$$sfx ); \
done; \
fi
- @target=install; $(RECURSIVE_MAKE)
+ +@target=install; $(RECURSIVE_MAKE)
tags:
ctags $(SRC)
--- openssl-1.0.1m/Makefile.org
+++ openssl-1.0.1m/Makefile.org
@@ -273,17 +273,17 @@
build_libs: build_crypto build_ssl build_engines
build_crypto:
- @dir=crypto; target=all; $(BUILD_ONE_CMD)
-build_ssl:
- @dir=ssl; target=all; $(BUILD_ONE_CMD)
-build_engines:
- @dir=engines; target=all; $(BUILD_ONE_CMD)
-build_apps:
- @dir=apps; target=all; $(BUILD_ONE_CMD)
-build_tests:
- @dir=test; target=all; $(BUILD_ONE_CMD)
-build_tools:
- @dir=tools; target=all; $(BUILD_ONE_CMD)
+ +@dir=crypto; target=all; $(BUILD_ONE_CMD)
+build_ssl: build_crypto
+ +@dir=ssl; target=all; $(BUILD_ONE_CMD)
+build_engines: build_crypto
+ +@dir=engines; target=all; $(BUILD_ONE_CMD)
+build_apps: build_libs
+ +@dir=apps; target=all; $(BUILD_ONE_CMD)
+build_tests: build_libs
+ +@dir=test; target=all; $(BUILD_ONE_CMD)
+build_tools: build_libs
+ +@dir=tools; target=all; $(BUILD_ONE_CMD)
all_testapps: build_libs build_testapps
build_testapps:
@@ -538,9 +538,9 @@
dist_pem_h:
(cd crypto/pem; $(MAKE) -e $(BUILDENV) pem.h; $(MAKE) clean)
-install: all install_docs install_sw
+install: install_docs install_sw
-install_sw:
+install_dirs:
@$(PERL) $(TOP)/util/mkdir-p.pl $(INSTALL_PREFIX)$(INSTALLTOP)/bin \
$(INSTALL_PREFIX)$(INSTALLTOP)/$(LIBDIR) \
$(INSTALL_PREFIX)$(INSTALLTOP)/$(LIBDIR)/engines \
@@ -549,12 +549,19 @@
$(INSTALL_PREFIX)$(OPENSSLDIR)/misc \
$(INSTALL_PREFIX)$(OPENSSLDIR)/certs \
$(INSTALL_PREFIX)$(OPENSSLDIR)/private
+ @$(PERL) $(TOP)/util/mkdir-p.pl \
+ $(INSTALL_PREFIX)$(MANDIR)/man1 \
+ $(INSTALL_PREFIX)$(MANDIR)/man3 \
+ $(INSTALL_PREFIX)$(MANDIR)/man5 \
+ $(INSTALL_PREFIX)$(MANDIR)/man7
+
+install_sw: install_dirs
@set -e; headerlist="$(EXHEADER)"; for i in $$headerlist;\
do \
(cp $$i $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i; \
chmod 644 $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i ); \
done;
- @set -e; target=install; $(RECURSIVE_BUILD_CMD)
+ +@set -e; target=install; $(RECURSIVE_BUILD_CMD)
@set -e; liblist="$(LIBS)"; for i in $$liblist ;\
do \
if [ -f "$$i" ]; then \
@@ -634,12 +641,7 @@
done; \
done
-install_docs:
- @$(PERL) $(TOP)/util/mkdir-p.pl \
- $(INSTALL_PREFIX)$(MANDIR)/man1 \
- $(INSTALL_PREFIX)$(MANDIR)/man3 \
- $(INSTALL_PREFIX)$(MANDIR)/man5 \
- $(INSTALL_PREFIX)$(MANDIR)/man7
+install_docs: install_dirs
@pod2man="`cd ./util; ./pod2mantest $(PERL)`"; \
here="`pwd`"; \
filecase=; \
--- openssl-1.0.1m/Makefile.shared
+++ openssl-1.0.1m/Makefile.shared
@@ -105,6 +105,7 @@
SHAREDFLAGS="$${SHAREDFLAGS:-$(CFLAGS) $(SHARED_LDFLAGS)}"; \
LIBPATH=`for x in $$LIBDEPS; do echo $$x; done | sed -e 's/^ *-L//;t' -e d | uniq`; \
LIBPATH=`echo $$LIBPATH | sed -e 's/ /:/g'`; \
+ [ -e $$SHLIB$$SHLIB_SOVER$$SHLIB_SUFFIX ] && exit 0; \
LD_LIBRARY_PATH=$$LIBPATH:$$LD_LIBRARY_PATH \
$${SHAREDCMD} $${SHAREDFLAGS} \
-o $$SHLIB$$SHLIB_SOVER$$SHLIB_SUFFIX \
@@ -122,6 +123,7 @@
done; \
fi; \
if [ -n "$$SHLIB_SOVER" ]; then \
+ [ -e "$$SHLIB$$SHLIB_SUFFIX" ] || \
( $(SET_X); rm -f $$SHLIB$$SHLIB_SUFFIX; \
ln -s $$prev $$SHLIB$$SHLIB_SUFFIX ); \
fi; \
--- openssl-1.0.1m/test/Makefile
+++ openssl-1.0.1m/test/Makefile
@@ -130,7 +130,7 @@
tags:
ctags $(SRC)
-tests: exe apps $(TESTS)
+tests: exe $(TESTS)
apps:
@(cd ..; $(MAKE) DIRS=apps all)
@@ -388,118 +388,118 @@
link_app.$${shlib_target}
$(RSATEST)$(EXE_EXT): $(RSATEST).o $(DLIBCRYPTO)
- @target=$(RSATEST); $(BUILD_CMD)
+ +@target=$(RSATEST); $(BUILD_CMD)
$(BNTEST)$(EXE_EXT): $(BNTEST).o $(DLIBCRYPTO)
- @target=$(BNTEST); $(BUILD_CMD)
+ +@target=$(BNTEST); $(BUILD_CMD)
$(ECTEST)$(EXE_EXT): $(ECTEST).o $(DLIBCRYPTO)
- @target=$(ECTEST); $(BUILD_CMD)
+ +@target=$(ECTEST); $(BUILD_CMD)
$(EXPTEST)$(EXE_EXT): $(EXPTEST).o $(DLIBCRYPTO)
- @target=$(EXPTEST); $(BUILD_CMD)
+ +@target=$(EXPTEST); $(BUILD_CMD)
$(IDEATEST)$(EXE_EXT): $(IDEATEST).o $(DLIBCRYPTO)
- @target=$(IDEATEST); $(BUILD_CMD)
+ +@target=$(IDEATEST); $(BUILD_CMD)
$(MD2TEST)$(EXE_EXT): $(MD2TEST).o $(DLIBCRYPTO)
- @target=$(MD2TEST); $(BUILD_CMD)
+ +@target=$(MD2TEST); $(BUILD_CMD)
$(SHATEST)$(EXE_EXT): $(SHATEST).o $(DLIBCRYPTO)
- @target=$(SHATEST); $(BUILD_CMD)
+ +@target=$(SHATEST); $(BUILD_CMD)
$(SHA1TEST)$(EXE_EXT): $(SHA1TEST).o $(DLIBCRYPTO)
- @target=$(SHA1TEST); $(BUILD_CMD)
+ +@target=$(SHA1TEST); $(BUILD_CMD)
$(SHA256TEST)$(EXE_EXT): $(SHA256TEST).o $(DLIBCRYPTO)
- @target=$(SHA256TEST); $(BUILD_CMD)
+ +@target=$(SHA256TEST); $(BUILD_CMD)
$(SHA512TEST)$(EXE_EXT): $(SHA512TEST).o $(DLIBCRYPTO)
- @target=$(SHA512TEST); $(BUILD_CMD)
+ +@target=$(SHA512TEST); $(BUILD_CMD)
$(RMDTEST)$(EXE_EXT): $(RMDTEST).o $(DLIBCRYPTO)
- @target=$(RMDTEST); $(BUILD_CMD)
+ +@target=$(RMDTEST); $(BUILD_CMD)
$(MDC2TEST)$(EXE_EXT): $(MDC2TEST).o $(DLIBCRYPTO)
- @target=$(MDC2TEST); $(BUILD_CMD)
+ +@target=$(MDC2TEST); $(BUILD_CMD)
$(MD4TEST)$(EXE_EXT): $(MD4TEST).o $(DLIBCRYPTO)
- @target=$(MD4TEST); $(BUILD_CMD)
+ +@target=$(MD4TEST); $(BUILD_CMD)
$(MD5TEST)$(EXE_EXT): $(MD5TEST).o $(DLIBCRYPTO)
- @target=$(MD5TEST); $(BUILD_CMD)
+ +@target=$(MD5TEST); $(BUILD_CMD)
$(HMACTEST)$(EXE_EXT): $(HMACTEST).o $(DLIBCRYPTO)
- @target=$(HMACTEST); $(BUILD_CMD)
+ +@target=$(HMACTEST); $(BUILD_CMD)
$(WPTEST)$(EXE_EXT): $(WPTEST).o $(DLIBCRYPTO)
- @target=$(WPTEST); $(BUILD_CMD)
+ +@target=$(WPTEST); $(BUILD_CMD)
$(RC2TEST)$(EXE_EXT): $(RC2TEST).o $(DLIBCRYPTO)
- @target=$(RC2TEST); $(BUILD_CMD)
+ +@target=$(RC2TEST); $(BUILD_CMD)
$(BFTEST)$(EXE_EXT): $(BFTEST).o $(DLIBCRYPTO)
- @target=$(BFTEST); $(BUILD_CMD)
+ +@target=$(BFTEST); $(BUILD_CMD)
$(CASTTEST)$(EXE_EXT): $(CASTTEST).o $(DLIBCRYPTO)
- @target=$(CASTTEST); $(BUILD_CMD)
+ +@target=$(CASTTEST); $(BUILD_CMD)
$(RC4TEST)$(EXE_EXT): $(RC4TEST).o $(DLIBCRYPTO)
- @target=$(RC4TEST); $(BUILD_CMD)
+ +@target=$(RC4TEST); $(BUILD_CMD)
$(RC5TEST)$(EXE_EXT): $(RC5TEST).o $(DLIBCRYPTO)
- @target=$(RC5TEST); $(BUILD_CMD)
+ +@target=$(RC5TEST); $(BUILD_CMD)
$(DESTEST)$(EXE_EXT): $(DESTEST).o $(DLIBCRYPTO)
- @target=$(DESTEST); $(BUILD_CMD)
+ +@target=$(DESTEST); $(BUILD_CMD)
$(RANDTEST)$(EXE_EXT): $(RANDTEST).o $(DLIBCRYPTO)
- @target=$(RANDTEST); $(BUILD_CMD)
+ +@target=$(RANDTEST); $(BUILD_CMD)
$(DHTEST)$(EXE_EXT): $(DHTEST).o $(DLIBCRYPTO)
- @target=$(DHTEST); $(BUILD_CMD)
+ +@target=$(DHTEST); $(BUILD_CMD)
$(DSATEST)$(EXE_EXT): $(DSATEST).o $(DLIBCRYPTO)
- @target=$(DSATEST); $(BUILD_CMD)
+ +@target=$(DSATEST); $(BUILD_CMD)
$(METHTEST)$(EXE_EXT): $(METHTEST).o $(DLIBCRYPTO)
- @target=$(METHTEST); $(BUILD_CMD)
+ +@target=$(METHTEST); $(BUILD_CMD)
$(SSLTEST)$(EXE_EXT): $(SSLTEST).o $(DLIBSSL) $(DLIBCRYPTO)
- @target=$(SSLTEST); $(FIPS_BUILD_CMD)
+ +@target=$(SSLTEST); $(FIPS_BUILD_CMD)
$(ENGINETEST)$(EXE_EXT): $(ENGINETEST).o $(DLIBCRYPTO)
- @target=$(ENGINETEST); $(BUILD_CMD)
+ +@target=$(ENGINETEST); $(BUILD_CMD)
$(EVPTEST)$(EXE_EXT): $(EVPTEST).o $(DLIBCRYPTO)
- @target=$(EVPTEST); $(BUILD_CMD)
+ +@target=$(EVPTEST); $(BUILD_CMD)
$(EVPEXTRATEST)$(EXE_EXT): $(EVPEXTRATEST).o $(DLIBCRYPTO)
- @target=$(EVPEXTRATEST); $(BUILD_CMD)
+ +@target=$(EVPEXTRATEST); $(BUILD_CMD)
$(ECDSATEST)$(EXE_EXT): $(ECDSATEST).o $(DLIBCRYPTO)
- @target=$(ECDSATEST); $(BUILD_CMD)
+ +@target=$(ECDSATEST); $(BUILD_CMD)
$(ECDHTEST)$(EXE_EXT): $(ECDHTEST).o $(DLIBCRYPTO)
- @target=$(ECDHTEST); $(BUILD_CMD)
+ +@target=$(ECDHTEST); $(BUILD_CMD)
$(IGETEST)$(EXE_EXT): $(IGETEST).o $(DLIBCRYPTO)
- @target=$(IGETEST); $(BUILD_CMD)
+ +@target=$(IGETEST); $(BUILD_CMD)
$(JPAKETEST)$(EXE_EXT): $(JPAKETEST).o $(DLIBCRYPTO)
- @target=$(JPAKETEST); $(BUILD_CMD)
+ +@target=$(JPAKETEST); $(BUILD_CMD)
$(ASN1TEST)$(EXE_EXT): $(ASN1TEST).o $(DLIBCRYPTO)
- @target=$(ASN1TEST); $(BUILD_CMD)
+ +@target=$(ASN1TEST); $(BUILD_CMD)
$(SRPTEST)$(EXE_EXT): $(SRPTEST).o $(DLIBCRYPTO)
- @target=$(SRPTEST); $(BUILD_CMD)
+ +@target=$(SRPTEST); $(BUILD_CMD)
$(HEARTBEATTEST)$(EXE_EXT): $(HEARTBEATTEST).o $(DLIBCRYPTO)
- @target=$(HEARTBEATTEST); $(BUILD_CMD_STATIC)
+ +@target=$(HEARTBEATTEST); $(BUILD_CMD_STATIC)
$(CONSTTIMETEST)$(EXE_EXT): $(CONSTTIMETEST).o
- @target=$(CONSTTIMETEST) $(BUILD_CMD)
+ +@target=$(CONSTTIMETEST) $(BUILD_CMD)
#$(AESTEST).o: $(AESTEST).c
# $(CC) -c $(CFLAGS) -DINTERMEDIATE_VALUE_KAT -DTRACE_KAT_MCT $(AESTEST).c
@@ -512,7 +512,7 @@
# fi
dummytest$(EXE_EXT): dummytest.o $(DLIBCRYPTO)
- @target=dummytest; $(BUILD_CMD)
+ +@target=dummytest; $(BUILD_CMD)
# DO NOT DELETE THIS LINE -- make depend depends on it.
--- engines/ccgost/Makefile~ 2010-08-24 23:46:34.000000000 +0200 --- a/engines/ccgost/Makefile~ 2010-08-24 23:46:34.000000000 +0200
+++ engines/ccgost/Makefile 2012-03-14 10:11:46.826419864 +0100 +++ b/engines/ccgost/Makefile 2012-03-14 10:11:46.826419864 +0100
@@ -7,6 +7,7 @@ @@ -7,6 +7,7 @@
AR= ar r AR= ar r
CFLAGS= $(INCLUDES) $(CFLAG) CFLAGS= $(INCLUDES) $(CFLAG)
......
--- Makefile 2012-01-18 14:42:28.000000000 +0100 --- a/Makefile 2012-01-18 14:42:28.000000000 +0100
+++ Makefile 2012-01-24 17:43:40.000000000 +0100 +++ b/Makefile 2012-01-24 17:43:40.000000000 +0100
@@ -494,7 +494,7 @@ @@ -494,7 +494,7 @@
dist_pem_h: dist_pem_h:
(cd crypto/pem; $(MAKE) -e $(BUILDENV) pem.h; $(MAKE) clean) (cd crypto/pem; $(MAKE) -e $(BUILDENV) pem.h; $(MAKE) clean)
......
[buildout]
extends =
../lzo/buildout.cfg
../patch/buildout.cfg
../openssl/buildout.cfg
../flex/buildout.cfg
parts =
openvpn
[openvpn]
recipe = slapos.recipe.cmmi
url = http://swupdate.openvpn.org/community/releases/openvpn-2.3.6.tar.gz
md5sum = 6ca03fe0fd093e0d01601abee808835c
patch-binary = ${patch:location}/bin/patch
patches =
${:_profile_base_location_}/openvpn-ciphernone.patch#462b53a45da2fa686822618c4faafd19
patch-options = -p0
configure-options =
--disable-plugin-auth-pam
--enable-static
environment =
LZO_LIBS=-L${lzo:location}/lib -llzo2
LZO_CFLAGS=-I${lzo:location}/include
OPENSSL_SSL_LIBS="-L${openssl:location}/lib -lssl"
OPENSSL_SSL_CFLAGS="-I${openssl:location}/include/"
OPENSSL_CRYPTO_LIBS="-L${openssl:location}/lib -lcrypto"
OPENSSL_CRYPTO_CFLAGS="-I${openssl:location}/include"
LDFLAGS =-Wl,-rpath=${lzo:location}/lib -Wl,-rpath=${flex:location}/lib -Wl,-rpath=${openssl:location}/lib
CPPFLAGS=-I${lzo:location}/include -I${flex:location}/include
--- src/openvpn/crypto_backend.h
+++ src/openvpn/crypto_backend.h
@@ -237,8 +237,7 @@
*
* @return true iff the cipher is a CBC mode cipher.
*/
-bool cipher_kt_mode_cbc(const cipher_kt_t *cipher)
- __attribute__((nonnull));
+bool cipher_kt_mode_cbc(const cipher_kt_t *cipher);
/**
* Check if the supplied cipher is a supported OFB or CFB mode cipher.
@@ -247,8 +246,7 @@
*
* @return true iff the cipher is a OFB or CFB mode cipher.
*/
-bool cipher_kt_mode_ofb_cfb(const cipher_kt_t *cipher)
- __attribute__((nonnull));
+bool cipher_kt_mode_ofb_cfb(const cipher_kt_t *cipher);
/**
[buildout]
parts =
p7zip
[p7zip]
recipe = slapos.recipe.cmmi
url = http://downloads.sf.net/project/p7zip/p7zip/${:version}/p7zip_${:version}_src_all.tar.bz2
version = 9.38.1
md5sum = 6cba8402ccab2370d3b70c5e28b3d651
configure-command = rm -r DOC
make-options =
DEST_HOME=${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
extends =
../numpy/buildout.cfg
parts =
pandas
[pandas-env]
<= numpy-env
[pandas]
recipe = zc.recipe.egg:custom
egg = pandas
environment = pandas-env
setup-eggs = ${numpy:egg}
...@@ -6,7 +6,7 @@ parts = ...@@ -6,7 +6,7 @@ parts =
[patch] [patch]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = ftp://ftp.gnu.org/gnu/patch/patch-2.7.1.tar.xz url = ftp://ftp.gnu.org/gnu/patch/patch-2.7.3.tar.xz
md5sum = e9ae5393426d3ad783a300a338c09b72 md5sum = 29b87be845e4662ab0ca0d48a805ecc6
environment = environment =
PATH=${xz-utils:location}/bin:%(PATH)s PATH=${xz-utils:location}/bin:%(PATH)s
...@@ -7,9 +7,9 @@ parts = ...@@ -7,9 +7,9 @@ parts =
[perl] [perl]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
version = 5.20.0 version = 5.20.2
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2 url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 20cbecd4e9e880ee7a50a136c8b1484e md5sum = 21062666f1c627aeb6dbff3c6952738b
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_} siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1 patch-options = -p1
patches = patches =
...@@ -25,4 +25,4 @@ configure-command = ...@@ -25,4 +25,4 @@ configure-command =
-Dusethreads -Dusethreads
environment = environment =
PATH=${patch:location}/bin:%(PATH)s PATH=${patch:location}/bin:%(PATH)s
post-make-hook = ${:_profile_base_location_}/perl-create-libs-symlink.py#7fded8308c1676decf77575c6d6b325f:post_make_hook post-make-hook = ${:_profile_base_location_}/perl-create-libs-symlink.py#539cb3cd0d1090f7f30a8e5a82b37854:post_make_hook
...@@ -15,7 +15,7 @@ def post_make_hook(options, buildout): ...@@ -15,7 +15,7 @@ def post_make_hook(options, buildout):
print "ERROR - no libperl.* found!" print "ERROR - no libperl.* found!"
exit exit
elif nr_matches > 1: elif nr_matches > 1:
print "WARNING - several libperl.a found, taking only the first one:", matches.join("\n") print "WARNING - several libperl.a found, taking only the first one:", "\n".join(matches)
# matches[0] is a prefix of "location" # matches[0] is a prefix of "location"
# For the symlink, we want the relative path. # For the symlink, we want the relative path.
......
...@@ -3,24 +3,23 @@ parts = poppler ...@@ -3,24 +3,23 @@ parts = poppler
extends = extends =
../bzip2/buildout.cfg ../bzip2/buildout.cfg
../fontconfig/buildout.cfg ../fontconfig/buildout.cfg
../freetype/buildout.cfg
../jbigkit/buildout.cfg ../jbigkit/buildout.cfg
../libjpeg/buildout.cfg ../libjpeg/buildout.cfg
../libpng/buildout.cfg ../libpng/buildout.cfg
../libtiff/buildout.cfg ../libtiff/buildout.cfg
../libxml2/buildout.cfg
../pkgconfig/buildout.cfg ../pkgconfig/buildout.cfg
../xz-utils/buildout.cfg ../xz-utils/buildout.cfg
../zlib/buildout.cfg ../zlib/buildout.cfg
[poppler] [poppler]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://poppler.freedesktop.org/poppler-0.26.0.tar.xz url = http://poppler.freedesktop.org/poppler-0.30.0.tar.xz
md5sum = 76c3b2f18002a0f1e2bbbc7cccd1b957 md5sum = fb0a9d82b5433c823421a123eb8e9789
configure-options = configure-options =
--disable-cairo-output --disable-cairo-output
--disable-cms --disable-cms
--disable-libcurl --disable-libcurl
--disable-libopenjpeg
--disable-gtk-doc-html --disable-gtk-doc-html
--disable-gtk-test --disable-gtk-test
--disable-poppler-cpp --disable-poppler-cpp
......
[buildout]
parts =
proj4
[proj4]
recipe = slapos.recipe.cmmi
version = 4.8.0
url = http://download.osgeo.org/proj/proj-${:version}.tar.gz
md5sum = d815838c92a29179298c126effbb1537
configure-options =
--disable-dependency-tracking
[buildout]
extends =
../glpk/buildout.cfg
../numpy/buildout.cfg
parts =
pulp
# XXX patched pulp version that does not pin the pyparsing version number,
# because that pin conflicts with the pyparsing version used by slapos
[pulp-repository.git]
recipe = slapos.recipe.build:gitclone
repository = https://gitlab.com/jerome-nexedi/pulp-or.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[pulp-build]
recipe = zc.recipe.egg:develop
setup = ${pulp-repository.git:location}
[pulp-env]
<= numpy-env
PATH=${glpk:location}/bin
[pulp]
dependencies = ${pulp-build:setup}
recipe = zc.recipe.egg:custom
egg = pulp
environment = pulp-env
setup-eggs = ${numpy:egg}
initialization =
# pulp needs glpk in $PATH
import os
os.environ['PATH'] = '${glpk:location}/bin' + os.pathsep + os.environ.get('PATH', '')
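
The initialization snippet above is what matters at runtime: pulp's GLPK backend shells out to the glpsol binary, so glpk's bin directory has to be on $PATH before pulp is used. A minimal standalone sketch of the same idea, with a hypothetical path standing in for ${glpk:location}/bin:

import os
import subprocess

GLPK_BIN = '/opt/slapos/parts/glpk/bin'  # placeholder for ${glpk:location}/bin

# Prepend the glpk bin directory, exactly like the generated interpreter does,
# so that pulp can find the glpsol solver executable.
os.environ['PATH'] = GLPK_BIN + os.pathsep + os.environ.get('PATH', '')

# Sanity check: glpsol should now be callable (prints its version banner).
subprocess.call(['glpsol', '--version'])
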
...@@ -5,8 +5,6 @@ extends = ...@@ -5,8 +5,6 @@ extends =
parts = parts =
pycrypto-python pycrypto-python
versions = versions
[pycrypto-python] [pycrypto-python]
recipe = zc.recipe.egg:custom recipe = zc.recipe.egg:custom
egg = pycrypto egg = pycrypto
...@@ -22,6 +20,3 @@ rpath = ...@@ -22,6 +20,3 @@ rpath =
ac_cv_func_malloc_0_nonnull = yes ac_cv_func_malloc_0_nonnull = yes
LDFLAGS = -L${gmp:location}/lib LDFLAGS = -L${gmp:location}/lib
CPPFLAGS = -I${gmp:location}/include CPPFLAGS = -I${gmp:location}/include
[versions]
pycrypto = 2.6.1
...@@ -27,9 +27,9 @@ python = python2.7 ...@@ -27,9 +27,9 @@ python = python2.7
[python2.7] [python2.7]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
package_version = 2.7.8 package_version = 2.7.10
package_version_suffix = package_version_suffix =
md5sum = d235bdfa75b8396942e360a70487ee00 md5sum = c685ef0b8e9f27b5e3db5db12b268ac6
# This is actually the default setting for prefix, but we can't use it in # This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here. # other settings in this part if we don't set it explicitly here.
...@@ -39,8 +39,7 @@ executable = ${:prefix}/bin/python${:version} ...@@ -39,8 +39,7 @@ executable = ${:prefix}/bin/python${:version}
patch-options = -p1 patch-options = -p1
patches = patches =
${:_profile_base_location_}/tls_sni.patch#c95af105e6e96aaa58a50137595872a0 ${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
${:_profile_base_location_}/tls_sni_httplib.patch#5c9d00d23b85169df792a936a056cbcc
url = url =
http://python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tar.xz http://python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tar.xz
configure-options = configure-options =
......
From af98230024f639d4c719b55d0d330912c3ea7b97 Mon Sep 17 00:00:00 2001
From: Arnaud Fontaine <arnaud.fontaine@nexedi.com>
Date: Thu, 11 Dec 2014 23:48:22 +0900
Subject: [PATCH] Revert https://hg.python.org/cpython/raw-rev/42faa8054c3d.
This fixes http://bugs.python.org/issue20613.
---
Lib/compiler/pyassem.py | 257 ++++++++++++++++++++++++++++------------------
Lib/compiler/pycodegen.py | 2 +-
2 files changed, 159 insertions(+), 100 deletions(-)
diff --git a/Lib/compiler/pyassem.py b/Lib/compiler/pyassem.py
index f52f7d0..4299a17 100644
--- a/Lib/compiler/pyassem.py
+++ b/Lib/compiler/pyassem.py
@@ -21,7 +21,6 @@ class FlowGraph:
if self.current:
print "end", repr(self.current)
print " next", self.current.next
- print " prev", self.current.prev
print " ", self.current.get_children()
print repr(block)
self.current = block
@@ -41,12 +40,13 @@ class FlowGraph:
if block is None:
block = self.newBlock()
- # Note: If the current block ends with an unconditional control
- # transfer, then it is techically incorrect to add an implicit
- # transfer to the block graph. Doing so results in code generation
- # for unreachable blocks. That doesn't appear to be very common
- # with Python code and since the built-in compiler doesn't optimize
- # it out we don't either.
+ # Note: If the current block ends with an unconditional
+ # control transfer, then it is incorrect to add an implicit
+ # transfer to the block graph. The current code requires
+ # these edges to get the blocks emitted in the right order,
+ # however. :-( If a client needs to remove these edges, call
+ # pruneEdges().
+
self.current.addNext(block)
self.startBlock(block)
@@ -69,6 +69,8 @@ class FlowGraph:
def emit(self, *inst):
if self._debug:
print "\t", inst
+ if inst[0] in ['RETURN_VALUE', 'YIELD_VALUE']:
+ self.current.addOutEdge(self.exit)
if len(inst) == 2 and isinstance(inst[1], Block):
self.current.addOutEdge(inst[1])
self.current.emit(inst)
@@ -78,9 +80,118 @@ class FlowGraph:
i.e. each node appears before all of its successors
"""
- order = order_blocks(self.entry, self.exit)
+ # XXX make sure every node that doesn't have an explicit next
+ # is set so that next points to exit
+ for b in self.blocks.elements():
+ if b is self.exit:
+ continue
+ if not b.next:
+ b.addNext(self.exit)
+ order = dfs_postorder(self.entry, {})
+ order.reverse()
+ self.fixupOrder(order, self.exit)
+ # hack alert
+ if not self.exit in order:
+ order.append(self.exit)
+
return order
+ def fixupOrder(self, blocks, default_next):
+ """Fixup bad order introduced by DFS."""
+
+ # XXX This is a total mess. There must be a better way to get
+ # the code blocks in the right order.
+
+ self.fixupOrderHonorNext(blocks, default_next)
+ self.fixupOrderForward(blocks, default_next)
+
+ def fixupOrderHonorNext(self, blocks, default_next):
+ """Fix one problem with DFS.
+
+ The DFS uses child block, but doesn't know about the special
+ "next" block. As a result, the DFS can order blocks so that a
+ block isn't next to the right block for implicit control
+ transfers.
+ """
+ index = {}
+ for i in range(len(blocks)):
+ index[blocks[i]] = i
+
+ for i in range(0, len(blocks) - 1):
+ b = blocks[i]
+ n = blocks[i + 1]
+ if not b.next or b.next[0] == default_next or b.next[0] == n:
+ continue
+ # The blocks are in the wrong order. Find the chain of
+ # blocks to insert where they belong.
+ cur = b
+ chain = []
+ elt = cur
+ while elt.next and elt.next[0] != default_next:
+ chain.append(elt.next[0])
+ elt = elt.next[0]
+ # Now remove the blocks in the chain from the current
+ # block list, so that they can be re-inserted.
+ l = []
+ for b in chain:
+ assert index[b] > i
+ l.append((index[b], b))
+ l.sort()
+ l.reverse()
+ for j, b in l:
+ del blocks[index[b]]
+ # Insert the chain in the proper location
+ blocks[i:i + 1] = [cur] + chain
+ # Finally, re-compute the block indexes
+ for i in range(len(blocks)):
+ index[blocks[i]] = i
+
+ def fixupOrderForward(self, blocks, default_next):
+ """Make sure all JUMP_FORWARDs jump forward"""
+ index = {}
+ chains = []
+ cur = []
+ for b in blocks:
+ index[b] = len(chains)
+ cur.append(b)
+ if b.next and b.next[0] == default_next:
+ chains.append(cur)
+ cur = []
+ chains.append(cur)
+
+ while 1:
+ constraints = []
+
+ for i in range(len(chains)):
+ l = chains[i]
+ for b in l:
+ for c in b.get_children():
+ if index[c] < i:
+ forward_p = 0
+ for inst in b.insts:
+ if inst[0] == 'JUMP_FORWARD':
+ if inst[1] == c:
+ forward_p = 1
+ if not forward_p:
+ continue
+ constraints.append((index[c], i))
+
+ if not constraints:
+ break
+
+ # XXX just do one for now
+ # do swaps to get things in the right order
+ goes_before, a_chain = constraints[0]
+ assert a_chain > goes_before
+ c = chains[a_chain]
+ chains.remove(c)
+ chains.insert(goes_before, c)
+
+ del blocks[:]
+ for c in chains:
+ for b in c:
+ blocks.append(b)
+
def getBlocks(self):
return self.blocks.elements()
@@ -94,84 +205,27 @@ class FlowGraph:
l.extend(b.getContainedGraphs())
return l
-
-def order_blocks(start_block, exit_block):
- """Order blocks so that they are emitted in the right order"""
- # Rules:
- # - when a block has a next block, the next block must be emitted just after
- # - when a block has followers (relative jumps), it must be emitted before
- # them
- # - all reachable blocks must be emitted
+def dfs_postorder(b, seen):
+ """Depth-first search of tree rooted at b, return in postorder"""
order = []
-
- # Find all the blocks to be emitted.
- remaining = set()
- todo = [start_block]
- while todo:
- b = todo.pop()
- if b in remaining:
- continue
- remaining.add(b)
- for c in b.get_children():
- if c not in remaining:
- todo.append(c)
-
- # A block is dominated by another block if that block must be emitted
- # before it.
- dominators = {}
- for b in remaining:
- if __debug__ and b.next:
- assert b is b.next[0].prev[0], (b, b.next)
- # Make sure every block appears in dominators, even if no
- # other block must precede it.
- dominators.setdefault(b, set())
- # preceding blocks dominate following blocks
- for c in b.get_followers():
- while 1:
- dominators.setdefault(c, set()).add(b)
- # Any block that has a next pointer leading to c is also
- # dominated because the whole chain will be emitted at once.
- # Walk backwards and add them all.
- if c.prev and c.prev[0] is not b:
- c = c.prev[0]
- else:
- break
-
- def find_next():
- # Find a block that can be emitted next.
- for b in remaining:
- for c in dominators[b]:
- if c in remaining:
- break # can't emit yet, dominated by a remaining block
- else:
- return b
- assert 0, 'circular dependency, cannot find next block'
-
- b = start_block
- while 1:
- order.append(b)
- remaining.discard(b)
- if b.next:
- b = b.next[0]
+ seen[b] = b
+ for c in b.get_children():
+ if c in seen:
continue
- elif b is not exit_block and not b.has_unconditional_transfer():
- order.append(exit_block)
- if not remaining:
- break
- b = find_next()
+ order = order + dfs_postorder(c, seen)
+ order.append(b)
return order
-
class Block:
_count = 0
def __init__(self, label=''):
self.insts = []
- self.outEdges = set()
+ self.inEdges = misc.Set()
+ self.outEdges = misc.Set()
self.label = label
self.bid = Block._count
self.next = []
- self.prev = []
Block._count = Block._count + 1
def __repr__(self):
@@ -187,46 +241,51 @@ class Block:
def emit(self, inst):
op = inst[0]
+ if op[:4] == 'JUMP':
+ self.outEdges.add(inst[1])
self.insts.append(inst)
def getInstructions(self):
return self.insts
+ def addInEdge(self, block):
+ self.inEdges.add(block)
+
def addOutEdge(self, block):
self.outEdges.add(block)
def addNext(self, block):
self.next.append(block)
assert len(self.next) == 1, map(str, self.next)
- block.prev.append(self)
- assert len(block.prev) == 1, map(str, block.prev)
- _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS',
- 'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP',
- )
+ _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
+ 'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')
- def has_unconditional_transfer(self):
- """Returns True if there is an unconditional transfer to an other block
- at the end of this block. This means there is no risk for the bytecode
- executer to go past this block's bytecode."""
+ def pruneNext(self):
+ """Remove bogus edge for unconditional transfers
+
+ Each block has a next edge that accounts for implicit control
+ transfers, e.g. from a JUMP_IF_FALSE to the block that will be
+ executed if the test is true.
+
+ These edges must remain for the current assembler code to
+ work. If they are removed, the dfs_postorder gets things in
+ weird orders. However, they shouldn't be there for other
+ purposes, e.g. conversion to SSA form. This method will
+ remove the next edge when it follows an unconditional control
+ transfer.
+ """
try:
op, arg = self.insts[-1]
except (IndexError, ValueError):
return
- return op in self._uncond_transfer
+ if op in self._uncond_transfer:
+ self.next = []
def get_children(self):
- return list(self.outEdges) + self.next
-
- def get_followers(self):
- """Get the whole list of followers, including the next block."""
- followers = set(self.next)
- # Blocks that must be emitted *after* this one, because of
- # bytecode offsets (e.g. relative jumps) pointing to them.
- for inst in self.insts:
- if inst[0] in PyFlowGraph.hasjrel:
- followers.add(inst[1])
- return followers
+ if self.next and self.next[0] in self.outEdges:
+ self.outEdges.remove(self.next[0])
+ return self.outEdges.elements() + self.next
def getContainedGraphs(self):
"""Return all graphs contained within this block.
@@ -387,18 +446,18 @@ class PyFlowGraph(FlowGraph):
elif inst[0] != "SET_LINENO":
pc = pc + 3
opname = inst[0]
- if opname in self.hasjrel:
+ if self.hasjrel.has_elt(opname):
oparg = inst[1]
offset = begin[oparg] - pc
insts[i] = opname, offset
- elif opname in self.hasjabs:
+ elif self.hasjabs.has_elt(opname):
insts[i] = opname, begin[inst[1]]
self.stage = FLAT
- hasjrel = set()
+ hasjrel = misc.Set()
for i in dis.hasjrel:
hasjrel.add(dis.opname[i])
- hasjabs = set()
+ hasjabs = misc.Set()
for i in dis.hasjabs:
hasjabs.add(dis.opname[i])
diff --git a/Lib/compiler/pycodegen.py b/Lib/compiler/pycodegen.py
index 6515945..3f641ba 100644
--- a/Lib/compiler/pycodegen.py
+++ b/Lib/compiler/pycodegen.py
@@ -706,7 +706,7 @@ class CodeGenerator:
self.startBlock(anchor)
self.emit('POP_BLOCK')
self.setups.pop()
- self.nextBlock(end)
+ self.startBlock(end)
self.emit('LOAD_CONST', None)
--
2.2.0.rc0.207.ga3a616c
Description: Support TLS SNI extension in ssl module
Author: markk
Bug-Python: http://bugs.python.org/issue5639
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -202,6 +202,7 @@
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_SSLv23, ca_certs=None,
+ server_hostname=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None):
# Can't use sock.type as other flags (such as SOCK_NONBLOCK) get
@@ -238,6 +239,7 @@
self._sslobj = _ssl.sslwrap(self._sock, server_side,
keyfile, certfile,
cert_reqs, ssl_version, ca_certs,
+ server_hostname,
ciphers)
if do_handshake_on_connect:
self.do_handshake()
@@ -246,6 +248,7 @@
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.ca_certs = ca_certs
+ self.server_hostname = server_hostname
self.ciphers = ciphers
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
@@ -411,7 +414,7 @@
raise ValueError("attempt to connect already-connected SSLSocket!")
self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile,
self.cert_reqs, self.ssl_version,
- self.ca_certs, self.ciphers)
+ self.ca_certs, self.server_hostname, self.ciphers)
try:
if return_errno:
rc = socket.connect_ex(self, addr)
@@ -452,6 +455,7 @@
cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version,
ca_certs=self.ca_certs,
+ server_hostname=None,
ciphers=self.ciphers,
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs),
@@ -566,7 +570,7 @@
sock = sock._sock
ssl_sock = _ssl.sslwrap(sock, 0, keyfile, certfile, CERT_NONE,
- PROTOCOL_SSLv23, None)
+ PROTOCOL_SSLv23, None, None, None)
try:
sock.getpeername()
except socket_error:
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -267,7 +267,7 @@
enum py_ssl_server_or_client socket_type,
enum py_ssl_cert_requirements certreq,
enum py_ssl_version proto_version,
- char *cacerts_file, char *ciphers)
+ char *cacerts_file, char *server_hostname, char *ciphers)
{
PySSLObject *self;
char *errstr = NULL;
@@ -389,6 +389,14 @@
PySSL_BEGIN_ALLOW_THREADS
self->ssl = SSL_new(self->ctx); /* New ssl struct */
+#if OPENSSL_VERSION_NUMBER >= 0x0090806fL && !defined(OPENSSL_NO_TLSEXT)
+ /* If SNI isn't supported, we just don't call it and fail silently,
+ * as there's not much else we can do.
+ */
+ if ((socket_type == PY_SSL_CLIENT) &&
+ (proto_version != PY_SSL_VERSION_SSL2) && server_hostname)
+ SSL_set_tlsext_host_name(self->ssl, server_hostname);
+#endif
PySSL_END_ALLOW_THREADS
SSL_set_fd(self->ssl, Sock->sock_fd); /* Set the socket for SSL */
#ifdef SSL_MODE_AUTO_RETRY
@@ -431,15 +439,16 @@
char *key_file = NULL;
char *cert_file = NULL;
char *cacerts_file = NULL;
+ char *server_hostname = NULL;
char *ciphers = NULL;
- if (!PyArg_ParseTuple(args, "O!i|zziizz:sslwrap",
+ if (!PyArg_ParseTuple(args, "O!i|zziizzz:sslwrap",
PySocketModule.Sock_Type,
&Sock,
&server_side,
&key_file, &cert_file,
&verification_mode, &protocol,
- &cacerts_file, &ciphers))
+ &cacerts_file, &server_hostname, &ciphers))
return NULL;
/*
@@ -452,13 +461,13 @@
return (PyObject *) newPySSLObject(Sock, key_file, cert_file,
server_side, verification_mode,
- protocol, cacerts_file,
+ protocol, cacerts_file, server_hostname,
ciphers);
}
PyDoc_STRVAR(ssl_doc,
"sslwrap(socket, server_side, [keyfile, certfile, certs_mode, protocol,\n"
-" cacertsfile, ciphers]) -> sslobject");
+" cacertsfile, ciphers, server_hostname]) -> sslobject");
/* SSL object methods */
Author: Arnaud Fontaine <arnaud.fontaine@nexedi.com>
Description: Enable TLS SNI support for httplib
--- a/Lib/httplib.py 2014-07-31 14:50:21.178088529 +0900
+++ b/Lib/httplib.py 2014-07-31 20:11:09.279081382 +0900
@@ -1195,7 +1195,12 @@
if self._tunnel_host:
self.sock = sock
self._tunnel()
- self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
+ server_hostname = self._tunnel_host
+ else:
+ server_hostname = self.host
+
+ self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+ server_hostname=server_hostname)
__all__.append("HTTPSConnection")
--- a/Lib/ssl.py 2014-07-31 19:33:21.911968158 +0900
+++ b/Lib/ssl.py 2014-07-31 19:33:57.428391985 +0900
@@ -481,14 +481,15 @@
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True,
- suppress_ragged_eofs=True, ciphers=None):
+ suppress_ragged_eofs=True, ciphers=None,
+ server_hostname=None):
return SSLSocket(sock, keyfile=keyfile, certfile=certfile,
server_side=server_side, cert_reqs=cert_reqs,
ssl_version=ssl_version, ca_certs=ca_certs,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
- ciphers=ciphers)
+ ciphers=ciphers, server_hostname=server_hostname)
# some utility functions
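
Taken together, these patches let Python 2.7 clients send the TLS SNI extension: _ssl.sslwrap grows a server_hostname argument, ssl.wrap_socket exposes it, and httplib passes the HTTP host (or tunnel host) through. A minimal client-side sketch against the patched module; the hostname is only an illustration:

import socket
import ssl

hostname = 'www.example.org'  # placeholder peer that serves several certificates

sock = socket.create_connection((hostname, 443))
# server_hostname is the keyword added by the patch above; unpatched Python
# 2.7.8 does not accept it, so this only works with the patched ssl module.
tls = ssl.wrap_socket(sock, server_hostname=hostname)
print(tls.cipher())
tls.close()
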
[buildout]
[python-cliff]
recipe = zc.recipe.egg:custom
egg = cliff
setup-eggs = ${python-cliff-prep:eggs}
[python-cliff-prep]
recipe = zc.recipe.egg
eggs =
pbr
...@@ -22,5 +22,6 @@ CPATH = ${openssl:location}/include ...@@ -22,5 +22,6 @@ CPATH = ${openssl:location}/include
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
${python-cffi:egg} ${python-cffi:egg}
enum34
pycparser pycparser
six six
[buildout]
extends =
# ../openssl/buildout.cfg
../kerberos/buildout.cfg
# ../pkgconfig/buildout.cfg
parts =
python-kerberos
[python-kerberos]
recipe = zc.recipe.egg:custom
egg = kerberos
environment = python-kerberos-env
#setup-eggs = ${python-cryptography-prep:eggs}
[python-kerberos-env]
PATH = ${kerberos:location}/bin:%(PATH)s
#PKG_CONFIG_PATH = ${openssl:location}/lib/pkgconfig
LD_LIBRARY_PATH = ${kerberos:location}/lib
CPATH = ${kerberos:location}/include
[python-cryptography-prep]
recipe = zc.recipe.egg
eggs =
${python-cffi:egg}
enum34
pycparser
six
...@@ -15,10 +15,10 @@ extends = ...@@ -15,10 +15,10 @@ extends =
[kvm] [kvm]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
# qemu-kvm and qemu are now the same since 1.3. # qemu-kvm and qemu are now the same since 1.3.
url = http://wiki.qemu-project.org/download/qemu-1.6.1.tar.bz2 url = http://wiki.qemu-project.org/download/qemu-2.2.1.tar.bz2
md5sum = 3a897d722457c5a895cd6ac79a28fda0 md5sum = 833ff4457062456d38d6567f802ffef4
configure-options = configure-options =
--target-list=x86_64-softmmu --target-list="$(uname -m 2>/dev/null|sed 's,^i[456]86$,i386,')-softmmu"
--enable-system --enable-system
--with-system-pixman --with-system-pixman
--disable-sdl --disable-sdl
...@@ -40,25 +40,12 @@ environment = ...@@ -40,25 +40,12 @@ environment =
PKG_CONFIG_PATH=${gnutls:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig PKG_CONFIG_PATH=${gnutls:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig
LDFLAGS=-L${pixman:location}/lib -Wl,-rpath=${pixman:location}/lib LDFLAGS=-L${pixman:location}/lib -Wl,-rpath=${pixman:location}/lib
# The following is only available in buildout2, which we don't use yet.
[kvm-bits64]
configure-options =
--target-list=x86_64-softmmu
${kvm:configure-options}
[kvm-bits32]
configure-options =
--target-list=i386-softmmu
${kvm:configure-options}
[debian-amd64-netinst.iso] [debian-amd64-netinst.iso]
# Download the installer of Debian 7 (Wheezy) # Download the installer of Debian 8 (Jessie)
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = http://cdimage.debian.org/debian-cd/7.2.0/amd64/iso-cd/debian-7.2.0-amd64-netinst.iso url = http://cdimage.debian.org/debian-cd/8.1.0/amd64/iso-cd/debian-8.1.0-amd64-netinst.iso
filename = ${:_buildout_section_name_} filename = ${:_buildout_section_name_}
md5sum = b86774fe4de88be6378ba3d71b8029bd md5sum = 1a311f9afb68d6365211b13b4342c40b
download-only = true download-only = true
mode = 0644 mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
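
The new --target-list expression in the kvm part above simply derives the qemu system target from the build host's architecture, folding i486/i586/i686 into i386. The same mapping, written out in Python for illustration:

import platform
import re

def qemu_softmmu_target(machine=None):
    # Mirror the shell expression: uname -m | sed 's,^i[456]86$,i386,'
    m = machine or platform.machine()
    m = re.sub(r'^i[456]86$', 'i386', m)
    return m + '-softmmu'

print(qemu_softmmu_target('i686'))    # -> i386-softmmu
print(qemu_softmmu_target('x86_64'))  # -> x86_64-softmmu
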
[buildout]
extends =
../bzip2/buildout.cfg
../gcc/buildout.cfg
../pcre/buildout.cfg
../readline/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts =
gcc-fortran
r-language
[r-language]
recipe = slapos.recipe.cmmi
url = http://cran.univ-paris1.fr/src/base/R-3/R-3.2.0.tar.gz
md5sum = 66fa17ad457d7e618191aa0f52fc402e
configure-options =
--enable-R-shlib
--with-readline=yes
--without-tcltk
--without-cairo
--without-libpng
--without-jpeglib
--without-libtiff
--with-system-zlib
--with-system-bzlib
--with-system-pcre
--with-system-xz
--without-ICU
--without-x
environment =
PATH=${gcc-fortran:location}/bin:%(PATH)s
CPPFLAGS=-I${bzip2:location}/include -I${pcre:location}/include -I${readline:location}/include -I${ncurses:location}/include -I${xz-utils:location}/include -I${zlib:location}/include
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${gcc-fortran:location}/lib -Wl,-rpath=${gcc-fortran:location}/lib -L${gcc-fortran:location}/lib64 -Wl,-rpath=${gcc-fortran:location}/lib64 -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${xz-utils:location}/lib -Wl,-rpath=${xz-utils:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
# A minimal software release to test the r-language component (depends on gfortran).
[buildout]
extends =
../../stack/slapos.cfg
../../component/gcc/buildout.cfg
../../component/r-language/buildout.cfg
parts =
eggs
gcc-fortran
r-language
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
slapos.cookbook
...@@ -8,7 +8,7 @@ parts = ...@@ -8,7 +8,7 @@ parts =
[ragel] [ragel]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://www.complang.org/ragel/ragel-6.8.tar.gz url = http://www.colm.net/files/ragel/ragel-6.8.tar.gz
md5sum = 1bb39745ac23da449019f9f2cb4b0d01 md5sum = 1bb39745ac23da449019f9f2cb4b0d01
configure-options = configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_} --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
......
[buildout]
extends =
../../stack/slapos.cfg
../git/buildout.cfg
../babeld/buildout.cfg
../openvpn/buildout.cfg
develop =
re6stnet-repository
parts =
babeld
re6stnet-develop
re6stnet
[re6stnet-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/re6stnet.git
branch = master
git-executable = ${git:location}/bin/git
[re6stnet-develop]
recipe = zc.recipe.egg:develop
setup = ${re6stnet-hack:dir}
[re6stnet-hack]
recipe = plone.recipe.command
stop-on-error = true
dir = ${re6stnet-repository:location}
command =
rm -f "${:dir}/re6stconf.py" && ln -s re6st-conf "${:dir}/re6stconf.py"
rm -f "${:dir}/re6stregistry.py" && ln -s re6st-registry "${:dir}/re6stregistry.py"
rm -f "${:dir}/re6stnet.py" && ln -s re6stnet "${:dir}/re6stnet.py"
sed -i 's#("git",)#("${git:location}/bin/git",)#' ${:dir}/re6st/version.py
update-command = ${:command}
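
The re6stnet-hack part exists because the zc.recipe.egg entry-points further below import re6stconf, re6stregistry and re6stnet as modules, while the checkout only ships dash-named scripts; the symlinks bridge the two. A rough Python equivalent of the command, with a hypothetical checkout path in place of ${re6stnet-repository:location}:

import os

checkout = '/opt/slapos/parts/re6stnet-repository'  # placeholder for ${re6stnet-repository:location}

# Expose the dash-named scripts under importable module names,
# like the rm/ln -s lines in the command above.
for module, script in (('re6stconf.py', 're6st-conf'),
                       ('re6stregistry.py', 're6st-registry'),
                       ('re6stnet.py', 're6stnet')):
    link = os.path.join(checkout, module)
    if os.path.lexists(link):
        os.remove(link)
    os.symlink(script, link)
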
[environment]
# Note: For now original PATH is appended to the end, as not all tools are
# provided by SlapOS
PATH=${openvpn:location}/sbin:${babeld:location}/bin:${bzip2:location}/bin:${gettext:location}/bin:${glib:location}/bin:${libxml2:location}/bin:${libxslt:location}/bin:${ncurses:location}/bin:${openssl:location}/bin:${pkgconfig:location}/bin:${python2.7:location}/bin:${readline:location}/bin:${sqlite3:location}/bin:${buildout:bin-directory}:${xz-utils:location}/bin:$PATH
[re6stnet]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
${python-cffi:egg}
${python-cryptography:egg}
pyOpenSSL
re6stnet
script =
re6stnet
re6st-conf
re6st-registry
initialization =
import os
os.environ['PATH'] = os.path.expandvars('${environment:PATH}')
entry-points =
re6stnet=re6stnet:main
re6st-conf=re6stconf:main
re6st-registry=re6stregistry:main
[buildout] [buildout]
extends =
../ncurses/buildout.cfg
parts = parts =
readline5 readline5
readline readline
...@@ -40,6 +43,10 @@ patches = ...@@ -40,6 +43,10 @@ patches =
http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-006#71dc6ecce66d1489b96595f55d142a52 http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-006#71dc6ecce66d1489b96595f55d142a52
http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-007#062a08ed60679d3c4878710b3d595b65 http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-007#062a08ed60679d3c4878710b3d595b65
http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-008#ee1c04072154826870848d8b218d7b04 http://ftp.gnu.org/gnu/readline/readline-6.3-patches/readline63-008#ee1c04072154826870848d8b218d7b04
${:_profile_base_location_}/configure-ncurses.diff#c2bf0d0cb0ccd3d3c078ba124fffb7d2
configure-options = configure-options =
--enable-multibyte --enable-multibyte
--disable-static --disable-static
environment =
CPPFLAGS=-I${ncurses:location}/include/ncurses
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
diff -ur ../readline-6.3.orig/aclocal.m4 ./aclocal.m4
--- ../readline-6.3.orig/aclocal.m4 2013-10-21 00:37:50.000000000 +0200
+++ ./aclocal.m4 2015-05-21 11:04:20.243591527 +0200
@@ -958,10 +958,10 @@
fi
AC_CACHE_VAL(bash_cv_termcap_lib,
[AC_CHECK_FUNC(tgetent, bash_cv_termcap_lib=libc,
- [AC_CHECK_LIB(termcap, tgetent, bash_cv_termcap_lib=libtermcap,
- [AC_CHECK_LIB(tinfo, tgetent, bash_cv_termcap_lib=libtinfo,
+ [AC_CHECK_LIB(ncurses, tgetent, bash_cv_termcap_lib=libncurses,
+ [AC_CHECK_LIB(termcap, tgetent, bash_cv_termcap_lib=libtermcap,
+ [AC_CHECK_LIB(tinfo, tgetent, bash_cv_termcap_lib=libtinfo,
[AC_CHECK_LIB(curses, tgetent, bash_cv_termcap_lib=libcurses,
- [AC_CHECK_LIB(ncurses, tgetent, bash_cv_termcap_lib=libncurses,
bash_cv_termcap_lib=gnutermcap)])])])])])
if test "X$_bash_needmsg" = "Xyes"; then
AC_MSG_CHECKING(which library has the termcap functions)
Only in ./: aclocal.m4~
Only in ./: autom4te.cache
diff -ur ../readline-6.3.orig/configure ./configure
--- ../readline-6.3.orig/configure 2013-03-13 15:14:53.000000000 +0100
+++ ./configure 2015-05-21 11:29:48.157514191 +0200
@@ -5793,6 +5793,45 @@
if test "x$ac_cv_func_tgetent" = xyes; then :
bash_cv_termcap_lib=libc
else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for tgetent in -lncurses" >&5
+$as_echo_n "checking for tgetent in -lncurses... " >&6; }
+if ${ac_cv_lib_ncurses_tgetent+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_check_lib_save_LIBS=$LIBS
+LIBS="-lncurses $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+/* Override any GCC internal prototype to avoid an error.
+ Use char because int might match the return type of a GCC
+ builtin and then its argument prototype would still apply. */
+#ifdef __cplusplus
+extern "C"
+#endif
+char tgetent ();
+int
+main ()
+{
+return tgetent ();
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+ ac_cv_lib_ncurses_tgetent=yes
+else
+ ac_cv_lib_ncurses_tgetent=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncurses_tgetent" >&5
+$as_echo "$ac_cv_lib_ncurses_tgetent" >&6; }
+if test "x$ac_cv_lib_ncurses_tgetent" = xyes; then :
+ bash_cv_termcap_lib=libncurses
+else
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for tgetent in -ltermcap" >&5
$as_echo_n "checking for tgetent in -ltermcap... " >&6; }
if ${ac_cv_lib_termcap_tgetent+:} false; then :
@@ -5910,45 +5949,6 @@
if test "x$ac_cv_lib_curses_tgetent" = xyes; then :
bash_cv_termcap_lib=libcurses
else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for tgetent in -lncurses" >&5
-$as_echo_n "checking for tgetent in -lncurses... " >&6; }
-if ${ac_cv_lib_ncurses_tgetent+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-lncurses $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char tgetent ();
-int
-main ()
-{
-return tgetent ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_ncurses_tgetent=yes
-else
- ac_cv_lib_ncurses_tgetent=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncurses_tgetent" >&5
-$as_echo "$ac_cv_lib_ncurses_tgetent" >&6; }
-if test "x$ac_cv_lib_ncurses_tgetent" = xyes; then :
- bash_cv_termcap_lib=libncurses
-else
bash_cv_termcap_lib=gnutermcap
fi
@@ -6322,10 +6322,7 @@
$as_echo_n "(cached) " >&6
else
if test "$cross_compiling" = yes; then :
- { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot run test program while cross compiling
-See \`config.log' for more details" "$LINENO" 5; }
+ bash_cv_wcwidth_broken=no
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
@@ -6409,6 +6406,7 @@
# *curses*|*termcap*|*termlib*) ;;
# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;;
# esac
+ SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB"
diff -ur ../readline-6.3.orig/configure.ac ./configure.ac
--- ../readline-6.3.orig/configure.ac 2014-02-11 23:12:39.000000000 +0100
+++ ./configure.ac 2015-05-21 11:25:26.164610769 +0200
@@ -222,6 +222,7 @@
# *curses*|*termcap*|*termlib*) ;;
# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;;
# esac
+ SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB"
AC_SUBST(SHOBJ_CC)
AC_SUBST(SHOBJ_CFLAGS)
Only in ./: configure.ac~
[buildout]
extends =
../gcc/buildout.cfg
../pcre/buildout.cfg
../r-language/buildout.cfg
../xz-utils/buildout.cfg
# rpy2 needs R in $PATH
# to use rpy2 you need a wrapper that sets ${r-language:location}/bin in PATH
# see ../manpy/buildout.cfg as an example
[rpy2_env]
PATH = ${gcc-fortran:location}/bin:${r-language:location}/bin:%(PATH)s
[rpy2]
recipe = zc.recipe.egg:custom
environment = rpy2_env
egg = rpy2
initialization =
# rpy2 needs R in $PATH
import os
os.environ['PATH'] = '${r-language:location}/bin' + os.pathsep + os.environ.get('PATH', '')
rpath =
${gcc-fortran:location}/lib
${gcc-fortran:location}/lib64
${pcre:location}/lib
${readline:location}/lib
${xz-utils:location}/lib
${r-language:location}/lib/R/lib
library-dirs =
${pcre:location}/lib
${readline:location}/lib
${xz-utils:location}/lib
include-dirs =
${readline:location}/include
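
As the comment above says, rpy2 can only be imported once R is on $PATH, since it needs the R executable to start the embedded interpreter; in SlapOS that is done by a wrapper such as the one in ../manpy/buildout.cfg. A minimal sketch of such a wrapper, with a hypothetical path standing in for ${r-language:location}/bin:

import os

R_BIN = '/opt/slapos/parts/r-language/bin'  # placeholder for ${r-language:location}/bin

# PATH must be adjusted before the first rpy2 import, which starts embedded R.
os.environ['PATH'] = R_BIN + os.pathsep + os.environ.get('PATH', '')

import rpy2.robjects as robjects
print(robjects.r('R.version.string')[0])
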
...@@ -4,7 +4,7 @@ parts = ...@@ -4,7 +4,7 @@ parts =
[rsync] [rsync]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://rsync.samba.org/ftp/rsync/src/rsync-3.0.9.tar.gz url = http://rsync.samba.org/ftp/rsync/src/rsync-3.1.1.tar.gz
md5sum = 5ee72266fe2c1822333c407e1761b92b md5sum = 43bd6676f0b404326eee2d63be3cdcfe
make-options = make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_} PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
...@@ -2,32 +2,39 @@ ...@@ -2,32 +2,39 @@
extends = extends =
../gdbm/buildout.cfg ../gdbm/buildout.cfg
../openssl/buildout.cfg ../libffi/buildout.cfg
../zlib/buildout.cfg
../readline/buildout.cfg
../ncurses/buildout.cfg
../libyaml/buildout.cfg ../libyaml/buildout.cfg
../openssl/buildout.cfg
../ncurses/buildout.cfg ../ncurses/buildout.cfg
../openssl/buildout.cfg
../readline/buildout.cfg ../readline/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg ../zlib/buildout.cfg
parts = ruby parts = ruby
[ruby-1.9.3] [ruby-common]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.3-p194.tar.gz
md5sum = bc0c715c69da4d1d8bd57069c19f6c0e
configure-options = configure-options =
--enable-shared --enable-rpath --disable-install-doc --enable-shared --enable-rpath --disable-install-doc
--enable-pthread --enable-ipv6 --with-out-ext=tk,tk/tkutil,win32,win32ole
--with-ext=bigdecimal,continuation,curses,date,digest,dl,etc,fcntl,iconv,io,json,nkf,openssl,pathname,psych,pty,racc,readline,ripper,sdbm,socket,stringio,strscan,syck,syslog,zlib
environment = environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${libyaml:location}/include -I${ncurses:location}/include -I${ncurses:location}/include/ncursesw -I${gdbm:location}/include -I${openssl:location}/include PATH=${xz-utils:location}/bin:%(PATH)s
LDFLAGS=-L${zlib:location}/lib -L${libyaml:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${libyaml:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib CPPFLAGS=-I${gdbm:location}/include -I${libffi:location}/include -I${libyaml:location}/include -I${ncurses:location}/include -I${ncurses:location}/include/ncursesw -I${openssl:location}/include -I${readline:location}/include -I${zlib:location}/include
LDFLAGS=-L${gdbm:location}/lib -L${libffi:location}/lib -L${libyaml:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${readline:location}/lib -L${zlib:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${libyaml:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${zlib:location}/lib
PKG_CONFIG_PATH=${libyaml:location}/lib/ PKG_CONFIG_PATH=${libyaml:location}/lib/
[ruby-1.9]
<= ruby-1.9.3 [ruby2.1]
<= ruby-common
url = http://ftp.ruby-lang.org/pub/ruby/2.1/ruby-2.1.6.tar.xz
md5sum = ec6f10ca331ce947802ede86259513a8
[ruby2.2]
<= ruby-common
url = http://ftp.ruby-lang.org/pub/ruby/2.2/ruby-2.2.2.tar.xz
md5sum = dbce9b9d79d90f213ba8d448b0b6ed86
[ruby] [ruby]
<= ruby2.2
[buildout]
extends =
../numpy/buildout.cfg
../scipy/buildout.cfg
parts =
scikit-learn
[scikit-learn-env]
<= numpy-env
[scikit-learn]
recipe = zc.recipe.egg:custom
egg = scikit-learn
environment = scikit-learn-env
setup-eggs = ${numpy:egg}
[buildout]
extends =
../numpy/buildout.cfg
parts =
scipy
[scipy-env]
<= numpy-env
[scipy]
recipe = zc.recipe.egg:custom
egg = scipy
environment = scipy-env
setup-eggs = ${numpy:egg}
rpath = ${numpy:rpath}
...@@ -114,7 +114,3 @@ eggs = ...@@ -114,7 +114,3 @@ eggs =
${slapos:eggs} ${slapos:eggs}
interpreter = py interpreter = py
scripts = py scripts = py
[versions]
collective.recipe.template = 1.11
z3c.recipe.scripts = 1.0.1
...@@ -5,11 +5,14 @@ parts = ...@@ -5,11 +5,14 @@ parts =
[sqlite3] [sqlite3]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://www.sqlite.org/2014/sqlite-autoconf-3080701.tar.gz url = http://sqlite.org/2015/sqlite-autoconf-3081002.tar.gz
md5sum = 8ee4541ebb3e5739e7ef5e9046e30063 md5sum = a18bfc015cd49a1e7a961b7b77bc3b37
configure-options = configure-options =
--disable-static --disable-static
--enable-readline --enable-readline
patch-options = -p1
patches =
${:_profile_base_location_}/dynamic_link.patch#328d3773b49151207d29a5a857f98337
environment = environment =
CPPFLAGS=-I${readline:location}/include -I${ncurses:location}/include CPPFLAGS=-I${readline:location}/include -I${ncurses:location}/include
LDFLAGS=-L${buildout:parts-directory}/${:_buildout_section_name_} -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib LDFLAGS=-L${buildout:parts-directory}/${:_buildout_section_name_} -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib
diff -ur sqlite-autoconf-3081002/Makefile.am sqlite-autoconf-3081002/Makefile.am
--- sqlite-autoconf-3081002/Makefile.am 2015-04-08 16:38:50.000000000 +0200
+++ sqlite-autoconf-3081002/Makefile.am 2015-05-09 14:23:20.000000000 +0200
@@ -7,7 +7,8 @@
bin_PROGRAMS = sqlite3
sqlite3_SOURCES = shell.c sqlite3.h
-sqlite3_LDADD = sqlite3.$(OBJEXT) @READLINE_LIBS@
+sqlite3_LDADD = $(top_builddir)/libsqlite3.la @READLINE_LIBS@
+sqlite3_DEPENDENCIES = $(top_builddir)/libsqlite3.la
include_HEADERS = sqlite3.h sqlite3ext.h
diff -ur sqlite-autoconf-3081002/Makefile.in sqlite-autoconf-3081002/Makefile.in
--- sqlite-autoconf-3081002/Makefile.in 2015-04-08 16:38:52.000000000 +0200
+++ sqlite-autoconf-3081002/Makefile.in 2015-05-09 14:23:23.000000000 +0200
@@ -109,7 +109,6 @@
PROGRAMS = $(bin_PROGRAMS)
am_sqlite3_OBJECTS = shell.$(OBJEXT)
sqlite3_OBJECTS = $(am_sqlite3_OBJECTS)
-sqlite3_DEPENDENCIES = sqlite3.$(OBJEXT)
DEFAULT_INCLUDES = -I.@am__isrc@
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
@@ -274,7 +273,8 @@
libsqlite3_la_SOURCES = sqlite3.c
libsqlite3_la_LDFLAGS = -no-undefined -version-info 8:6:8
sqlite3_SOURCES = shell.c sqlite3.h
-sqlite3_LDADD = sqlite3.$(OBJEXT) @READLINE_LIBS@
+sqlite3_LDADD = $(top_builddir)/libsqlite3.la @READLINE_LIBS@
+sqlite3_DEPENDENCIES = $(top_builddir)/libsqlite3.la
include_HEADERS = sqlite3.h sqlite3ext.h
EXTRA_DIST = sqlite3.1 tea
pkgconfigdir = ${libdir}/pkgconfig
...@@ -6,13 +6,15 @@ parts = ...@@ -6,13 +6,15 @@ parts =
squid squid
extends = extends =
../pkgconfig/buildout.cfg ../pkgconfig/buildout.cfg
../xz-utils/buildout.cfg
[squid] [squid]
recipe = hexagonit.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://www.squid-cache.org/Versions/v3/3.4/squid-3.4.7.tar.bz2 url = http://www.squid-cache.org/Versions/v3/3.4/squid-3.4.13.tar.xz
md5sum = 9951034b10f7ee0f45a95cfae61c57c2 md5sum = a5f6c978b2d7a99b161c8275e1acb470
configure-options = configure-options =
--disable-dependency-tracking --disable-dependency-tracking
--disable-static
--disable-translation --disable-translation
--disable-htcp --disable-htcp
--disable-snmp --disable-snmp
...@@ -20,6 +22,7 @@ configure-options = ...@@ -20,6 +22,7 @@ configure-options =
--disable-icmp --disable-icmp
--disable-esi --disable-esi
--disable-icap-client --disable-icap-client
--disable-ecap
--disable-wccp --disable-wccp
--disable-wccpv2 --disable-wccpv2
--disable-eui --disable-eui
...@@ -38,4 +41,4 @@ configure-options = ...@@ -38,4 +41,4 @@ configure-options =
--disable-auth-ntlm --disable-auth-ntlm
--with-krb5-config=no --with-krb5-config=no
environment = environment =
PATH=${pkgconfig:location}/bin:%(PATH)s PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
...@@ -6,8 +6,8 @@ parts = ...@@ -6,8 +6,8 @@ parts =
[stunnel] [stunnel]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = https://www.stunnel.org/downloads/stunnel-5.02.tar.gz url = https://www.stunnel.org/downloads/archive/5.x/stunnel-5.14.tar.gz
md5sum = bb48b1c18cfc0a42708ef996b1a26926 md5sum = e716501960dc6856d80f92547298f724
configure-options = configure-options =
--enable-ipv6 --enable-ipv6
--disable-libwrap --disable-libwrap
......
[buildout] [buildout]
extends = extends =
../autoconf/buildout.cfg
../automake/buildout.cfg ../automake/buildout.cfg
../jbigkit/buildout.cfg
../leptonica/buildout.cfg
../libpng/buildout.cfg ../libpng/buildout.cfg
../libtool/buildout.cfg ../libtool/buildout.cfg
../leptonica/buildout.cfg
../jbigkit/buildout.cfg
../webp/buildout.cfg ../webp/buildout.cfg
parts = parts =
...@@ -20,9 +21,8 @@ patches = ...@@ -20,9 +21,8 @@ patches =
${:_profile_base_location_}/tesseract-3.00-gcc-4.7-build.patch#ca80db3ec489c547b03f3ee48879c1b1 ${:_profile_base_location_}/tesseract-3.00-gcc-4.7-build.patch#ca80db3ec489c547b03f3ee48879c1b1
${:_profile_base_location_}/tesseract-3.01-remove-bom.patch#2e691858cb492b7c17d23bf0912b3d24 ${:_profile_base_location_}/tesseract-3.01-remove-bom.patch#2e691858cb492b7c17d23bf0912b3d24
configure-command = configure-command =
aclocal -I ${libtool:location}/share/aclocal -I config
libtoolize -f -c libtoolize -f -c
libtoolize --automake aclocal -I ${libtool:location}/share/aclocal -I config
autoheader -f autoheader -f
automake -c -a -f automake -c -a -f
autoconf -Wno-portability autoconf -Wno-portability
...@@ -38,6 +38,8 @@ environment = ...@@ -38,6 +38,8 @@ environment =
LIBLEPT_HEADERSDIR=${leptonica:location}/include LIBLEPT_HEADERSDIR=${leptonica:location}/include
CPPFLAGS=-I${leptonica:location}/include CPPFLAGS=-I${leptonica:location}/include
LDFLAGS =-L${leptonica:location}/lib -Wl,-rpath=${leptonica:location}/lib -L${jbigkit:location}/lib -Wl,-rpath=${jbigkit:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib LDFLAGS =-L${leptonica:location}/lib -Wl,-rpath=${leptonica:location}/lib -L${jbigkit:location}/lib -Wl,-rpath=${jbigkit:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
make-options =
LIBTOOL=${libtool:location}/bin/libtool
[tesseract-eng-traineddata] [tesseract-eng-traineddata]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
......
...@@ -35,6 +35,6 @@ depends = ...@@ -35,6 +35,6 @@ depends =
${perl-XML-LibXML:location} ${perl-XML-LibXML:location}
${perl-libwww-perl:location} ${perl-libwww-perl:location}
perl-path = ${perl:location}/bin/perl perl-path = ${perl:location}/bin/perl
# validator-1_2-release in http://dvcs.w3.org/hg/markup-validator/tags # validator-1_3-release in https://dvcs.w3.org/hg/markup-validator/tags
validator-url = http://dvcs.w3.org/hg/markup-validator/archive/754b7a64cdb4.tar.bz2 validator-url = https://dvcs.w3.org/hg/markup-validator/archive/6c6c7c1f69c5.tar.bz2
dtd-url = dtd-url =
[buildout]
extends = ../git/buildout.cfg
parts =
wendelin.core
# wendelin.core installed from released egg from pypi
[wendelin.core]
recipe = zc.recipe.egg:custom
egg = wendelin.core
# wendelin.core installed from latest git version
[wendelin.core-dev]
recipe = zc.recipe.egg:develop
egg = wendelin.core
setup = ${wendelin.core-repository-submoduleinit:location}
environment = wendelin.core-dev-env
[wendelin.core-dev-env]
# wendelin.core-dev needs git to build
PATH = ${git:location}/bin:%(PATH)s
[wendelin.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.cn/nexedi/wendelin.core.git
# the clone directory is given the same pretty name as the top-level -dev part
location = ${buildout:parts-directory}/wendelin.core-dev
git-executable = ${git:location}/bin/git
# TODO: add `git clone --recursive` support to slapos.recipe.build:gitclone
# so that this -submoduleinit part can be merged into the -repository part
[wendelin.core-repository-submoduleinit]
recipe = plone.recipe.command
command = cd "${wendelin.core-repository:location}" && ${git:location}/bin/git submodule update --init
stop-on-error = true
# propagate location of main repo
location= ${wendelin.core-repository:location}
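
Until gitclone learns the --recursive behaviour mentioned in the TODO, the -submoduleinit part runs the submodule update by hand; roughly the same thing in Python, with a hypothetical clone path:

import subprocess

location = '/opt/slapos/parts/wendelin.core-dev'  # placeholder for the clone location
git = 'git'                                       # buildout would use ${git:location}/bin/git

# Equivalent to the plone.recipe.command above (and to what a
# `git clone --recursive` would have set up in the first place).
subprocess.check_call([git, 'submodule', 'update', '--init'], cwd=location)
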
...@@ -10,8 +10,8 @@ parts = ...@@ -10,8 +10,8 @@ parts =
[wget] [wget]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/wget/wget-1.16.tar.xz url = http://ftp.gnu.org/gnu/wget/wget-1.16.3.tar.xz
md5sum = fe102975ab3a6c049777883f1bb9ad07 md5sum = d2e4455781a70140ae83b54ca594ce21
configure-options = configure-options =
--enable-ipv6 --enable-ipv6
--enable-opie --enable-opie
...@@ -24,10 +24,10 @@ patch-options = ...@@ -24,10 +24,10 @@ patch-options =
-p1 -p1
patches = patches =
${:_profile_base_location_}/wget-doc.makefile.patch#aabcc1695c7fb65ac44f295e04a2db78 ${:_profile_base_location_}/wget-doc.makefile.patch#0d23cf1ee81268a94699aebbb26058e6
environment = environment =
PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${openssl:location}/lib
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include -I${pcre:location}/include CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include -I${pcre:location}/include
...@@ -7,16 +7,16 @@ ...@@ -7,16 +7,16 @@
-SUBDIRS = lib src doc po tests util testenv -SUBDIRS = lib src doc po tests util testenv
+SUBDIRS = lib src po tests util testenv +SUBDIRS = lib src po tests util testenv
EXTRA_DIST = ChangeLog.README MAILING-LIST \ EXTRA_DIST = MAILING-LIST \
msdos/ChangeLog msdos/config.h msdos/Makefile.DJ \ msdos/config.h msdos/Makefile.DJ \
--- a/Makefile.in 2012-08-05 22:17:17.000000000 +0200 --- a/Makefile.in 2012-08-05 22:17:17.000000000 +0200
+++ b/Makefile.in 2013-06-21 15:05:04.351269286 +0200 +++ b/Makefile.in 2013-06-21 15:05:04.351269286 +0200
@@ -1195,7 +1195,7 @@ @@ -1322,7 +1322,7 @@
ACLOCAL_AMFLAGS = -I m4 ACLOCAL_AMFLAGS = -I m4
# subdirectories in the distribution # subdirectories in the distribution
-SUBDIRS = lib src doc po tests util testenv -SUBDIRS = lib src doc po tests util testenv
+SUBDIRS = lib src po tests util testenv +SUBDIRS = lib src po tests util testenv
EXTRA_DIST = ChangeLog.README MAILING-LIST \ EXTRA_DIST = MAILING-LIST \
msdos/ChangeLog msdos/config.h msdos/Makefile.DJ \ msdos/config.h msdos/Makefile.DJ \
msdos/Makefile.WC ABOUT-NLS \ msdos/Makefile.WC ABOUT-NLS \
...@@ -4,7 +4,7 @@ parts = ...@@ -4,7 +4,7 @@ parts =
[xz-utils] [xz-utils]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://tukaani.org/xz/xz-5.0.5.tar.bz2 url = http://tukaani.org/xz/xz-5.2.0.tar.bz2
md5sum = db44efe0d53ac4317627624b98c63da0 md5sum = 867cc8611760240ebf3440bd6e170bb9
configure-options = configure-options =
--disable-static --disable-static
...@@ -28,7 +28,7 @@ from setuptools import setup, find_packages ...@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob import glob
import os import os
version = '0.92' version = '0.102'
name = 'slapos.cookbook' name = 'slapos.cookbook'
long_description = open("README.txt").read() + "\n" + \ long_description = open("README.txt").read() + "\n" + \
open("CHANGES.txt").read() + "\n" open("CHANGES.txt").read() + "\n"
...@@ -87,6 +87,7 @@ setup(name=name, ...@@ -87,6 +87,7 @@ setup(name=name,
'check_page_content = slapos.recipe.check_page_content:Recipe', 'check_page_content = slapos.recipe.check_page_content:Recipe',
'check_port_listening = slapos.recipe.check_port_listening:Recipe', 'check_port_listening = slapos.recipe.check_port_listening:Recipe',
'check_url_available = slapos.recipe.check_url_available:Recipe', 'check_url_available = slapos.recipe.check_url_available:Recipe',
'check_parameter = slapos.recipe.check_parameter:Recipe',
'cloud9 = slapos.recipe.cloud9:Recipe', 'cloud9 = slapos.recipe.cloud9:Recipe',
'cloudooo.test = slapos.recipe.erp5_test:CloudoooRecipe', 'cloudooo.test = slapos.recipe.erp5_test:CloudoooRecipe',
'condor = slapos.recipe.condor:Recipe', 'condor = slapos.recipe.condor:Recipe',
...@@ -174,6 +175,7 @@ setup(name=name, ...@@ -174,6 +175,7 @@ setup(name=name,
'request.serialised = slapos.recipe.request:Serialised', 'request.serialised = slapos.recipe.request:Serialised',
'request.edge = slapos.recipe.request:RequestEdge', 'request.edge = slapos.recipe.request:RequestEdge',
'requestoptional = slapos.recipe.request:RequestOptional', 'requestoptional = slapos.recipe.request:RequestOptional',
're6stnet.registry = slapos.recipe.re6stnet:Recipe',
'reverseproxy.nginx = slapos.recipe.reverse_proxy_nginx:Recipe', 'reverseproxy.nginx = slapos.recipe.reverse_proxy_nginx:Recipe',
'seleniumrunner = slapos.recipe.seleniumrunner:Recipe', 'seleniumrunner = slapos.recipe.seleniumrunner:Recipe',
'sheepdogtestbed = slapos.recipe.sheepdogtestbed:SheepDogTestBed', 'sheepdogtestbed = slapos.recipe.sheepdogtestbed:SheepDogTestBed',
...@@ -181,6 +183,7 @@ setup(name=name, ...@@ -181,6 +183,7 @@ setup(name=name,
'shellinabox = slapos.recipe.shellinabox:Recipe', 'shellinabox = slapos.recipe.shellinabox:Recipe',
'signalwrapper= slapos.recipe.signal_wrapper:Recipe', 'signalwrapper= slapos.recipe.signal_wrapper:Recipe',
'simplelogger = slapos.recipe.simplelogger:Recipe', 'simplelogger = slapos.recipe.simplelogger:Recipe',
'simplehttpserver = slapos.recipe.simplehttpserver:Recipe',
'siptester = slapos.recipe.siptester:SipTesterRecipe', 'siptester = slapos.recipe.siptester:SipTesterRecipe',
'slapconfiguration = slapos.recipe.slapconfiguration:Recipe', 'slapconfiguration = slapos.recipe.slapconfiguration:Recipe',
'slapconfiguration.serialised = slapos.recipe.slapconfiguration:Serialised', 'slapconfiguration.serialised = slapos.recipe.slapconfiguration:Serialised',
...@@ -217,6 +220,6 @@ setup(name=name, ...@@ -217,6 +220,6 @@ setup(name=name,
], ],
}, },
test_suite='slapos.test', test_suite='slapos.test',
test_requires=[ 'jsonschema' ], tests_require=[ 'jsonschema' ],
) )
...@@ -43,7 +43,7 @@ class Recipe(GenericSlapRecipe): ...@@ -43,7 +43,7 @@ class Recipe(GenericSlapRecipe):
slap_connection = self.buildout['slap-connection'] slap_connection = self.buildout['slap-connection']
takeover_wrapper = self.createPythonScript( takeover_wrapper = self.createPythonScript(
name=os.path.join(self.options['bin'], self.options['wrapper-takeover']), name=self.options['wrapper-takeover'],
absolute_function='slapos.recipe.addresiliency.takeover.run', absolute_function='slapos.recipe.addresiliency.takeover.run',
arguments={ arguments={
'server_url': slap_connection['server-url'], 'server_url': slap_connection['server-url'],
...@@ -53,6 +53,7 @@ class Recipe(GenericSlapRecipe): ...@@ -53,6 +53,7 @@ class Recipe(GenericSlapRecipe):
'partition_id': slap_connection['partition-id'], 'partition_id': slap_connection['partition-id'],
'software': slap_connection['software-release-url'], 'software': slap_connection['software-release-url'],
'namebase': self.parameter_dict['namebase'], 'namebase': self.parameter_dict['namebase'],
'takeover_triggered_file_path': self.options['takeover-triggered-file-path'],
}) })
path_list.append(takeover_wrapper) path_list.append(takeover_wrapper)
......
...@@ -11,7 +11,8 @@ logging.basicConfig(level=logging.DEBUG) ...@@ -11,7 +11,8 @@ logging.basicConfig(level=logging.DEBUG)
def takeover(server_url, key_file, cert_file, computer_guid, def takeover(server_url, key_file, cert_file, computer_guid,
partition_id, software_release, namebase, partition_id, software_release, namebase,
winner_instance_suffix = None): winner_instance_suffix = None,
takeover_triggered_file_path=None):
""" """
This function does This function does
...@@ -74,6 +75,10 @@ def takeover(server_url, key_file, cert_file, computer_guid, ...@@ -74,6 +75,10 @@ def takeover(server_url, key_file, cert_file, computer_guid,
# Note: Root instance will reconfigure itself the winning instance (software_type # Note: Root instance will reconfigure itself the winning instance (software_type
# and parameters.) # and parameters.)
# Create "lock" file preventing equeue to run import scripts
# XXX hardcoded
open(takeover_triggered_file_path, 'w').write('')
def run(args): def run(args):
slapos.recipe.addresiliency.takeover.takeover(server_url = args.pop('server_url'), slapos.recipe.addresiliency.takeover.takeover(server_url = args.pop('server_url'),
key_file = args.pop('key_file'), key_file = args.pop('key_file'),
...@@ -81,5 +86,6 @@ def run(args): ...@@ -81,5 +86,6 @@ def run(args):
computer_guid = args.pop('computer_id'), computer_guid = args.pop('computer_id'),
partition_id = args.pop('partition_id'), partition_id = args.pop('partition_id'),
software_release = args.pop('software'), software_release = args.pop('software'),
namebase = args.pop('namebase')) namebase = args.pop('namebase'),
takeover_triggered_file_path = args.pop('takeover_triggered_file_path'))
# vim: set et sts=2:
##############################################################################
#
# Copyright (c) 2015 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import sys
class Recipe(GenericBaseRecipe):
"""
Promise checking that a parameter has an expected value or is a valid IPv4/IPv6 address
"""
def install(self):
config = dict(
value=self.options['value'],
python_path=sys.executable,
)
if self.options.get('expected-type') == "ipv6":
template = self.getTemplateFilename('check_ipv6.py.in')
elif self.options.get('expected-type') == "ipv4":
template = self.getTemplateFilename('check_ipv4.py.in')
else:
config["expected-value"] = self.options.get('expected-value')
config["expected-not-value"] = self.options.get('expected-not-value')
template = self.getTemplateFilename('check_parameter.py.in')
promise = self.createExecutable(
self.options['path'],
self.substituteTemplate(template, config))
return [promise]
#!%(python_path)s
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
import socket
import sys
address = "%(value)s"
try:
socket.inet_pton(socket.AF_INET, address)
except AttributeError: # no inet_pton here, sorry
try:
socket.inet_aton(address)
except socket.error:
sys.exit(127)
if address.count('.') != 3:
sys.exit(127)
except socket.error: # not a valid address
sys.exit(127)
#!%(python_path)s
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
import socket
import sys
address = "%(value)s"
try:
socket.inet_pton(socket.AF_INET6, address)
except socket.error: # not a valid address
sys.exit(127)
#!%(python_path)s
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
import socket
import sys
value = "%(value)s"
expected = "%(expected-value)s"
not_expected = "%(expected-not-value)s"
if expected != "" and value != expected:
print "FAIL: %s != %s" % (value, expected)
sys.exit(127)
if not_expected != "" and value == not_expected:
print "FAIL: %s == %s" % (value, not_expected)
sys.exit(127)
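# Hedged standalone sketch of the comparison performed by the generated
# check_parameter promise (value and expectations below are invented for the
# example; the real script is rendered from the template above by the recipe):
def check_value(value, expected, not_expected):
    # non-empty expectations are compared literally, exactly as in the template
    if expected != "" and value != expected:
        return "FAIL: %s != %s" % (value, expected)
    if not_expected != "" and value == not_expected:
        return "FAIL: %s == %s" % (value, not_expected)
    return "OK"

# check_value("production", "production", "") -> 'OK'
# check_value("staging", "production", "")    -> 'FAIL: staging != production'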
...@@ -76,10 +76,12 @@ class Part(GenericBaseRecipe): ...@@ -76,10 +76,12 @@ class Part(GenericBaseRecipe):
day_of_week_dict = dict((name, dow) for dow, name in enumerate( day_of_week_dict = dict((name, dow) for dow, name in enumerate(
"sunday monday tuesday wednesday thursday friday saturday".split()) "sunday monday tuesday wednesday thursday friday saturday".split())
for name in (name, name[:3])) for name in (name, name[:3]))
symbolic_dict = dict(hourly = '0 * * * *', symbolic_dict = dict(minutely = '* * * * *',
daily = '0 0 * * *', hourly = '0 * * * *',
monthly = '0 0 1 * *', daily = '0 0 * * *',
weekly = '0 0 * * 0') weekly = '0 0 * * 0',
monthly = '0 0 1 * *',
yearly = '0 0 1 1 *')
def systemd_to_cron(spec): def systemd_to_cron(spec):
"""Convert from systemd.time(7) calendar spec to crontab spec""" """Convert from systemd.time(7) calendar spec to crontab spec"""
...@@ -118,13 +120,22 @@ def systemd_to_cron(spec): ...@@ -118,13 +120,22 @@ def systemd_to_cron(spec):
raise ValueError raise ValueError
month, day = day month, day = day
hour, minute = time hour, minute = time
spec = minute, hour, day, month, dow spec = [minute, hour, day, month, dow]
for x, (y, z) in zip(spec, ((0, 60), (0, 24), (1, 31), (1, 12))): for i, (y, z) in enumerate(((0, 60), (0, 24), (1, 31), (1, 12))):
x = spec[i]
if x != '*': if x != '*':
for x in x.split(','): for x in x.split(','):
x = map(int, x.split('/', 1)) x = map(int, x.split('/', 1))
x[0] -= y a = x[0] - y
if x[0] < 0 or len(x) > 1 and x[0] >= x[1] or z <= sum(x): if 0 <= a < z:
raise ValueError if len(x) == 1:
continue
b = x[1]
if b > 0:
a = (z - a - 1) // b * b
if a:
spec[i] = '%s-%s/%s' % (x[0], x[0] + a, b)
continue
raise ValueError
return ' '.join(spec) return ' '.join(spec)
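# Hedged standalone sketch of the step expansion introduced above (the field
# value and bounds are chosen for the example: '2/3' on the day-of-month field,
# whose bounds are start=1 and span=31):
def expand_field(field, start, span):
    # same arithmetic as above: clamp a value/step pair to the field's span
    parts = map(int, field.split('/', 1))
    offset = parts[0] - start
    if 0 <= offset < span:
        if len(parts) == 1:
            return field
        step = parts[1]
        if step > 0:
            width = (span - offset - 1) // step * step
            if width:
                return '%s-%s/%s' % (parts[0], parts[0] + width, step)
    raise ValueError(field)

# expand_field('2/3', 1, 31) -> '2-29/3'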
...@@ -32,9 +32,13 @@ class Recipe(GenericBaseRecipe): ...@@ -32,9 +32,13 @@ class Recipe(GenericBaseRecipe):
parameters = [ parameters = [
'--database', self.options['database'], '--database', self.options['database'],
'-l', self.options['log'], '--logfile', self.options['log'],
'--lockfile', self.options['lockfile']
] ]
if 'takeover-triggered-file-path' in self.options:
parameters.extend(['--takeover-triggered-file-path', self.options['takeover-triggered-file-path']])
if 'loglevel' in self.options: if 'loglevel' in self.options:
parameters.extend(['--loglevel', self.options['loglevel']]) parameters.extend(['--loglevel', self.options['loglevel']])
......
...@@ -53,6 +53,15 @@ class Recipe(GenericBaseRecipe): ...@@ -53,6 +53,15 @@ class Recipe(GenericBaseRecipe):
else: else:
networking = 'skip-networking' networking = 'skip-networking'
log_bin = self.options.get('binlog-path', '')
if log_bin:
log_bin = 'log_bin = %s' % log_bin
expire_logs_days = self.options.get('binlog-expire-days')
if expire_logs_days > 0:
expire_logs_days = 'expire_logs_days = %s' % expire_logs_days
else:
expire_logs_days = ''
mysql_conf_file = self.createFile( mysql_conf_file = self.createFile(
self.options['conf-file'], self.options['conf-file'],
self.substituteTemplate(template_filename, { self.substituteTemplate(template_filename, {
...@@ -62,6 +71,8 @@ class Recipe(GenericBaseRecipe): ...@@ -62,6 +71,8 @@ class Recipe(GenericBaseRecipe):
'socket': self.options['socket'], 'socket': self.options['socket'],
'error_log': self.options['error-log'], 'error_log': self.options['error-log'],
'slow_query_log': self.options['slow-query-log'], 'slow_query_log': self.options['slow-query-log'],
'log_bin': log_bin,
'expire_logs_days': expire_logs_days,
}) })
) )
path_list.append(mysql_conf_file) path_list.append(mysql_conf_file)
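# Worked example (hypothetical option values): with binlog-path set to
# /srv/mysql/binlog/mysql-bin and binlog-expire-days set to 7, the two
# substitution values prepared above render in my.cnf as
#   log_bin = /srv/mysql/binlog/mysql-bin
#   expire_logs_days = 7
# while leaving both options unset keeps the corresponding lines empty.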
...@@ -69,16 +80,19 @@ class Recipe(GenericBaseRecipe): ...@@ -69,16 +80,19 @@ class Recipe(GenericBaseRecipe):
mysql_script_list = [] mysql_script_list = []
# user defined functions # user defined functions
udf_registration = ""
mroonga = self.options.get('mroonga', 'ha_mroonga.so') mroonga = self.options.get('mroonga', 'ha_mroonga.so')
if mroonga: if mroonga:
last_insert_grn_id = "CREATE FUNCTION last_insert_grn_id RETURNS " \ udf_registration += "CREATE FUNCTION last_insert_grn_id RETURNS " \
"INTEGER SONAME '" + mroonga + "';" "INTEGER SONAME '" + mroonga + "';\n"
else: udf_registration += "CREATE FUNCTION mroonga_snippet RETURNS " \
last_insert_grn_id = "" "STRING SONAME '" + mroonga + "';\n"
udf_registration += "CREATE FUNCTION mroonga_command RETURNS " \
"STRING SONAME '" + mroonga + "';\n"
mysql_script_list.append(self.substituteTemplate( mysql_script_list.append(self.substituteTemplate(
self.getTemplateFilename('mysql-init-function.sql.in'), self.getTemplateFilename('mysql-init-function.sql.in'),
{ {
'last_insert_grn_id': last_insert_grn_id, 'udf_registration': udf_registration,
} }
)) ))
# real database # real database
......
...@@ -48,9 +48,8 @@ innodb_locks_unsafe_for_binlog = 1 ...@@ -48,9 +48,8 @@ innodb_locks_unsafe_for_binlog = 1
#innodb_doublewrite = 0 #innodb_doublewrite = 0
#sync_frm = 0 #sync_frm = 0
# Uncomment the following if you need binary logging, which is recommended %(log_bin)s
# on production instances (either for replication or incremental backups). %(expire_logs_days)s
#log-bin=mysql-bin
# Force utf8 usage # Force utf8 usage
collation_server = utf8_unicode_ci collation_server = utf8_unicode_ci
......
USE mysql; USE mysql;
DROP FUNCTION IF EXISTS last_insert_grn_id; DROP FUNCTION IF EXISTS last_insert_grn_id;
%(last_insert_grn_id)s DROP FUNCTION IF EXISTS mroonga_snippet;
DROP FUNCTION IF EXISTS mroonga_command;
DROP FUNCTION IF EXISTS sphinx_snippets; DROP FUNCTION IF EXISTS sphinx_snippets;
#CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'ha_sphinx.so'; %(udf_registration)s
...@@ -45,7 +45,7 @@ class Recipe(GenericBaseRecipe): ...@@ -45,7 +45,7 @@ class Recipe(GenericBaseRecipe):
path_list = [] path_list = []
if not self.isTrueValue(self.options.get('use-tap')): if self.isTrueValue(self.options.get('use-nat')):
# XXX This could be done using Jinja. # XXX This could be done using Jinja.
for port in self.options['nat-rules'].split(): for port in self.options['nat-rules'].split():
tunnel_port = int(port) + 10000 tunnel_port = int(port) + 10000
......
...@@ -7,6 +7,11 @@ import os ...@@ -7,6 +7,11 @@ import os
import socket import socket
import subprocess import subprocess
import urllib import urllib
import gzip
import shutil
from random import shuffle
import glob
import re
# XXX: give all of this through parameter, don't use this as template, but as module # XXX: give all of this through parameter, don't use this as template, but as module
qemu_img_path = '%(qemu-img-path)s' qemu_img_path = '%(qemu-img-path)s'
...@@ -19,14 +24,30 @@ default_disk_image = '%(default-disk-image)s' ...@@ -19,14 +24,30 @@ default_disk_image = '%(default-disk-image)s'
disk_path = '%(disk-path)s' disk_path = '%(disk-path)s'
virtual_hard_drive_url = '%(virtual-hard-drive-url)s'.strip() virtual_hard_drive_url = '%(virtual-hard-drive-url)s'.strip()
virtual_hard_drive_md5sum = '%(virtual-hard-drive-md5sum)s'.strip() virtual_hard_drive_md5sum = '%(virtual-hard-drive-md5sum)s'.strip()
virtual_hard_drive_gzipped = '%(virtual-hard-drive-gzipped)s'.strip()
nat_rules = '%(nat-rules)s'.strip() nat_rules = '%(nat-rules)s'.strip()
use_tap = '%(use-tap)s' use_tap = '%(use-tap)s'
use_nat = '%(use-nat)s'
tap_interface = '%(tap-interface)s' tap_interface = '%(tap-interface)s'
listen_ip = '%(ipv4)s' listen_ip = '%(ipv4)s'
mac_address = '%(mac-address)s' mac_address = '%(mac-address)s'
tap_mac_address = '%(tap-mac-address)s'
smp_count = '%(smp-count)s' smp_count = '%(smp-count)s'
smp_options = '%(smp-options)s'.strip()
numa_list = '%(numa)s'.split()
ram_size = '%(ram-size)s' ram_size = '%(ram-size)s'
pid_file_path = '%(pid-file-path)s' pid_file_path = '%(pid-file-path)s'
external_disk_number = %(external-disk-number)s
external_disk_size = '%(external-disk-size)s'
external_disk_format = '%(external-disk-format)s'
disk_storage_dict = {}
disk_storage_list = """%(disk-storage-list)s""".split('\n')
map_storage_list = []
etc_directory = '%(etc-directory)s'.strip()
httpd_port = %(httpd-port)s
netcat_bin = '%(netcat-binary)s'.strip()
cluster_doc_host = '%(cluster-doc-host)s'
cluster_doc_port = %(cluster-doc-port)s
def md5Checksum(file_path): def md5Checksum(file_path):
with open(file_path, 'rb') as fh: with open(file_path, 'rb') as fh:
...@@ -57,25 +78,74 @@ def getSocketStatus(host, port): ...@@ -57,25 +78,74 @@ def getSocketStatus(host, port):
break break
return s return s
def getMapStorageList(disk_storage_dict, external_disk_number):
map_disk_file = os.path.join(etc_directory, '.data-disk-ids')
last_disk_num_f = os.path.join(etc_directory, '.data-disk-amount')
id_list = []
last_amount = 0
map_f_exist = os.path.exists(map_disk_file)
if os.path.exists(last_disk_num_f):
with open(last_disk_num_f, 'r') as lf:
last_amount = int(lf.readline())
if map_f_exist:
with open(map_disk_file, 'r') as mf:
# IDs are written on one line: data1 data3 data2 ...
content = mf.readline()
for id in content.split(' '):
if disk_storage_dict.has_key(id):
id_list.append(id)
else:
# Means that this disk path has been removed (disk unmounted)
last_amount -= 1
for key in disk_storage_dict:
if not key in id_list:
id_list.append(key)
if id_list:
if not map_f_exist:
# shuffle the list so that disks are not written to data1, data2, ... every time
shuffle(id_list)
if external_disk_number < last_amount:
# Dropping an already created disk is not allowed
external_disk_number = last_amount
with open(map_disk_file, 'w') as mf:
mf.write(' '.join(id_list))
with open(last_disk_num_f, 'w') as lf:
lf.write('%%s' %% external_disk_number)
return id_list, external_disk_number
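# Illustration (hypothetical ids): on the first run the helper above may shuffle
# ['data1', 'data2', 'data3'] into e.g. 'data2 data1 data3' and persist that
# order in .data-disk-ids; every later run reuses the saved order verbatim, and
# .data-disk-amount keeps the highest disk count ever requested so that already
# created disks are never dropped.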
# Download existing hard drive if needed at first boot # Download existing hard drive if needed at first boot
if not os.path.exists(disk_path) and virtual_hard_drive_url != '': if not os.path.exists(disk_path) and virtual_hard_drive_url != '':
print('Downloading virtual hard drive...') print('Downloading virtual hard drive...')
try: try:
urllib.urlretrieve(virtual_hard_drive_url, disk_path) downloaded_disk = disk_path
if virtual_hard_drive_gzipped == 'True':
downloaded_disk = '%%s.gz' %% disk_path
urllib.urlretrieve(virtual_hard_drive_url, downloaded_disk)
except: except:
os.remove(disk_path) os.remove(downloaded_disk)
raise raise
md5sum = virtual_hard_drive_md5sum.strip() md5sum = virtual_hard_drive_md5sum.strip()
if md5sum: if md5sum:
print('Checking MD5 checksum...') print('Checking MD5 checksum...')
local_md5sum = md5Checksum(disk_path) local_md5sum = md5Checksum(downloaded_disk)
if local_md5sum != md5sum: if local_md5sum != md5sum:
os.remove(disk_path) os.remove(downloaded_disk)
raise Exception('MD5 mismatch. MD5 of local file is %%s, Specified MD5 is %%s.' %% ( raise Exception('MD5 mismatch. MD5 of local file is %%s, Specified MD5 is %%s.' %% (
local_md5sum, md5sum)) local_md5sum, md5sum))
print('MD5sum check passed.') print('MD5sum check passed.')
else: else:
print('Warning: no checksum specified.') print('Warning: no checksum specified.')
if downloaded_disk.endswith('.gz'):
try:
with open(disk_path, 'w') as disk:
with gzip.open(downloaded_disk, 'rb') as disk_gz:
shutil.copyfileobj(disk_gz, disk)
except Exception:
if os.path.exists(disk_path):
os.remove(disk_path)
raise
os.remove(downloaded_disk)
# Create disk if doesn't exist # Create disk if doesn't exist
# XXX: move to Buildout profile # XXX: move to Buildout profile
...@@ -85,25 +155,97 @@ if not os.path.exists(disk_path): ...@@ -85,25 +155,97 @@ if not os.path.exists(disk_path):
disk_path, '%%sG' %% disk_size]) disk_path, '%%sG' %% disk_size])
print('Done.') print('Done.')
# Check and create external disk
additional_disk_list = []
for storage in disk_storage_list:
if storage:
key, val = storage.split(' ')
disk_storage_dict[key.strip()] = val.strip()
if not external_disk_format in ['qcow2', 'raw', 'vdi', 'vmdk', 'cloop']:
external_disk_format = 'qcow2'
map_storage_list, external_disk_number = getMapStorageList(disk_storage_dict,
int(external_disk_number))
assert len(map_storage_list) == len(disk_storage_dict)
if disk_storage_dict:
if external_disk_number > 0:
index = 0
while (index < len(disk_storage_dict)) and (index < external_disk_number):
path = disk_storage_dict[map_storage_list[index]]
if os.path.exists(path):
disk_filepath = os.path.join(path,
'kvm_virtual_disk.%%s' %% external_disk_format)
disk_list = glob.glob('%%s.*' %% os.path.join(path, 'kvm_virtual_disk'))
if disk_list == []:
print('Creating one additional virtual hard drive...')
subprocess.Popen([qemu_img_path, 'create' ,'-f', '%%s' %% external_disk_format,
disk_filepath, '%%sG' %% external_disk_size])
else:
# Cannot change or recreate the disk if it already exists
disk_filepath = disk_list[0]
additional_disk_list.append(disk_filepath)
else:
print('Data folder %%s was not used to create external disk %%r' %% (path, index + 1))
index += 1
# Generate network parameters # Generate network parameters
# XXX: use_tap should be a boolean # XXX: use_tap should be a boolean
tap_network_parameter = []
nat_network_parameter = []
numa_parameter = []
number = -1
if use_nat == 'True':
number += 1
rules = 'user,id=lan%%s,' %% number + ','.join('hostfwd=tcp:%%s:%%s-:%%s' %% (listen_ip,
int(port) + 10000, port) for port in nat_rules.split())
if httpd_port > 0:
rules += ',guestfwd=tcp:10.0.2.100:80-cmd:%%s %%s %%s' %% (netcat_bin,
listen_ip, httpd_port)
if cluster_doc_host and cluster_doc_port > 0:
rules += ',guestfwd=tcp:10.0.2.101:443-cmd:%%s %%s %%s' %% (netcat_bin,
cluster_doc_host, cluster_doc_port)
nat_network_parameter = ['-netdev', rules,
'-device', 'e1000,netdev=lan%%s,mac=%%s' %% (number, mac_address)]
if use_tap == 'True': if use_tap == 'True':
qemu_network_parameter = 'tap,ifname=%%s,script=no,downscript=no' %% tap_interface number += 1
else: tap_network_parameter = ['-netdev',
qemu_network_parameter = 'user,' + ','.join('hostfwd=tcp:%%s:%%s-:%%s' %% (listen_ip, int(port) + 10000, port) for port in nat_rules.split()) 'tap,id=lan%%s,ifname=%%s,script=no,downscript=no' %% (number,
tap_interface),
'-device', 'e1000,netdev=lan%%s,mac=%%s' %% (number, tap_mac_address)]
smp = smp_count
if smp_options:
for option in smp_options.split(','):
key, val = option.split('=')
if key in ('cores', 'threads', 'sockets', 'maxcpus') and val.isdigit():
smp += ',%%s=%%s' %% (key, val)
kvm_argument_list = [qemu_path, kvm_argument_list = [qemu_path,
'-enable-kvm', '-net', 'nic,macaddr=%%s' %% mac_address, '-enable-kvm', '-smp', smp,
'-net', qemu_network_parameter, '-m', ram_size, '-vga', 'std',
'-smp', smp_count,
'-m', ram_size,
'-drive', 'file=%%s,if=%%s' %% (disk_path, disk_type), '-drive', 'file=%%s,if=%%s' %% (disk_path, disk_type),
'-vnc', '%%s:1,ipv4,password' %% listen_ip, '-vnc', '%%s:1,ipv4,password' %% listen_ip,
'-boot', 'menu=on', '-boot', 'order=cd,menu=on',
'-qmp', 'unix:%%s,server' %% socket_path, '-qmp', 'unix:%%s,server' %% socket_path,
'-pidfile', pid_file_path, '-pidfile', pid_file_path,
] ]
rgx = re.compile('^[\w*\,][\=\d+\-\,\w]*$')
for numa in numa_list:
if rgx.match(numa):
numa_parameter.extend(['-numa', numa])
kvm_argument_list += numa_parameter
if tap_network_parameter == [] and nat_network_parameter == []:
print 'Warning: no network interface defined.'
else:
kvm_argument_list += nat_network_parameter + tap_network_parameter
for disk in additional_disk_list:
kvm_argument_list.extend([
'-drive', 'file=%%s,if=%%s' %% (disk, disk_type)])
# Try to connect to NBD server (and second nbd if defined). # Try to connect to NBD server (and second nbd if defined).
# If not available, don't even specify it in qemu command line parameters. # If not available, don't even specify it in qemu command line parameters.
# Reason: if qemu starts with unavailable NBD drive, it will just crash. # Reason: if qemu starts with unavailable NBD drive, it will just crash.
...@@ -124,4 +266,5 @@ else: ...@@ -124,4 +266,5 @@ else:
'-drive', 'file=%%s,media=cdrom' %% default_disk_image '-drive', 'file=%%s,media=cdrom' %% default_disk_image
]) ])
print 'Starting KVM: \n %%s' %% ' '.join(kvm_argument_list)
os.execv(qemu_path, kvm_argument_list) os.execv(qemu_path, kvm_argument_list)
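# For illustration (hypothetical values): with nat_rules = '22 80',
# listen_ip = '10.0.1.2', httpd_port = 8080 and netcat_bin = '/opt/bin/nc',
# the user-mode rule built above expands to
#   user,id=lan0,hostfwd=tcp:10.0.1.2:10022-:22,hostfwd=tcp:10.0.1.2:10080-:80,guestfwd=tcp:10.0.2.100:80-cmd:/opt/bin/nc 10.0.1.2 8080
# and is attached to the guest with '-device', 'e1000,netdev=lan0,mac=<mac_address>'.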
...@@ -65,6 +65,7 @@ class Part(GenericBaseRecipe): ...@@ -65,6 +65,7 @@ class Part(GenericBaseRecipe):
'dateext', 'dateext',
'rotate 3650', 'rotate 3650',
'compress', 'compress',
'delaycompress',
'notifempty', 'notifempty',
'sharedscripts', 'sharedscripts',
'create', 'create',
......
...@@ -36,9 +36,7 @@ class Recipe(GenericBaseRecipe): ...@@ -36,9 +36,7 @@ class Recipe(GenericBaseRecipe):
self.mode = int(self.directory.pop('mode', '700'), 8) self.mode = int(self.directory.pop('mode', '700'), 8)
def install(self): def install(self):
for path in sorted(self.directory.values()): for path in sorted(self.directory.itervalues()):
if not os.path.exists(path): if path and not os.path.isdir(path):
os.makedirs(path, self.mode) os.makedirs(path, self.mode)
elif not os.path.isdir(path):
raise OSError("%s path exits, but it's not a directory.")
return [] return []
...@@ -89,7 +89,12 @@ class Admin(NeoBaseRecipe): ...@@ -89,7 +89,12 @@ class Admin(NeoBaseRecipe):
class Master(NeoBaseRecipe): class Master(NeoBaseRecipe):
def _getOptionList(self): def _getOptionList(self):
options = self.options options = self.options
return [ r = [
'-p', options['partitions'], '-p', options['partitions'],
'-r', options['replicas'], '-r', options['replicas'],
] ]
for x in (('-C', options['upstream-cluster']),
('-M', options['upstream-masters'])):
if x[1]:
r += x
return r
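# Example (hypothetical values): with partitions = 12, replicas = 1,
# upstream-cluster = 'backup' and upstream-masters = '[2001:db8::1]:2300',
# this returns ['-p', '12', '-r', '1', '-C', 'backup', '-M', '[2001:db8::1]:2300'];
# empty upstream options are simply skipped.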
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
import socket
import struct
import os, stat
import string, random
import json
import traceback
from slapos import slap
class Recipe(GenericBaseRecipe):
def __init__(self, buildout, name, options):
"""Default initialisation"""
self.slap = slap.slap()
# SLAP related information
slap_connection = buildout['slap-connection']
self.computer_id = slap_connection['computer-id']
self.computer_partition_id = slap_connection['partition-id']
self.server_url = slap_connection['server-url']
self.software_release_url = slap_connection['software-release-url']
self.key_file = slap_connection.get('key-file')
self.cert_file = slap_connection.get('cert-file')
self.slave_list = json.loads(options['slave-instance-list'])
options['slave-amount'] = '%s' % len(self.slave_list)
return GenericBaseRecipe.__init__(self, buildout, name, options)
def getSerialFromIpv6(self, ipv6):
prefix = ipv6.split('/')[0].lower()
hi, lo = struct.unpack('!QQ', socket.inet_pton(socket.AF_INET6, prefix))
ipv6_int = (hi << 64) | lo
serial = '0x1%x' % ipv6_int
# delete non significant part
for part in prefix.split(':')[::-1]:
if part:
for i in ['0']*(4 - len(part)):
part = i + part
serial = serial.split(part)[0] + part
break
return serial
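# Worked example (documentation prefix, for illustration only):
#   getSerialFromIpv6('2001:db8::/48') -> '0x120010db8'
# i.e. '0x1' followed by the hex value of the prefix, truncated right after
# its last non-empty 16-bit group.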
def generateCertificate(self):
key_file = self.options['key-file'].strip()
cert_file = self.options['cert-file'].strip()
dh_file = self.options['dh-file'].strip()
if not os.path.exists(dh_file):
dh_command = [self.options['openssl-bin'], 'dhparam', '-out',
'%s' % dh_file, self.options['key-size']]
try:
subprocess.check_call(dh_command)
except Exception:
if os.path.exists(dh_file):
os.unlink(dh_file)
raise
if not os.path.exists(cert_file):
serial = self.getSerialFromIpv6(self.options['ipv6-prefix'].strip())
key_command = [self.options['openssl-bin'], 'genrsa', '-out',
'%s' % key_file, self.options['key-size']]
#'-config', openssl_configuration
cert_command = [self.options['openssl-bin'], 'req', '-nodes', '-new',
'-x509', '-batch', '-key', '%s' % key_file, '-set_serial',
'%s' % serial, '-days', '3650', '-out', '%s' % cert_file]
try:
subprocess.check_call(key_command)
except Exception:
if os.path.exists(key_file):
os.unlink(key_file)
raise
try:
subprocess.check_call(cert_command)
except Exception:
if os.path.exists(cert_file):
os.unlink(cert_file)
raise
def generateSlaveTokenList(self, slave_instance_list, token_file):
to_remove_dict = {}
to_add_dict = {}
token_dict = self.loadJsonFile(token_file)
reference_list = [slave_instance.get('slave_reference') for slave_instance
in slave_instance_list]
for reference in reference_list:
if not reference in token_dict:
# we generate new token
number = reference.split('-')[1]
new_token = number + ''.join(random.sample(string.ascii_lowercase, 20))
token_dict[reference] = new_token
to_add_dict[reference] = new_token
for reference in token_dict.keys():
if not reference in reference_list:
# This slave instance is destroyed ?
to_remove_dict[reference] = token_dict.pop(reference)
return token_dict, to_add_dict, to_remove_dict
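# Example (hypothetical reference): a slave named 'SOFTINST-42' gets a token
# made of '42' followed by 20 random lowercase letters, and the same token is
# reused on subsequent runs because it is kept in token.json.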
def loadJsonFile(self, path):
if os.path.exists(path):
with open(path, 'r') as f:
content = f.read()
return json.loads(content)
else:
return {}
def writeFile(self, path, data):
with open(path, 'w') as f:
f.write(data)
return path
def readFile(self, path):
if os.path.exists(path):
with open(path, 'r') as f:
content = f.read()
return content
return ''
def genHash(self, length):
hash_path = os.path.join(self.options['conf-dir'], '%s-hash' % length)
if not os.path.exists(hash_path):
pool = string.letters + string.digits
hash_string = ''.join(random.choice(pool) for i in xrange(length))
self.writeFile(hash_path, hash_string)
else:
hash_string = self.readFile(hash_path)
return hash_string
def install(self):
path_list = []
token_save_path = os.path.join(self.options['conf-dir'], 'token.json')
token_list_path = self.options['token-dir']
self.generateCertificate()
wrapper = self.createFile(self.options['wrapper'], self.substituteTemplate(
self.getTemplateFilename('registry-run.in'), dict(
parameter='@%s' % self.options['config-file'],
pid_file=self.options['pid-file'],
command=self.options['command']
)
)
)
os.chmod(self.options['wrapper'], stat.S_IRWXU)
path_list.append(wrapper)
registry_url = 'http://%s:%s/' % (self.options['ipv4'], self.options['port'])
token_dict, add_token_dict, rm_token_dict = self.generateSlaveTokenList(
self.slave_list, token_save_path)
# write request add token
for reference in add_token_dict:
path = os.path.join(token_list_path, '%s.add' % reference)
if not os.path.exists(path):
self.createFile(path, add_token_dict[reference])
# write request remove token
for reference in rm_token_dict:
path = os.path.join(token_list_path, '%s.remove' % reference)
if not os.path.exists(path):
self.createFile(path, rm_token_dict[reference])
# remove request add token if exists
add_path = os.path.join(token_list_path, '%s.add' % reference)
if os.path.exists(add_path):
os.unlink(add_path)
self.createFile(token_save_path, json.dumps(token_dict))
service_dict = dict(token_base_path=token_list_path,
token_json=token_save_path,
db=self.options['db-path'],
partition_id=self.computer_partition_id,
computer_id=self.computer_id,
registry_url=registry_url)
service_dict['server_url'] = self.server_url
service_dict['cert_file'] = self.cert_file
service_dict['key_file'] = self.key_file
request_add = self.createPythonScript(
self.options['manager-wrapper'].strip(),
'%s.re6stnet.manage' % __name__, service_dict
)
path_list.append(request_add)
request_drop = self.createPythonScript(
self.options['drop-service-wrapper'].strip(),
'%s.re6stnet.requestRemoveToken' % __name__, service_dict
)
path_list.append(request_drop)
request_check = self.createPythonScript(
self.options['check-service-wrapper'].strip(),
'%s.re6stnet.checkService' % __name__, service_dict
)
path_list.append(request_check)
revoke_check = self.createPythonScript(
self.options['revoke-service-wrapper'].strip(),
'%s.re6stnet.requestRevoqueCertificate' % __name__, service_dict
)
path_list.append(revoke_check)
# Send connection parameters of slave instances
if token_dict:
self.slap.initializeConnection(self.server_url, self.key_file,
self.cert_file)
computer_partition = self.slap.registerComputerPartition(
self.computer_id,
self.computer_partition_id)
for slave_reference, token in token_dict.iteritems():
try:
status_file = os.path.join(token_list_path, '%s.status' % slave_reference)
status = self.readFile(status_file) or 'New token requested'
msg = status
if status == 'TOKEN_ADDED':
msg = 'Token is ready for use'
elif status == 'TOKEN_USED':
msg = 'Token not available, it has been used to generate re6stnet certificate.'
ipv6_file = os.path.join(token_list_path, '%s.ipv6' % slave_reference)
ipv6 = self.readFile(ipv6_file) or '::'
computer_partition.setConnectionDict(
{'token':token, '1_info':msg, 'ipv6': ipv6},
slave_reference)
except Exception:
self.logger.fatal("Error while sending slave %s information: %s",
slave_reference, traceback.format_exc())
return path_list
# -*- coding: utf-8 -*-
import logging
import json
import os
import time
import sqlite3
import slapos
import traceback
from re6st import registry, utils, x509
from OpenSSL import crypto
log = logging.getLogger('SLAPOS-RE6STNET')
logging.basicConfig(level=logging.DEBUG)
def loadJsonFile(path):
if os.path.exists(path):
with open(path, 'r') as f:
content = f.read()
return json.loads(content)
else:
return {}
def writeFile(path, data):
with open(path, 'w') as f:
f.write(data)
def readFile(path):
if os.path.exists(path):
with open(path, 'r') as f:
content = f.read()
return content
return ''
def getDb(db_path):
db = sqlite3.connect(db_path, isolation_level=None,
check_same_thread=False)
db.text_factory = str
return db.cursor()
def bang(args):
computer_guid = args['computer_id']
partition_id = args['partition_id']
slap = slapos.slap.slap()
# Redeploy instance to update published information
slap.initializeConnection(args['server_url'], args['key_file'],
args['cert_file'])
partition = slap.registerComputerPartition(computer_guid=computer_guid,
partition_id=partition_id)
partition.bang(message='Published parameters changed!')
log.info("Bang with message 'parameters changed'...")
def requestAddToken(args, can_bang=True):
time.sleep(3)
registry_url = args['registry_url']
base_token_path = args['token_base_path']
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.add')]
if not path_list:
log.info("No new token to add. Exiting...")
return
client = registry.RegistryClient(registry_url)
call_bang = False
for reference_key in path_list:
request_file = os.path.join(base_token_path, reference_key)
token = readFile(request_file)
if token :
reference = reference_key.split('.')[0]
# email is unique as reference is also unique
email = '%s@slapos' % reference.lower()
try:
result = client.requestAddToken(token, email)
except Exception:
log.debug('Request add token failed for %s... \n %s' % (request_file,
traceback.format_exc()))
continue
if result and result == token:
# update information
log.info("New token added for slave instance %s. Updating file status..." %
reference)
writeFile(os.path.join(base_token_path, '%s.status' % reference),
'TOKEN_ADDED')
os.unlink(request_file)
call_bang = True
else:
log.debug('Bad token. Request add token failed for %s...' % request_file)
if can_bang and call_bang:
bang(args)
def requestRemoveToken(args):
base_token_path = args['token_base_path']
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.remove')]
if not path_list:
log.info("No token to delete. Exiting...")
return
client = registry.RegistryClient(args['registry_url'])
for reference_key in path_list:
request_file = os.path.join(base_token_path, reference_key)
token = readFile(request_file)
if token :
reference = reference_key.split('.')[0]
try:
result = client.requestDeleteToken(token)
except Exception:
log.debug('Request delete token failed for %s... \n %s' % (request_file,
traceback.format_exc()))
continue
else:
# certificate is invalidated, it will be revoked
writeFile(os.path.join(base_token_path, '%s.revoke' % reference), '')
if result == 'True':
# update information
log.info("Token deleted for slave instance %s. Clean up file status..." %
reference)
if result in ['True', 'False']:
os.unlink(request_file)
status_file = os.path.join(base_token_path, '%s.status' % reference)
if os.path.exists(status_file):
os.unlink(status_file)
ipv6_file = os.path.join(base_token_path, '%s.ipv6' % reference)
if os.path.exists(ipv6_file):
os.unlink(ipv6_file)
else:
log.debug('Bad token. Request remove token failed for %s...' % request_file)
def requestRevoqueCertificate(args):
base_token_path = args['token_base_path']
db = getDb(args['db'])
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.revoke')]
client = registry.RegistryClient(args['registry_url'])
for reference_key in path_list:
reference = reference_key.split('.')[0]
# XXX - email is always unique
email = '%s@slapos' % reference.lower()
cert_string = ''
try:
cert_string, = db.execute("SELECT cert FROM cert WHERE email = ?",
(email,)).next()
except StopIteration:
# Certificate was not generated yet !!!
pass
try:
if cert_string:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_string)
cn = x509.subnetFromCert(cert)
result = client.revoke(str(cn))
time.sleep(2)
except Exception:
log.debug('Request revoke certificate failed for %s... \n %s' % (reference,
traceback.format_exc()))
continue
else:
os.unlink(os.path.join(base_token_path, reference_key))
log.info("Certificate revoked for slave instance %s." % reference)
def dumpIPv6Network(slave_reference, db, network, ipv6_file):
email = '%s@slapos' % slave_reference.lower()
cert_string = ''
try:
cert_string, = db.execute("SELECT cert FROM cert WHERE email = ?",
(email,)).next()
except StopIteration:
# Certificate was not generated yet !!!
pass
try:
if cert_string:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_string)
cn = x509.subnetFromCert(cert)
subnet = network + utils.binFromSubnet(cn)
ipv6 = utils.ipFromBin(subnet)
writeFile(ipv6_file, ipv6)
except Exception:
log.debug('Failed to dump IPv6 address for %s... \n %s' % (slave_reference,
traceback.format_exc()))
def checkService(args, can_bang=True):
base_token_path = args['token_base_path']
token_dict = loadJsonFile(args['token_json'])
if not token_dict:
return
db = getDb(args['db'])
call_bang = False
computer_guid = args['computer_id']
partition_id = args['partition_id']
slap = slapos.slap.slap()
client = registry.RegistryClient(args['registry_url'])
ca = client.getCa()
network = x509.networkFromCa(crypto.load_certificate(crypto.FILETYPE_PEM, ca))
# Check token status
for slave_reference, token in token_dict.iteritems():
status_file = os.path.join(base_token_path, '%s.status' % slave_reference)
ipv6_file = os.path.join(base_token_path, '%s.ipv6' % slave_reference)
if not os.path.exists(status_file):
# This token is not added yet!
continue
msg = readFile(status_file)
if msg == 'TOKEN_USED':
dumpIPv6Network(slave_reference, db, network, ipv6_file)
continue
# Check if token is not in the database
status = False
try:
token_found, = db.execute("SELECT token FROM token WHERE token = ?",
(token,)).next()
if token_found == token:
status = True
except StopIteration:
pass
if not status:
# Token is used to register client
call_bang = True
try:
writeFile(status_file, 'TOKEN_USED')
dumpIPv6Network(slave_reference, db, network, ipv6_file)
log.info("Token status of %s updated to 'used'." % slave_reference)
except IOError:
# XXX- this file should always exist
log.debug('Error when writing in file %s. Could not update status of %s...' %
(status_file, slave_reference))
if call_bang and can_bang:
bang(args)
def manage(args):
# Request Add new tokens
requestAddToken(args)
# Request delete removed token
requestRemoveToken(args)
# check status of all token
checkService(args)
#!/bin/sh
echo $$ > %(pid_file)s
exec %(command)s \
%(parameter)s
\ No newline at end of file
...@@ -29,9 +29,11 @@ from zc.buildout import UserError ...@@ -29,9 +29,11 @@ from zc.buildout import UserError
from slapos.recipe.librecipe import wrap, JSON_SERIALISED_MAGIC_KEY from slapos.recipe.librecipe import wrap, JSON_SERIALISED_MAGIC_KEY
import json import json
from slapos import slap as slapmodule from slapos import slap as slapmodule
from slapos.slap import SoftwareProductCollection
import slapos.recipe.librecipe.generic as librecipe import slapos.recipe.librecipe.generic as librecipe
import traceback import traceback
SOFTWARE_PRODUCT_NAMESPACE = "product."
DEFAULT_SOFTWARE_TYPE = 'RootSoftwareInstance' DEFAULT_SOFTWARE_TYPE = 'RootSoftwareInstance'
class Recipe(object): class Recipe(object):
...@@ -130,6 +132,19 @@ class Recipe(object): ...@@ -130,6 +132,19 @@ class Recipe(object):
options['computer-id'], options['computer-id'],
options['partition-id'], options['partition-id'],
).request ).request
if software_url is not None and \
software_url.startswith(SOFTWARE_PRODUCT_NAMESPACE):
product = SoftwareProductCollection(self.logger, slap)
try:
software_url = product.__getattr__(
software_url[len(SOFTWARE_PRODUCT_NAMESPACE):])
except AttributeError as e:
self.logger.warning('Error while getting software release: %s' % e.message)
self._raise_request_exception = None self._raise_request_exception = None
self._raise_request_exception_formatted = None self._raise_request_exception_formatted = None
self.instance = None self.instance = None
......
...@@ -69,7 +69,6 @@ def shellinabox(args): ...@@ -69,7 +69,6 @@ def shellinabox(args):
command_line = [ command_line = [
args['shellinabox'], args['shellinabox'],
'-d',
'-c', certificate_dir, '-c', certificate_dir,
'-s', service, '-s', service,
'--ipv6', args['ipv6'], '--ipv6', args['ipv6'],
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import string, random
import os
class Recipe(GenericBaseRecipe):
def __init__(self, buildout, name, options):
base_path = options['base-path']
pool = string.letters + string.digits
hash_string = ''.join(random.choice(pool) for i in xrange(64))
path = os.path.join(base_path, hash_string)
if os.path.exists(base_path):
path_list = os.listdir(base_path)
if len(path_list) == 1:
hash_string = path_list[0]
path = os.path.join(base_path, hash_string)
elif len(path_list) > 1:
raise ValueError("Folder %s should contain 0 or 1 element." % base_path)
options['root-dir'] = path
options['path'] = hash_string
return GenericBaseRecipe.__init__(self, buildout, name, options)
def install(self):
if not os.path.exists(self.options['root-dir']):
os.mkdir( self.options['root-dir'] )
parameters = {
'host': self.options['host'],
'port': int(self.options['port']),
'cwd': self.options['base-path'],
'log-file': self.options['log-file'],
'cert-file': self.options['cert-file'],
'key-file': self.options['key-file']
}
server = self.createPythonScript(
self.options['wrapper'].strip(),
'%s.simplehttpserver.run' % __name__, parameters
)
return [server]
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import HTTPServer
import ssl
import os
import logging
from netaddr import valid_ipv4, valid_ipv6
import socket
class ServerHandler(SimpleHTTPRequestHandler):
def respond(self, code=200, type='text/plain'):
self.send_response(code)
self.send_header("Content-type", type)
self.end_headers()
def do_GET(self):
logging.info('%s - GET: %s \n%s' % (self.client_address[0], self.path, self.headers))
if not self.path or self.path == '/':
# no access to root path
self.respond(403)
self.wfile.write("Forbidden")
return
SimpleHTTPRequestHandler.do_GET(self)
class HTTPServerV6(HTTPServer):
address_family = socket.AF_INET6
def run(args):
# minimal web server. serves files relative to the
# current directory.
logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
filename=args['log-file'] ,level=logging.INFO)
port = args['port']
host = args['host']
os.chdir(args['cwd'])
Handler = ServerHandler
if valid_ipv6(host):
server = HTTPServerV6
else:
server = HTTPServer
httpd = server((host, port), Handler)
if args.has_key('cert-file') and args.has_key('key-file'):
httpd.socket = ssl.wrap_socket (httpd.socket,
server_side=True,
certfile=args['cert-file'],
keyfile=args['key-file'])
logging.info("Starting simple http server at https://%s:%s" % (host, port))
httpd.serve_forever()
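# Hypothetical standalone invocation for local testing (values are invented;
# without cert-file/key-file the wrap_socket branch is skipped and the server
# stays on plain HTTP):
if __name__ == '__main__':
    run({'host': '127.0.0.1', 'port': 8080, 'cwd': '.',
         'log-file': 'simplehttpserver.log'})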
...@@ -32,6 +32,7 @@ import slapos.slap ...@@ -32,6 +32,7 @@ import slapos.slap
from slapos.recipe.librecipe import unwrap from slapos.recipe.librecipe import unwrap
from ConfigParser import RawConfigParser from ConfigParser import RawConfigParser
from netaddr import valid_ipv4, valid_ipv6 from netaddr import valid_ipv4, valid_ipv6
from slapos.util import mkdir_p
class Recipe(object): class Recipe(object):
""" """
...@@ -61,6 +62,10 @@ class Recipe(object): ...@@ -61,6 +62,10 @@ class Recipe(object):
Partition identifier. Partition identifier.
Example: Example:
${slap-connection:partition-id} ${slap-connection:partition-id}
storage-home
Path of folder configured for data storage
Example:
${storage-configuration:storage-home}
Output: Output:
slap-software-type slap-software-type
...@@ -75,8 +80,20 @@ class Recipe(object): ...@@ -75,8 +80,20 @@ class Recipe(object):
One of the IPv6 addresses. One of the IPv6 addresses.
tap tap
Set of TAP interfaces. Set of TAP interfaces.
tap-network-information-dict
Dict of set of all TAP network information
tap-ipv4
ipv4 allowed for this TAP
tap-gateway
ipv4 of gateway interface of this TAP
tap-netmask
ipv4 netmask address of this TAP
tap-network
ipv4 network address of this TAP
configuration configuration
Dict of all parameters. Dict of all parameters.
storage-dict
Dict of partition data path when it is configured
configuration.<key> configuration.<key>
One key per partition parameter. One key per partition parameter.
Partition parameter whose name cannot be represented unambiguously in Partition parameter whose name cannot be represented unambiguously in
...@@ -91,7 +108,8 @@ class Recipe(object): ...@@ -91,7 +108,8 @@ class Recipe(object):
OPTCRE_match = RawConfigParser.OPTCRE.match OPTCRE_match = RawConfigParser.OPTCRE.match
def __init__(self, buildout, name, options): def __init__(self, buildout, name, options):
parameter_dict = self.fetch_parameter_dict(options) parameter_dict = self.fetch_parameter_dict(options,
buildout['buildout']['directory'])
match = self.OPTCRE_match match = self.OPTCRE_match
for key, value in parameter_dict.iteritems(): for key, value in parameter_dict.iteritems():
...@@ -99,7 +117,7 @@ class Recipe(object): ...@@ -99,7 +117,7 @@ class Recipe(object):
continue continue
options['configuration.' + key] = value options['configuration.' + key] = value
def fetch_parameter_dict(self, options): def fetch_parameter_dict(self, options, instance_root):
slap = slapos.slap.slap() slap = slapos.slap.slap()
slap.initializeConnection( slap.initializeConnection(
options['url'], options['url'],
...@@ -134,6 +152,14 @@ class Recipe(object): ...@@ -134,6 +152,14 @@ class Recipe(object):
v6_add = ipv6_set.add v6_add = ipv6_set.add
tap_set = set() tap_set = set()
tap_add = tap_set.add tap_add = tap_set.add
route_gw_set = set()
route_gw_add = route_gw_set.add
route_mask_set = set()
route_mask_add = route_mask_set.add
route_ipv4_set = set()
route_v4_add = route_ipv4_set.add
route_network_set = set()
route_net_add = route_network_set.add
for tap, ip in parameter_dict.pop('ip_list'): for tap, ip in parameter_dict.pop('ip_list'):
tap_add(tap) tap_add(tap)
if valid_ipv4(ip): if valid_ipv4(ip):
...@@ -141,6 +167,21 @@ class Recipe(object): ...@@ -141,6 +167,21 @@ class Recipe(object):
elif valid_ipv6(ip): elif valid_ipv6(ip):
v6_add(ip) v6_add(ip)
# XXX: emit warning on unknown address type ? # XXX: emit warning on unknown address type ?
if 'full_ip_list' in parameter_dict:
for item in parameter_dict.pop('full_ip_list'):
if len(item) == 5:
tap, ip, gw, netmask, network = item
if tap.startswith('route_'):
if valid_ipv4(gw):
route_gw_add(gw)
if valid_ipv4(netmask):
route_mask_add(netmask)
if valid_ipv4(ip):
route_v4_add(ip)
if valid_ipv4(network):
route_net_add(network)
options['ipv4'] = ipv4_set options['ipv4'] = ipv4_set
options['ipv6'] = ipv6_set options['ipv6'] = ipv6_set
...@@ -149,6 +190,35 @@ class Recipe(object): ...@@ -149,6 +190,35 @@ class Recipe(object):
options['ipv4-random'] = list(ipv4_set)[0].encode('UTF-8') options['ipv4-random'] = list(ipv4_set)[0].encode('UTF-8')
if ipv6_set: if ipv6_set:
options['ipv6-random'] = list(ipv6_set)[0].encode('UTF-8') options['ipv6-random'] = list(ipv6_set)[0].encode('UTF-8')
if route_ipv4_set:
options['tap-ipv4'] = list(route_ipv4_set)[0].encode('UTF-8')
options['tap-network-information-dict'] = dict(ipv4=route_ipv4_set,
netmask=route_mask_set,
gateway=route_gw_set,
network=route_network_set)
else:
options['tap-network-information-dict'] = {}
if route_gw_set:
options['tap-gateway'] = list(route_gw_set)[0].encode('UTF-8')
if route_mask_set:
options['tap-netmask'] = list(route_mask_set)[0].encode('UTF-8')
if route_network_set:
options['tap-network'] = list(route_network_set)[0].encode('UTF-8')
storage_home = options.get('storage-home')
storage_dict = {}
if storage_home and os.path.exists(storage_home) and \
os.path.isdir(storage_home):
for filename in os.listdir(storage_home):
storage_path = os.path.join(storage_home, filename,
options['slap-computer-partition-id'])
if os.path.exists(storage_path) and os.path.isdir(storage_path):
storage_link = os.path.join(instance_root, 'DATA', filename)
mkdir_p(os.path.join(instance_root, 'DATA'))
if not os.path.lexists(storage_link):
os.symlink(storage_path, storage_link)
storage_dict[filename] = storage_link
options['storage-dict'] = storage_dict
options['tap'] = tap_set options['tap'] = tap_set
return self._expandParameterDict(options, parameter_dict) return self._expandParameterDict(options, parameter_dict)
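# For illustration (hypothetical addresses), a partition with one routed tap
# interface would publish options such as:
#   tap-ipv4 = 192.168.100.2, tap-gateway = 192.168.100.1,
#   tap-netmask = 255.255.255.0, tap-network = 192.168.100.0
# with tap-network-information-dict holding the corresponding sets.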
......
...@@ -34,6 +34,7 @@ import subprocess ...@@ -34,6 +34,7 @@ import subprocess
import slapos.slap import slapos.slap
import netaddr import netaddr
import logging import logging
import errno
import zc.buildout import zc.buildout
...@@ -54,6 +55,18 @@ class Recipe: ...@@ -54,6 +55,18 @@ class Recipe:
return ip return ip
raise AttributeError raise AttributeError
def _getTapIpAddressList(self, test_method):
"""Internal helper method to fetch full ip address assigned for tap"""
if not 'full_ip_list' in self.parameter_dict:
return ()
for item in self.parameter_dict['full_ip_list']:
if len(item) == 5:
tap, ip, gw, mask, net = item
if tap.startswith('route_') and test_method(ip) and \
test_method(gw) and test_method(mask):
return (ip, gw, mask, net)
return ()
def getLocalIPv4Address(self): def getLocalIPv4Address(self):
"""Returns local IPv4 address available on partition""" """Returns local IPv4 address available on partition"""
# XXX: Lack checking for locality of address # XXX: Lack checking for locality of address
...@@ -63,6 +76,11 @@ class Recipe: ...@@ -63,6 +76,11 @@ class Recipe:
"""Returns global IPv6 address available on partition""" """Returns global IPv6 address available on partition"""
# XXX: Lack checking for globality of address # XXX: Lack checking for globality of address
return self._getIpAddress(netaddr.valid_ipv6) return self._getIpAddress(netaddr.valid_ipv6)
def getLocalTapIPv4AddressList(self):
"""Returns global IPv6 address available for tap interface"""
# XXX: Lack checking for locality of address
return self._getTapIpAddressList(netaddr.valid_ipv4)
def getNetworkInterface(self): def getNetworkInterface(self):
"""Returns the network interface available on partition""" """Returns the network interface available on partition"""
...@@ -72,6 +90,20 @@ class Recipe: ...@@ -72,6 +90,20 @@ class Recipe:
if name: if name:
return name return name
raise AttributeError, "Not network interface found" raise AttributeError, "Not network interface found"
def mkdir_p(self, path, mode=0700):
"""
Creates a directory and its parents, if needed.
NB: If the directory already exists, it does not change its permission.
"""
try:
os.makedirs(path, mode)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def install(self): def install(self):
slap = slapos.slap.slap() slap = slapos.slap.slap()
...@@ -81,6 +113,15 @@ class Recipe: ...@@ -81,6 +113,15 @@ class Recipe:
server_url = slap_connection['server_url'] server_url = slap_connection['server_url']
key_file = slap_connection.get('key_file') key_file = slap_connection.get('key_file')
cert_file = slap_connection.get('cert_file') cert_file = slap_connection.get('cert_file')
instance_root = self.buildout['buildout']['directory']
storage_configuration_dict = self.buildout.get('storage-configuration')
network_dict = self.buildout.get('network-information')
storage_home = ''
global_ipv4_network = ''
if storage_configuration_dict:
storage_home = storage_configuration_dict.get('storage-home')
if network_dict:
global_ipv4_network = network_dict.get('global-ipv4-network')
slap.initializeConnection(server_url, key_file, cert_file) slap.initializeConnection(server_url, key_file, cert_file)
self.computer_partition = slap.registerComputerPartition( self.computer_partition = slap.registerComputerPartition(
computer_id, computer_id,
...@@ -128,6 +169,16 @@ class Recipe: ...@@ -128,6 +169,16 @@ class Recipe:
self.getGlobalIPv6Address()) self.getGlobalIPv6Address())
buildout.set('slap-network-information', 'network-interface', buildout.set('slap-network-information', 'network-interface',
self.getNetworkInterface()) self.getNetworkInterface())
tap_ip_list = self.getLocalTapIPv4AddressList()
tap_ipv4 = tap_gateway = tap_netmask = tap_network = ''
if tap_ip_list:
tap_ipv4, tap_gateway, tap_netmask, tap_network = tap_ip_list
buildout.set('slap-network-information', 'tap-ipv4', tap_ipv4)
buildout.set('slap-network-information', 'tap-gateway', tap_gateway)
buildout.set('slap-network-information', 'tap-netmask', tap_netmask)
buildout.set('slap-network-information', 'tap-network', tap_network)
buildout.set('slap-network-information', 'global-ipv4-network',
global_ipv4_network)
# Copy/paste slap_connection # Copy/paste slap_connection
buildout.add_section('slap-connection') buildout.add_section('slap-connection')
...@@ -137,6 +188,27 @@ class Recipe: ...@@ -137,6 +188,27 @@ class Recipe:
# XXX: Needed for lxc. Use non standard API # XXX: Needed for lxc. Use non standard API
buildout.set('slap-connection', 'requested', self.computer_partition._requested_state) buildout.set('slap-connection', 'requested', self.computer_partition._requested_state)
# setup storage directory
buildout.add_section('storage-configuration')
buildout.set('storage-configuration', 'storage-home', storage_home)
if storage_home and os.path.exists(storage_home) and \
os.path.isdir(storage_home):
# Create folder instance_root/DATA/ if it does not exist
data_home = os.path.join(instance_root, 'DATA')
self.mkdir_p(data_home)
for filename in os.listdir(storage_home):
storage_path = os.path.join(storage_home, filename, computer_partition_id)
if os.path.exists(storage_path) and os.path.isdir(storage_path):
storage_link = os.path.join(data_home, filename)
if os.path.lexists(storage_link):
if not os.path.islink(storage_link):
raise zc.buildout.UserError(
'Target %r already exists but is not a link' % storage_link)
#os.unlink(storage_link)
else:
os.symlink(storage_path, storage_link)
buildout.set('storage-configuration', filename, storage_link)
work_directory = os.path.abspath(self.buildout['buildout'][ work_directory = os.path.abspath(self.buildout['buildout'][
'directory']) 'directory'])
buildout_filename = os.path.join(work_directory, buildout_filename = os.path.join(work_directory,
......
...@@ -33,14 +33,15 @@ class Recipe(GenericBaseRecipe): ...@@ -33,14 +33,15 @@ class Recipe(GenericBaseRecipe):
self.options['configuration-path'], self.options['configuration-path'],
self.substituteTemplate( self.substituteTemplate(
self.getTemplateFilename('tidstorage.py.in'), self.options)) self.getTemplateFilename('tidstorage.py.in'), self.options))
r = [configuration_file]
tidstorage_wrapper = self.createPythonScript( wrapper = self.options.get('tidstorage-wrapper')
self.options['tidstorage-wrapper'], wrapper and r.append(self.createPythonScript(wrapper,
'slapos.recipe.librecipe.execute.execute', 'slapos.recipe.librecipe.execute.execute',
[self.options['tidstoraged-binary'], '--nofork', '--config', [self.options['tidstoraged-binary'], '--nofork', '--config',
configuration_file]) configuration_file]))
repozo_wrapper = self.createPythonScript( r.append(self.createPythonScript(
self.options['repozo-wrapper'], self.options['repozo-wrapper'],
'slapos.recipe.librecipe.execute.execute', 'slapos.recipe.librecipe.execute.execute',
[self.options['tidstorage-repozo-binary'], [self.options['tidstorage-repozo-binary'],
...@@ -48,6 +49,6 @@ class Recipe(GenericBaseRecipe): ...@@ -48,6 +49,6 @@ class Recipe(GenericBaseRecipe):
'--repozo', self.options['repozo-binary'], '--repozo', self.options['repozo-binary'],
'--gzip', '--gzip',
'--quick', '--quick',
]) ]))
return [configuration_file, tidstorage_wrapper, repozo_wrapper] return r
...@@ -53,9 +53,7 @@ class Recipe(GenericBaseRecipe): ...@@ -53,9 +53,7 @@ class Recipe(GenericBaseRecipe):
if filename.strip()] if filename.strip()]
if environment is not None: if environment is not None:
environment = dict((k.strip(), v.strip()) for k, v in [ environment = dict((k.strip(), v.strip()) for k, v in [
line.split('=') line.split('=') for line in environment.splitlines() if line.strip() ])
for line in environment.split('\n')
])
return [self.createPythonScript( return [self.createPythonScript(
wrapper_path, wrapper_path,
'slapos.recipe.librecipe.execute.generic_exec', 'slapos.recipe.librecipe.execute.generic_exec',
......
...@@ -13,12 +13,13 @@ class TestDcron(unittest.TestCase): ...@@ -13,12 +13,13 @@ class TestDcron(unittest.TestCase):
_("10-15", "0 0 15 10 *") _("10-15", "0 0 15 10 *")
_("monday *-12-* 17:00", "00 17 * 12 1") _("monday *-12-* 17:00", "00 17 * 12 1")
_("12,14,13,12:20,10,30", "20,10,30 12,14,13,12 * * *") # TODO: sort _("12,14,13,12:20,10,30", "20,10,30 12,14,13,12 * * *") # TODO: sort
_("*-1/2-1,3 *:30", "30 * 1,3 1/2 *") _("*-1/2-1,3 *:30", "30 * 1,3 1-11/2 *")
_("03-05 08:05", "05 08 05 03 *") _("03-05 08:05", "05 08 05 03 *")
_("08:05:00", "05 08 * * *") _("08:05:00", "05 08 * * *")
_("05:40", "40 05 * * *") _("05:40", "40 05 * * *")
_("Sat,Sun 12-* 08:05", "05 08 * 12 0,6") _("Sat,Sun 12-* 08:05", "05 08 * 12 0,6")
_("Sat,Sun 08:05", "05 08 * * 0,6") _("Sat,Sun 08:05", "05 08 * * 0,6")
_("*:25/20", "25-45/20 * * * *")
def _(systemd): def _(systemd):
self.assertRaises(Exception, systemd_to_cron, systemd) self.assertRaises(Exception, systemd_to_cron, systemd)
...@@ -31,7 +32,7 @@ class TestDcron(unittest.TestCase): ...@@ -31,7 +32,7 @@ class TestDcron(unittest.TestCase):
_("08:05:40") _("08:05:40")
_("2003-03-05") _("2003-03-05")
_("0-1"); _("13-1"); _("6/4-1"); _("5/8-1") _("0-1"); _("13-1"); _("8/5-1")
_("1-0"); _("1-32"); _("1-4/3"); _("1-14/18") _("1-0"); _("1-32"); _("1-14/18")
_("24:0");_("9/9:0"); _("8/16:0") _("24:0"); _("8/16:0")
_("0:60"); _("0:22/22"); _("0:15/45") _("0:60"); _("0:15/45")
import os, time
import shutil
import sys
import tempfile
import unittest
from slapos.slap.slap import NotFoundError, ConnectionError
from slapos.recipe import re6stnet
class Re6stnetTest(unittest.TestCase):
def setUp(self):
self.ssl_dir = tempfile.mkdtemp()
self.conf_dir = tempfile.mkdtemp()
self.base_dir = tempfile.mkdtemp()
self.token_dir = tempfile.mkdtemp()
self.dir_list = [self.ssl_dir, self.conf_dir, self.base_dir, self.token_dir]
config_file = os.path.join(self.base_dir, 'config')
with open(config_file, 'w') as f:
f.write('port 9201')
self.options = options = {
'openssl-bin': '/usr/bin/openssl',
'key-file': os.path.join(self.ssl_dir, 'cert.key'),
'cert-file': os.path.join(self.ssl_dir, 'cert.crt'),
'dh-file': os.path.join(self.ssl_dir, 'dh.pem'),
'key-size': '2048',
'conf-dir': self.conf_dir,
'token-dir': self.token_dir,
'wrapper': os.path.join(self.base_dir, 'wrapper'),
'config-file': config_file,
'ipv4': '127.0.0.1',
'port': '9201',
'pid-file': '/path/to/pid/file',
'db-path': '/path/to/db',
'command': '/path/to/command',
'manager-wrapper': os.path.join(self.base_dir, 'manager_wrapper'),
'drop-service-wrapper': os.path.join(self.base_dir, 'drop_wrapper'),
'check-service-wrapper': os.path.join(self.base_dir, 'check_wrapper'),
'revoke-service-wrapper': os.path.join(self.base_dir, 'revoke_wrapper'),
'slave-instance-list': '{}'
}
def tearDown(self):
for path in self.dir_list:
if os.path.exists(path):
shutil.rmtree(path)
def new_recipe(self):
buildout = {
'buildout': {
'bin-directory': '',
'find-links': '',
'allow-hosts': '',
'develop-eggs-directory': '',
'eggs-directory': '',
'python': 'testpython',
},
'testpython': {
'executable': sys.executable,
},
'slap-connection': {
'computer-id': 'comp-test',
'partition-id': 'slappart0',
'server-url': 'http://server.com',
'software-release-url': 'http://software.com',
'key-file': '/path/to/key',
'cert-file': '/path/to/cert'
}
}
options = self.options
return re6stnet.Recipe(buildout=buildout, name='re6stnet', options=options)
def checkWrapper(self, path):
self.assertTrue(os.path.exists(path))
content = ""
token_file = os.path.join(self.options['conf-dir'], 'token.json')
with open(path, 'r') as f:
content = f.read()
self.assertIn("'token_json': '%s'" % token_file, content)
self.assertIn("'partition_id': 'slappart0'", content)
self.assertIn("'computer_id': 'comp-test'", content)
self.assertIn("'key_file': '/path/to/key'", content)
self.assertIn("'cert_file': '/path/to/cert'", content)
self.assertIn("'server_url': 'http://server.com'", content)
self.assertIn("'db': '%s'" % self.options['db-path'], content)
self.assertIn("'token_base_path': '%s'" % self.token_dir, content)
self.assertIn("'registry_url': 'http://%s:%s/'" % (self.options['ipv4'],
self.options['port']), content)
def checkRegistryWrapper(self):
path = os.path.join(self.base_dir, 'wrapper')
self.assertTrue(os.path.exists(path))
content = ""
config_file = os.path.join(self.base_dir, 'config')
with open(path, 'r') as f:
content = f.read()
self.assertIn("@%s" % config_file, content)
self.assertIn("/path/to/pid/file", content)
self.assertIn("/path/to/command", content)
def fake_generateCertificates(self):
return
def test_generateCertificates(self):
self.options['ipv6-prefix'] = '2001:db8:24::/48'
self.options['key-size'] = '2048'
recipe = self.new_recipe()
recipe.generateCertificate()
self.assertItemsEqual(os.listdir(self.ssl_dir),
['cert.key', 'cert.crt', 'dh.pem'])
last_time = time.ctime(os.stat(self.options['key-file'])[7])
recipe.generateCertificate()
self.assertTrue(os.path.exists(self.options['key-file']))
this_time = time.ctime(os.stat(self.options['key-file'])[7])
self.assertEqual(last_time, this_time)
def test_getSerialFromIpv6(self):
ipv6 = 'be28:db8:fe6a:d85:4fe:54a:ae:aea/64'
recipe = self.new_recipe()
serial = recipe.getSerialFromIpv6(ipv6)
self.assertEqual(serial, '0x1be280db8fe6a0d8504fe054a00ae0aea')
ipv6 = '2001:db8:24::/48'
serial = recipe.getSerialFromIpv6(ipv6)
self.assertEqual(serial, '0x120010db80024')
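The expected serials above follow from zero-padding every hextet of the uncompressed part of the IPv6 prefix to four digits, concatenating them and prefixing '0x1'; an illustrative reimplementation that matches both assertions (not necessarily the recipe's exact code):

    def serial_from_ipv6(ipv6):
        # 'be28:db8:...:aea/64' -> '0x1be280db8...0aea'
        address = ipv6.split('/')[0].rstrip(':')
        return '0x1' + ''.join(part.zfill(4) for part in address.split(':') if part)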
def test_install(self):
self.options.update({
'ipv6-prefix': '2001:db8:24::/48',
'slave-instance-list': '''[
{"slave_reference":"SOFTINST-58770"},
{"slave_reference":"SOFTINST-58778"}
]
'''
})
recipe = self.new_recipe()
recipe.generateCertificate = self.fake_generateCertificates
try:
recipe.install()
except (NotFoundError, ConnectionError):
# Recipe will raise a not found error when trying to publish slave information
pass
token_file = os.path.join(self.options['conf-dir'], 'token.json')
self.assertTrue(os.path.exists(token_file))
# token file must contain 2 elements
token_content = recipe.readFile(token_file)
self.assertIn('SOFTINST-58770', token_content)
self.assertIn('SOFTINST-58778', token_content)
token_dict = recipe.loadJsonFile(token_file)
self.assertEqual(len(token_dict), 2)
self.assertTrue(token_dict.has_key('SOFTINST-58770'))
self.assertTrue(token_dict.has_key('SOFTINST-58778'))
self.assertItemsEqual(os.listdir(self.token_dir),
['SOFTINST-58770.add', 'SOFTINST-58778.add'])
first_add = recipe.readFile(os.path.join(self.token_dir, 'SOFTINST-58770.add'))
self.assertEqual(token_dict['SOFTINST-58770'], first_add)
second_add = recipe.readFile(os.path.join(self.token_dir, 'SOFTINST-58778.add'))
self.assertEqual(token_dict['SOFTINST-58778'], second_add)
self.checkWrapper(os.path.join(self.base_dir, 'manager_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'drop_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'check_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'revoke_wrapper'))
self.checkRegistryWrapper()
# Remove one element
self.options.update({
"slave-instance-list": """[{"slave_reference":"SOFTINST-58770"}]"""
})
recipe = self.new_recipe()
recipe.generateCertificate = self.fake_generateCertificates
try:
recipe.install()
except (NotFoundError, ConnectionError):
# Recipe will raise a not found error when trying to publish slave information
pass
token_dict = recipe.loadJsonFile(token_file)
self.assertEqual(len(token_dict), 1)
self.assertEqual(token_dict['SOFTINST-58770'], first_add)
self.assertItemsEqual(os.listdir(self.token_dir),
['SOFTINST-58770.add', 'SOFTINST-58778.remove'])
second_remove = recipe.readFile(os.path.join(self.token_dir, 'SOFTINST-58778.remove'))
self.assertEqual(second_add, second_remove)
def test_install_empty_slave(self):
self.options.update({
'ipv6-prefix': '2001:db8:24::/48'
})
recipe = self.new_recipe()
recipe.generateCertificate = self.fake_generateCertificates
recipe.install()
token_file = os.path.join(self.options['conf-dir'], 'token.json')
self.assertTrue(os.path.exists(token_file))
token_content = recipe.readFile(token_file)
self.assertEqual(token_content, '{}')
self.assertItemsEqual(os.listdir(self.options['token-dir']), [])
self.checkWrapper(os.path.join(self.base_dir, 'manager_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'drop_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'check_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'revoke_wrapper'))
self.checkRegistryWrapper()
...@@ -5,7 +5,6 @@ extends = ...@@ -5,7 +5,6 @@ extends =
../../stack/slapos.cfg ../../stack/slapos.cfg
../../component/dash/buildout.cfg ../../component/dash/buildout.cfg
../../component/binutils/buildout.cfg
../../component/lxml-python/buildout.cfg ../../component/lxml-python/buildout.cfg
../../component/apache/buildout.cfg ../../component/apache/buildout.cfg
../../component/gzip/buildout.cfg ../../component/gzip/buildout.cfg
...@@ -25,7 +24,6 @@ parts += ...@@ -25,7 +24,6 @@ parts +=
template template
template-apache-frontend template-apache-frontend
template-apache-replicate template-apache-replicate
binutils
apache-2.2 apache-2.2
apache-antiloris-apache-2.2 apache-antiloris-apache-2.2
...@@ -80,7 +78,7 @@ mode = 0644 ...@@ -80,7 +78,7 @@ mode = 0644
[template-slave-list] [template-slave-list]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache-custom-slave-list.cfg.in url = ${:_profile_base_location_}/templates/apache-custom-slave-list.cfg.in
md5sum = bae669cdc917c68186a387903478a53d md5sum = 1fe76dde85c488e94baf8510775ebcaf
mode = 640 mode = 640
[template-slave-configuration] [template-slave-configuration]
...@@ -98,7 +96,7 @@ mode = 640 ...@@ -98,7 +96,7 @@ mode = 640
[template-apache-frontend-configuration] [template-apache-frontend-configuration]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache.conf.in url = ${:_profile_base_location_}/templates/apache.conf.in
md5sum = ce88924c53f09c9a3ef12ec4d8a8ad16 md5sum = 6c72015a9af4f1edab63712f5c6aec99
mode = 640 mode = 640
[template-apache-cached-configuration] [template-apache-cached-configuration]
...@@ -129,13 +127,13 @@ mode = 640 ...@@ -129,13 +127,13 @@ mode = 640
[template-default-virtualhost] [template-default-virtualhost]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/000.conf.in url = ${:_profile_base_location_}/templates/000.conf.in
md5sum = b40ffdab93a80b40046e3bbb2f7a58bc md5sum = ed1b680e31e30596bf051682ec0270b4
mode = 640 mode = 640
[template-default-slave-virtualhost] [template-default-slave-virtualhost]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/default-virtualhost.conf.in url = ${:_profile_base_location_}/templates/default-virtualhost.conf.in
md5sum = 9bd3eda3c2aad5061f6cd6985e6f18d0 md5sum = 5463dd67f1b1bea0bee57a421e371dd0
mode = 640 mode = 640
[template-log-access] [template-log-access]
......
...@@ -15,12 +15,20 @@ ...@@ -15,12 +15,20 @@
"type": "string", "type": "string",
"pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$" "pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$"
}, },
"server-alias": {
"title": "Server Alias",
"description": "Server Alias List separated by space",
"type": "string",
"default": ""
},
"type": { "type": {
"title": "Backend Type", "title": "Backend Type",
"description": "Type of the backend", "description": "Type of slave. If redirect, the slave will redirect to the given url. If zope, the rewrite rules will be compatible with Virtual Host Monster",
"type": "string", "type": "string",
"default": "", "default": "",
"enum": ["", "zope"] "enum": ["", "zope", "redirect"]
}, },
"path": { "path": {
...@@ -71,6 +79,30 @@ ...@@ -71,6 +79,30 @@
"default": "false", "default": "false",
"enum": ["false", "true"] "enum": ["false", "true"]
}, },
"disable-no-cache-request": {
"title": "Disable 'no-cache' requests",
"description": "If set to true, no-cache control headers will be disabled",
"type": "string",
"default": "false",
"enum": ["false", "true"]
},
"prefer-gzip-encoding-to-backend": {
"title": "Prefer gzip Encoding for Backend",
"description": "If set to true, if a request is made with accept encoding 'gzip', only that one will be transferred to the backend",
"type": "string",
"default": "false",
"enum": ["false", "true"]
},
"disabled-cookie-list": {
"title": "Disabled Cookies",
"description": "List of Cookies separated by space that will not be sent to the backend",
"type": "string",
"default": ""
},
"apache_custom_http": { "apache_custom_http": {
"title": "HTTP configuration", "title": "HTTP configuration",
"description": "Raw http configuration in python template format. Your site will be rejected if you use it without notification and approval of the frontend adminastrator", "description": "Raw http configuration in python template format. Your site will be rejected if you use it without notification and approval of the frontend adminastrator",
......
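Taken together, the slave options added above could be requested with parameters such as the following sketch (a hypothetical example, not part of the schema):

    slave_parameter_dict = {
        'custom_domain': 'www.example.org',
        'server-alias': 'example.org mail.example.org',
        'type': 'zope',                              # '', 'zope' or 'redirect'
        'url': 'https://backend.example.org/',
        'disable-no-cache-request': 'true',
        'prefer-gzip-encoding-to-backend': 'true',
        'disabled-cookie-list': 'sessionid tracking',
    }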
...@@ -12,7 +12,7 @@ gitdb = 0.5.4 ...@@ -12,7 +12,7 @@ gitdb = 0.5.4
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
smmap = 0.8.2 smmap = 0.8.2
...@@ -30,4 +30,4 @@ feedparser = 5.1.3 ...@@ -30,4 +30,4 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -2,7 +2,8 @@ ...@@ -2,7 +2,8 @@
ServerName www.example.org ServerName www.example.org
SSLEngine on SSLEngine on
SSLProxyEngine on SSLProxyEngine on
SSLProtocol ALL -SSLv2 SSLProtocol ALL -SSLv2 -SSLv3
SSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5:!RC4
# Rewrite part # Rewrite part
ProxyVia On ProxyVia On
......
...@@ -163,7 +163,7 @@ apache_custom_https = {{ dumps(apache_custom_https) }} ...@@ -163,7 +163,7 @@ apache_custom_https = {{ dumps(apache_custom_https) }}
# The slave use cache # The slave use cache
# Next line is forbidden and people who copy it will be hanged short # Next line is forbidden and people who copy it will be hanged short
{% set enable_cache = ('' ~ slave_instance.get('enable_cache', '')).lower() in TRUE_VALUES -%} {% set enable_cache = (('' ~ slave_instance.get('enable_cache', '')).lower() in TRUE_VALUES and slave_instance.get('type', '') != 'redirect') -%}
{% if enable_cache -%} {% if enable_cache -%}
{% do cached_server_dict.__setitem__(slave_instance.get('custom_domain'), slave_instance.get('url')) -%} {% do cached_server_dict.__setitem__(slave_instance.get('custom_domain'), slave_instance.get('url')) -%}
{% do slave_instance.__setitem__('url', cache_access) -%} {% do slave_instance.__setitem__('url', cache_access) -%}
......
...@@ -41,6 +41,10 @@ CustomLog "{{ access_log }}" combined ...@@ -41,6 +41,10 @@ CustomLog "{{ access_log }}" combined
<Directory {{ document_root }}> <Directory {{ document_root }}>
Order Allow,Deny Order Allow,Deny
Allow from All Allow from All
Options -Indexes
ErrorDocument 404 /notfound.html
RewriteEngine on
RewriteRule ^/?$ notfound.html [R=404,L]
</Directory> </Directory>
# List of modules # List of modules
...@@ -115,7 +119,8 @@ SSLSessionCache shmcb:/{{ httpd_mod_ssl_cache_directory }}/ssl_scache(512000) ...@@ -115,7 +119,8 @@ SSLSessionCache shmcb:/{{ httpd_mod_ssl_cache_directory }}/ssl_scache(512000)
SSLSessionCacheTimeout 300 SSLSessionCacheTimeout 300
SSLRandomSeed startup /dev/urandom 256 SSLRandomSeed startup /dev/urandom 256
SSLRandomSeed connect builtin SSLRandomSeed connect builtin
SSLProtocol ALL -SSLv2 SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5:!RC4
<FilesMatch "\.(cgi|shtml|phtml|php)$"> <FilesMatch "\.(cgi|shtml|phtml|php)$">
SSLOptions +StdEnvVars SSLOptions +StdEnvVars
</FilesMatch> </FilesMatch>
...@@ -127,4 +132,7 @@ include {{frontend_configuration.get('log-access-configuration')}} ...@@ -127,4 +132,7 @@ include {{frontend_configuration.get('log-access-configuration')}}
NameVirtualHost *:{{ http_port }} NameVirtualHost *:{{ http_port }}
NameVirtualHost *:{{ https_port }} NameVirtualHost *:{{ https_port }}
include {{ slave_configuration_directory }}/*.conf include {{ slave_configuration_directory }}/*.conf
\ No newline at end of file
ErrorDocument 404 /notfound.html
RewriteRule (.*) /notfound.html [R=404,L]
{% set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%} {% set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
{% set disable_no_cache_header = ('' ~ slave_parameter.get('disable-no-cache-request', '')).lower() in TRUE_VALUES -%}
{%- set prefer_gzip = ('' ~ slave_parameter.get('prefer-gzip-encoding-to-backend', '')).lower() in TRUE_VALUES -%}
<VirtualHost *:{{ https_port }}> <VirtualHost *:{{ https_port }}>
ServerName {{ slave_parameter.get('custom_domain') }} ServerName {{ slave_parameter.get('custom_domain') }}
ServerAlias {{ slave_parameter.get('custom_domain') }} ServerAlias {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
SSLEngine on SSLEngine on
SSLProxyEngine on SSLProxyEngine on
SSLProtocol -ALL +SSLv3 +TLSv1 SSLProtocol all -SSLv2 -SSLv3
SSLHonorCipherOrder On SSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5:!RC4
SSLCipherSuite RC4-SHA:HIGH:!ADH
{% set ssl_configuration_list = [('SSLCertificateFile', 'path_to_ssl_crt'), {% set ssl_configuration_list = [('SSLCertificateFile', 'path_to_ssl_crt'),
('SSLCertificateKeyFile', 'path_to_ssl_key'), ('SSLCertificateKeyFile', 'path_to_ssl_key'),
...@@ -34,6 +43,22 @@ ...@@ -34,6 +43,22 @@
ProxyTimeout 600 ProxyTimeout 600
RewriteEngine On RewriteEngine On
{% if disable_no_cache_header %}
RequestHeader unset Cache-Control
RequestHeader unset Pragma
{% endif -%}
{% if 'disabled-cookie-list' in slave_parameter -%}
{% set disabled_cookie_list = slave_parameter.get('disabled-cookie-list', '').split() %}
{%- for disabled_cookie in disabled_cookie_list %}
{{' RequestHeader edit Cookie "(^%(disabled_cookie)s=[^;]*; |; %(disabled_cookie)s=[^;]*|^%(disabled_cookie)s=[^;]*$)" ""' % dict(disabled_cookie=disabled_cookie) }}
{% endfor -%}
{% endif %}
{%- if prefer_gzip %}
RequestHeader edit Accept-Encoding "(^gzip,.*|.*, gzip,.*|.*, gzip$|^gzip$)" "gzip"
{% endif %}
{% if slave_parameter.get('type', '') == 'zope' -%} {% if slave_parameter.get('type', '') == 'zope' -%}
{% if 'default-path' in slave_parameter %} {% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L] RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
...@@ -42,6 +67,8 @@ ...@@ -42,6 +67,8 @@
# If so, let's use Virtual Host Daemon rewrite # If so, let's use Virtual Host Daemon rewrite
# We suppose that Apache listens to 443 (even indirectly thanks to things like iptables) # We suppose that Apache listens to 443 (even indirectly thanks to things like iptables)
RewriteRule ^/(.*)$ {{ slave_parameter.get('url', '') }}/VirtualHostBase/https/{{ slave_parameter.get('custom_domain', '') }}:443/{{ slave_parameter.get('path', '') }}/VirtualHostRoot/$1 [L,P] RewriteRule ^/(.*)$ {{ slave_parameter.get('url', '') }}/VirtualHostBase/https/{{ slave_parameter.get('custom_domain', '') }}:443/{{ slave_parameter.get('path', '') }}/VirtualHostRoot/$1 [L,P]
{% elif slave_parameter.get('type', '') == 'redirect' -%}
RewriteRule (.*) {{slave_parameter.get('url', '')}}$1 [R,L]
{% else -%} {% else -%}
{% if 'default-path' in slave_parameter %} {% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L] RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
...@@ -53,6 +80,14 @@ ...@@ -53,6 +80,14 @@
<VirtualHost *:{{ http_port }}> <VirtualHost *:{{ http_port }}>
ServerName {{ slave_parameter.get('custom_domain') }} ServerName {{ slave_parameter.get('custom_domain') }}
ServerAlias {{ slave_parameter.get('custom_domain') }} ServerAlias {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter %}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor -%}
{% endif %}
SSLProxyEngine on SSLProxyEngine on
# Rewrite part # Rewrite part
ProxyVia On ProxyVia On
...@@ -69,6 +104,22 @@ ...@@ -69,6 +104,22 @@
# Remove "Secure" from cookies, as backend may be https # Remove "Secure" from cookies, as backend may be https
Header edit Set-Cookie "(?i)^(.+);secure$" "$1" Header edit Set-Cookie "(?i)^(.+);secure$" "$1"
{% if disable_no_cache_header %}
RequestHeader unset Cache-Control
RequestHeader unset Pragma
{% endif -%}
{% if 'disabled-cookie-list' in slave_parameter -%}
{% set disabled_cookie_list = slave_parameter.get('disabled-cookie-list', '').split() %}
{%- for disabled_cookie in disabled_cookie_list %}
{{' RequestHeader edit Cookie "(^%(disabled_cookie)s=[^;]*; |; %(disabled_cookie)s=[^;]*|^%(disabled_cookie)s=[^;]*$)" ""' % dict(disabled_cookie=disabled_cookie) }}
{% endfor -%}
{% endif %}
{%- if prefer_gzip %}
RequestHeader edit Accept-Encoding "(^gzip,.*|.*, gzip,.*|.*, gzip$|^gzip$)" "gzip"
{% endif %}
# Next line is forbidden and people who copy it will be hanged short # Next line is forbidden and people who copy it will be hanged short
{% set https_only = ('' ~ slave_parameter.get('https-only', '')).lower() in TRUE_VALUES -%} {% set https_only = ('' ~ slave_parameter.get('https-only', '')).lower() in TRUE_VALUES -%}
{% if https_only -%} {% if https_only -%}
...@@ -77,6 +128,8 @@ ...@@ -77,6 +128,8 @@
# on standard port (443). # on standard port (443).
RewriteCond %{SERVER_PORT} !^{{ https_port }}$ RewriteCond %{SERVER_PORT} !^{{ https_port }}$
RewriteRule ^/(.*) https://%{SERVER_NAME}/$1 [NC,R,L] RewriteRule ^/(.*) https://%{SERVER_NAME}/$1 [NC,R,L]
{% elif slave_parameter.get('type', '') == 'redirect' -%}
RewriteRule (.*) {{slave_parameter.get('url', '')}}$1 [R,L]
{% elif slave_parameter.get('type', '') == 'zope' -%} {% elif slave_parameter.get('type', '') == 'zope' -%}
{% if 'default-path' in slave_parameter %} {% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L] RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
......
...@@ -67,7 +67,7 @@ mode = 0644 ...@@ -67,7 +67,7 @@ mode = 0644
[template-backup-script] [template-backup-script]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-backup-script.sh.in url = ${:_profile_base_location_}/template-backup-script.sh.in
md5sum = 0d8fb8ea80966af7d67ad50d50547dce md5sum = 47b20031db3b575651d8515d5add23e6
output = ${buildout:directory}/template-backup-script.sh.in output = ${buildout:directory}/template-backup-script.sh.in
mode = 0644 mode = 0644
......
...@@ -29,7 +29,7 @@ if [ $RESULT -eq 0 ] ...@@ -29,7 +29,7 @@ if [ $RESULT -eq 0 ]
then then
${coreutils-output:echo} "`${coreutils-output:date} -u`,$${:statistic_log},$${:hostname} backup success" >> $${:status_log} ${coreutils-output:echo} "`${coreutils-output:date} -u`,$${:statistic_log},$${:hostname} backup success" >> $${:status_log}
${coreutils-output:rm} -f $${:statistic_log} ${coreutils-output:rm} -f $${:statistic_log}
${findutils-output:find} rdiff-backup-data/ -name "session_statistic*" | ${coreutils-output:sort} | ${coreutils-output:tail} -n 1 | ${findutils-output:xargs} ${rdiff-backup-output:rdiff-backup} --calculate-average >> $${:statistic_log} ${findutils-output:find} rdiff-backup-data/ -maxdepth 1 -name "session_statistic*" | ${coreutils-output:sort} | ${coreutils-output:tail} -n 1 | ${findutils-output:xargs} ${rdiff-backup-output:rdiff-backup} --calculate-average >> $${:statistic_log}
else else
${coreutils-output:echo} "`${coreutils-output:date} -u`,$${:statistic_log},$${:hostname} backup failed" >> $${:status_log} ${coreutils-output:echo} "`${coreutils-output:date} -u`,$${:statistic_log},$${:hostname} backup failed" >> $${:status_log}
fi fi
......
[buildout]
parts =
request-re6stnet-token-slave
request-frontend-token-slave
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
# Create all needed directories
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc/
var = $${:home}/var/
srv = $${:home}/srv/
bin = $${:home}/bin/
tmp = $${:home}/tmp/
[request-frontend-token-slave]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = WebSite Frontend
# XXX We have hardcoded SR URL here.
software-url = product.frontend
slave = true
config-url = http://$${request-re6stnet-token-slave:connection-ipv6}/
config-domain = $${slap-parameter:frontend-domain}
return = site_url domain
[request-re6stnet-token-slave]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Re6st token Frontend
# XXX We have hardcoded SR URL here.
software-url = product.re6st
slave = true
return = token info_1 ipv6
[publish-connection-informations]
recipe = slapos.cookbook:publish
url = https://$${request-frontend-token-slave:connection-domain}
token = $${request-re6stnet-token-slave:connection-token}
ipv6 = $${request-re6stnet-token-slave:connection-ipv6}
info_1 = $${request-re6stnet-token-slave:info_1}
\ No newline at end of file
[buildout]
parts =
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${instance-base-runner:rendered}
[instance-base-runner]
recipe = slapos.recipe.template:jinja2
template = ${template-cdn-request:output}
rendered = $${buildout:directory}/template-cdn.cfg
extensions = jinja2.ext.do
context = key buildout buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
mode = 0644
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[buildout]
extends =
../../stack/slapos.cfg
# stacks are listed from most generic to most specific,
# to avoid versioning issues
parts =
slapos-cookbook-develop
template
eggs
template-cdn-request
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = a91fe7c80720d57c2acbf606a6a0d84d
mode = 0644
[template-cdn-request]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-cdn-request.cfg
output = ${buildout:directory}/template-cdn-request.cfg
md5sum = 4c5ad2a5e9c4364588e1e4212ed8d1aa
mode = 0644
[slapos.cookbook-repository]
branch = request.product
[eggs]
recipe = z3c.recipe.scripts
eggs =
collective.recipe.environment
cns.recipe.symlink
erp5.util
lock-file
plone.recipe.command
slapos.recipe.build
${slapos-cookbook:eggs}
\ No newline at end of file
...@@ -21,6 +21,7 @@ cert = ${slap-connection:cert-file} ...@@ -21,6 +21,7 @@ cert = ${slap-connection:cert-file}
[template-jinja2-base] [template-jinja2-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 640
template = ${:_profile_base_location_}/${:filename}.in template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:directory}/${:filename} rendered = ${buildout:directory}/${:filename}
# XXX: extra-context is needed because we cannot append to a key of an extended # XXX: extra-context is needed because we cannot append to a key of an extended
...@@ -79,7 +80,3 @@ recipe = slapos.recipe.build:download ...@@ -79,7 +80,3 @@ recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-cloudoo.cfg.in url = ${:_profile_base_location_}/instance-cloudoo.cfg.in
md5sum = 4bede3be20dbc2ecfdb5d49b3184742e md5sum = 4bede3be20dbc2ecfdb5d49b3184742e
mode = 640 mode = 640
[versions]
# use newest version of slapos.cookbook
slapos.cookbook =
...@@ -2,12 +2,16 @@ ...@@ -2,12 +2,16 @@
parts = parts =
dream_simulation dream_simulation
dream_platform dream_platform
dream_test_suite
dream_interpreter
grunt_watch
publish-connection-parameter publish-connection-parameter
eggs-directory = ${buildout:eggs-directory} eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory} develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true offline = true
# parameters
[instance-parameter] [instance-parameter]
recipe = slapos.cookbook:slapconfiguration recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id} computer = $${slap_connection:computer_id}
...@@ -15,22 +19,42 @@ partition = $${slap_connection:partition_id} ...@@ -15,22 +19,42 @@ partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url} url = $${slap_connection:server_url}
key = $${slap_connection:key_file} key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file} cert = $${slap_connection:cert_file}
[dream_platform_parameter]
port = 8080
host = $${instance-parameter:ipv6-random}
port = 5000 # interpreter
host = $${slap-network-information:local-ipv4} [dream_interpreter]
recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_interpreter
wrapper-path = $${buildout:bin-directory}/dream_interpreter
parameters-extra = true
# service
[dream_platform] [dream_platform]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_platform --host $${instance-parameter:host} --port $${instance-parameter:port} command-line = ${buildout:bin-directory}/dream_platform --debug --host $${dream_platform_parameter:host} --port $${dream_platform_parameter:port} --log $${directory:log}/dream_platform.log
wrapper-path = $${directory:service}/dream_platform wrapper-path = $${directory:service}/dream_platform
parameters-extra = true parameters-extra = true
[grunt_watch]
recipe = slapos.cookbook:wrapper
command-line = bash -c 'cd ${dream-repository.git:location}; PATH=${nodejs:location}/bin/:$PATH ${dream-repository.git:location}/node_modules/grunt-cli/bin/grunt watch -f > $${directory:log}/grunt.log'
wrapper-path = $${directory:service}/dream_grunt_watch
# CLI
[dream_simulation] [dream_simulation]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_simulation command-line = ${buildout:bin-directory}/dream_simulation
wrapper-path = $${directory:script}/dream_simulation wrapper-path = $${directory:script}/dream_simulation
parameters-extra = true parameters-extra = true
[dream_test_suite]
recipe = slapos.cookbook:wrapper
command-line = ${dream_testrunner:script}
wrapper-path = $${directory:script}/dream_test_suite
parameters-extra = true
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory} home = $${buildout:directory}
...@@ -43,8 +67,5 @@ log = $${:var}/log ...@@ -43,8 +67,5 @@ log = $${:var}/log
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publishurl recipe = slapos.cookbook:publishurl
url = http://$${instance-parameter:host}:$${instance-parameter:port} url = http://[$${dream_platform_parameter:host}]:$${dream_platform_parameter:port}
; FIXME: how to enable this without "switch" swoftare type ?
[slap-network-information]
local-ipv4 = 0.0.0.0
[buildout] [buildout]
extends = versions = versions
extends =
../../stack/slapos.cfg ../../stack/slapos.cfg
../../component/numpy/buildout.cfg ../../stack/nodejs.cfg
../../component/r-language/buildout.cfg ../../component/manpy/buildout.cfg
parts =
parts +=
rpy2
slapos-cookbook slapos-cookbook
dream-build manpy
dream dream_testrunner
dream_interpreter
npm_install
instance instance
[rpy2_env]
PATH = ${r-language:location}/bin/:%(PATH)s
[rpy2]
recipe = zc.recipe.egg:custom
environment = rpy2_env
egg = rpy2>=2.3,<2.4
[dream-repository.git]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/dream.git
branch = master
git-executable = ${git:location}/bin/git
[dream-build]
; use a develop egg to easily develop and because dream already exist on pypi
; https://pypi.python.org/pypi/dream
recipe = zc.recipe.egg:develop
dependency = ${scipy-build:recipe}
setup = ${dream-repository.git:location}
[dream]
recipe = zc.recipe.egg:scripts
eggs = dream
initialization =
# rpy2 needs R in $PATH
import os
os.environ['PATH'] = '${r-language:location}/bin' + os.pathsep + os.environ['PATH']
[instance] [instance]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/instance.cfg output = ${buildout:directory}/instance.cfg
[dream_testrunner]
recipe = zc.recipe.testrunner
eggs = dream
script = dream_testrunner
initialization =
${manpy:initialization}
[dream_interpreter]
recipe = z3c.recipe.scripts
eggs = ${manpy:eggs}
interpreter = dream_interpreter
initialization =
${manpy:initialization}
[npm_install]
recipe = plone.recipe.command
stop-on-error = true
command = cd ${dream-repository.git:location} && PATH=${git:location}/bin/:$PATH ${nodejs:location}/bin/npm install .
update_command = ${:command}
[versions]
rpy2 = 2.4.0
pydot = 1.0.28
xlrd = 0.9.3
xlwt = 0.7.5
pyparsing = 2.0.3
numpy = 1.9.1
scipy = 0.13.3
simpy = 3.0.5
zope.dottedname = 4.1.0
tablib = 0.10.0
MySQL-python = 1.2.5
# indirect dependencies
collective.recipe.template = 1.11
cp.recipe.cmd = 0.5
plone.recipe.command = 1.1
slapos.recipe.template = 2.7
zope.exceptions = 4.0.7
zope.testing = 4.1.3
zc.recipe.testrunner = 2.0.0
zope.testrunner = 4.4.6
z3c.recipe.scripts = 1.0.1
[buildout]
extends =
software.cfg
# nodejs installation script does not support too deep directory structure like
# we can have when installing testnode in a webrunner. Since we do not need
# nodejs for the simulation executor, we can simply disable this section
[nodejs]
recipe =
location =
[npm_install]
recipe =
...@@ -9,14 +9,8 @@ extends = ...@@ -9,14 +9,8 @@ extends =
../../component/pwgen/buildout.cfg ../../component/pwgen/buildout.cfg
../../component/apache/buildout.cfg ../../component/apache/buildout.cfg
# Local development
develop =
${:parts-directory}/slapos.cookbook-repository
parts = parts =
# Local development
slapos-cookbook slapos-cookbook
slapos.cookbook-repository
check-recipe
template template
lxml-python lxml-python
eggs eggs
...@@ -24,20 +18,6 @@ parts = ...@@ -24,20 +18,6 @@ parts =
git git
apache apache
# Local development
[slapos.cookbook-repository]
recipe = plone.recipe.command
stop-on-error = true
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = "${git:location}/bin/git" clone --branch erp5testnode --quiet http://git.erp5.org/repos/slapos.git "${:location}"
update-command = cd "${:location}" && "${git:location}/bin/git" fetch --quiet && "${git:location}/bin/git" reset --hard @{upstream}
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[eggs] [eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
...@@ -79,7 +59,5 @@ md5sum = 22ffc8e212dcf2db8ad94cf0e5ac4772 ...@@ -79,7 +59,5 @@ md5sum = 22ffc8e212dcf2db8ad94cf0e5ac4772
[versions] [versions]
PyXML = 0.8.5 PyXML = 0.8.5
erp5.util = 0.4.41 erp5.util = 0.4.42
plone.recipe.command = 1.1 slapos.recipe.template = 2.7
slapos.recipe.template = 2.5
jsonschema = 2.4.0
{% set python_bin = parameter_dict['python-executable'] -%}
{% set publish_dict = {} -%}
{% set part_list = [] -%}
{% set ipv6 = (ipv6_set | list)[0] -%}
{% set ipv4 = (ipv4_set | list)[0] -%}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
[directory]
recipe = slapos.cookbook:mkdirectory
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
srv = ${buildout:directory}/srv
var = ${buildout:directory}/var
tmp = ${buildout:directory}/tmp
log = ${:var}/log
services = ${:etc}/service
script = ${:etc}/run
promises = ${:etc}/promise
run = ${:var}/run
ca-dir = ${:etc}/ssl
requests = ${:ca-dir}/requests/
private = ${:ca-dir}/private/
certs = ${:ca-dir}/certs/
newcerts = ${:ca-dir}/newcerts/
crl = ${:ca-dir}/crl/
[gateone-dir]
gateone = ${directory:srv}/gateone
log-prefix = ${directory:log}/gateone
cache = ${:gateone}/cache
sessions = ${:gateone}/sessions
users = ${:gateone}/users
conf = ${directory:etc}/gateone
ssl = ${:conf}/ssl
[gateone-configure]
recipe = slapos.cookbook:wrapper
port = 10443
ipv6 = {{ ipv6 }}
ipv4 = {{ ipv4 }}
settings-dir = ${gateone-dir:conf}
wrapper-path = ${directory:bin}/gateone-configure
# XXX: issue with dtach=true, so we set --dtach=false
command =
{{ parameter_dict['gateone-bin'] }} --address=${:ipv4} --port=${:port} --certificate=${gateone-dir:ssl}/certificate.pem --keyfile=${gateone-dir:ssl}/keyfile.pem --cache_dir=${gateone-dir:cache} --pid_file=${directory:run}/gateone.pid --session_dir=${gateone-dir:sessions} --settings_dir=${:settings-dir} --user_dir=${gateone-dir:users} --log_file_prefix=${gateone-dir:log-prefix} --origins=${:ipv4} --logging=info --dtach=false
#--gid=1012 --uid=987
command-line =
${:command} --configure
environment =
PATH={{ bin_directory }}:{{ openssl_location }}/bin:{{ parameter_dict['dtach-location'] }}/bin:/usr/local/bin:/usr/bin:/bin
LD_LIBRARY_PATH={{ parameter_dict['readline-location'] }}/lib
[gateone-run]
recipe = slapos.cookbook:wrapper
port = 10443
ipv6 = {{ ipv6 }}
ipv4 = {{ ipv4 }}
wrapper-path = ${directory:services}/gateone
command-line = ${gateone-configure:command}
environment =
PATH={{ bin_directory }}:{{ openssl_location }}/bin:{{ parameter_dict['dtach-location'] }}/bin:/usr/local/bin:/usr/bin:/bin
LD_LIBRARY_PATH={{ parameter_dict['readline-location'] }}/lib
HOME=${buildout:directory}
PWD=${gateone-dir:gateone}
# --cookie_secret 45-characters
# --uid needs to be set for slapos
# --origins=${:ipv4};${:ipv6}
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = {{ openssl_location }}/bin/openssl
ca-dir = ${directory:ca-dir}
requests-directory = ${directory:requests}
wrapper = ${directory:services}/certificate_authority
ca-private = ${directory:private}
ca-certs = ${directory:certs}
ca-newcerts = ${directory:newcerts}
ca-crl = ${directory:crl}
[ca-nginx]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
key-file = ${directory:certs}/nginx.key
cert-file = ${directory:certs}/nginx.crt
executable = ${nginx-run:wrapper-path}
wrapper = ${directory:services}/nginx-proxy
[tempdirectory]
recipe = slapos.cookbook:mkdirectory
client_body_temp_path = ${directory:tmp}/client_body_temp_path
proxy_temp_path = ${directory:tmp}/proxy_temp_path
fastcgi_temp_path = ${directory:tmp}/fastcgi_temp_path
uwsgi_temp_path = ${directory:tmp}/uwsgi_temp_path
scgi_temp_path = ${directory:tmp}/scgi_temp_path
[nginx-config-dict]
nb_workers = 2
ipv6 = {{ ipv6 }}
ipv4 = {{ ipv4 }}
port = 10443
backend-url = https://${gateone-configure:ipv4}:${gateone-configure:port}
ssl-certificate = ${ca-nginx:cert-file}
ssl-key = ${ca-nginx:key-file}
pid = ${directory:run}/nginx.pid
log = ${directory:log}/nginx.log
access-log = ${directory:log}/nginx.access.log
error-log = ${directory:log}/nginx.error.log
tmp = ${directory:tmp}/
[nginx-conf]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-ngnix-conf'] }}
rendered = ${directory:etc}/nginx.conf
context =
section parameter_dict nginx-config-dict
section param_tempdir tempdirectory
[nginx-run]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/nginx_proxy
command-line = {{ parameter_dict['nginx-location'] }}/sbin/nginx -p ${buildout:directory} -c ${nginx-conf:rendered}
[logrotate-apache]
< = logrotate-entry-base
name = apache
log = ${apache-conf:error-log} ${apache-conf:access-log}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf:pid-file} -s USR1
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
[publish]
recipe = slapos.cookbook:publish
url = https://[${nginx-config-dict:ipv6}]:${gateone-run:port}
[buildout]
extends =
{{ logrotate_cfg }}
parts =
certificate-authority
ca-nginx
publish
# Complete parts with sections
{{ part_list | join('\n ') }}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[slap-parameter]
[buildout]
parts =
cron-entry-logrotate
[cron]
recipe = slapos.cookbook:cron
cron-entries = ${logrotate-directory:cron-entries}
dcrond-binary = {{ dcron_location }}/sbin/crond
crontabs = ${logrotate-directory:crontabs}
cronstamps = ${logrotate-directory:cronstamps}
catcher = ${cron-simplelogger:wrapper}
binary = ${logrotate-directory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = ${logrotate-directory:bin}/cron_simplelogger
log = ${logrotate-directory:log}/cron.log
[logrotate]
recipe = slapos.cookbook:logrotate
logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:logrotate-backup}
logrotate-binary = {{ logrotate_location }}/usr/sbin/logrotate
gzip-binary = {{ gzip_location }}/bin/gzip
gunzip-binary = {{ gzip_location }}/bin/gunzip
wrapper = ${logrotate-directory:bin}/logrotate
conf = ${logrotate-directory:etc}/logrotate.conf
state-file = ${logrotate-directory:srv}/logrotate.status
[cron-entry-logrotate]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = logrotate
frequency = 0 0 * * *
command = ${logrotate:wrapper}
[logrotate-directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = ${:etc}/cron.d
cronstamps = ${:etc}/cronstamps
crontabs = ${:etc}/crontabs
logrotate-backup = ${:backup}/logrotate
logrotate-entries = ${:etc}/logrotate.d
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
backup = ${:srv}/backup
etc = ${buildout:directory}/etc
services = ${:etc}/service
log = ${buildout:directory}/var/log
[buildout]
parts = switch-softwaretype
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename}
extra-context =
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key ipv6_set slap-configuration:ipv6
key ipv4_set slap-configuration:ipv4
key slapparameter_dict slap-configuration:configuration
key computer_id slap-configuration:computer
raw logrotate_cfg {{ template_logrotate_base }}
raw dash_binary {{ dash_location }}/bin/dash
raw bin_directory {{ bin_directory }}
raw openssl_location {{ openssl_location }}
${:extra-context}
[dynamic-template-gateone-parameters]
bin-directory = {{ bin_directory }}
#python-executable =
readline-location = {{ readline_location }}
dtach-location = {{ dtach_location }}
gateone-location = {{ gateone_location}}
gateone-bin = {{ gateone_bin }}
nginx-location = {{ nginx_location}}
template-ngnix-conf = {{ template_nginx_conf }}
[dynamic-template-gateone]
< = jinja2-template-base
template = {{ template_gateone }}
filename = instance-gateone.cfg
extensions = jinja2.ext.do
extra-context =
section parameter_dict dynamic-template-gateone-parameters
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${dynamic-template-gateone:rendered}
gateone = ${:default}
[buildout]
extends =
../../component/dash/buildout.cfg
../../component/git/buildout.cfg
../../component/dcron/buildout.cfg
../../component/gzip/buildout.cfg
../../component/openssl/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/kerberos/buildout.cfg
../../component/python-kerberos/buildout.cfg
../../component/gateone/buildout.cfg
../../component/dtach/buildout.cfg
../../component/python-2.7/buildout.cfg
../../component/nginx/buildout.cfg
../../stack/slapos.cfg
parts =
slapos-cookbook
kerberos
eggs
gateone-develop
gateone
dash
check-recipe
template
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
${python-cryptography:egg}
${python-kerberos:egg}
tornado
scripts =
slapos-kill
[extra-eggs]
recipe = zc.recipe.egg
interpreter = python
eggs =
${lxml-python:egg}
${python-kerberos:egg}
tornado
gateone
setuptools
pyOpenSSL
futures
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/slapos.git
branch = re6st-master
git-executable = ${git:location}/bin/git
[download-base]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
mode = 644
[template-jinja2-base]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:directory}/${:filename}
# XXX: extra-context is needed because we cannot append to a key of an extended
# section.
extra-context =
context =
key bin_directory buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
${:extra-context}
[template]
< = template-jinja2-base
filename = template.cfg
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 428669a609aca3e0a7cae1387d332a75
extra-context =
key dash_location dash:location
key dtach_location dtach:location
key gateone_location gateone-repository:location
key logrotate_location logrotate:location
key nginx_location nginx:location
key openssl_location openssl:location
key readline_location readline:location
key template_nginx_conf template-nginx-conf:target
key template_logrotate_base template-logrotate-base:rendered
key template_gateone template-gateone:target
raw gateone_bin ${buildout:bin-directory}/gateone
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
[template-gateone]
< = download-base
filename = instance-gateone.cfg.in
md5sum = e7096a17c36c3bd27a011de57b7abfc1
[template-logrotate-base]
< = template-jinja2-base
filename = instance-logrotate-base.cfg
md5sum = f28fbd310944f321ccb34b2a34c82005
extra-context =
key dcron_location dcron:location
key gzip_location gzip:location
key logrotate_location logrotate:location
[template-nginx-conf]
< = download-base
url = ${:_profile_base_location_}/templates/${:filename}.in
filename = nginx.conf
md5sum = 72f4cc110f618b317793e21124f45121
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/gateone.egg-link
[versions]
\ No newline at end of file
worker_processes {{ parameter_dict['nb_workers'] }};
pid {{ parameter_dict['pid'] }};
error_log {{ parameter_dict['error-log'] }};
daemon off;
events {
worker_connections 1024;
accept_mutex off;
}
http {
default_type application/octet-stream;
access_log {{ parameter_dict['access-log'] }} combined;
client_max_body_size 10M;
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
server {
listen [{{ parameter_dict['ipv6'] }}]:{{ parameter_dict['port'] }} ssl;
server_name _;
ssl_certificate {{ parameter_dict['ssl-certificate'] }};
ssl_certificate_key {{ parameter_dict['ssl-key'] }};
ssl_protocols SSLv3 TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers HIGH:!aNULL:!MD5;
keepalive_timeout 90s;
client_body_temp_path {{ param_tempdir['client_body_temp_path'] }};
proxy_temp_path {{ param_tempdir['proxy_temp_path'] }};
fastcgi_temp_path {{ param_tempdir['fastcgi_temp_path'] }};
uwsgi_temp_path {{ param_tempdir['uwsgi_temp_path'] }};
scgi_temp_path {{ param_tempdir['scgi_temp_path'] }};
error_page 401 /login;
location / {
proxy_pass_header Server;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Scheme $scheme;
proxy_pass {{ parameter_dict['backend-url'] }};
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
}
}
\ No newline at end of file
#!{{ python_executable }}
"""Simple web-server that says "Hello World" for every path
hello-web [--logfile <logfile>] <bind-ip> <bind-port> ...
"""
import sys
import time
import argparse
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from socket import AF_INET6
class WebHello(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200) # ok
self.send_header("Content-type", "text/plain")
self.end_headers()
print >>self.wfile, \
"Hello %s at `%s` ; %s" % (
' '.join(self.server.webhello_argv) or 'world',
self.path, time.asctime())
class HTTPServerV6(HTTPServer):
address_family = AF_INET6
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--logfile', dest='logfile')
parser.add_argument('bind_ip')
parser.add_argument('bind_port', type=int)
parser.add_argument('argv_extra', metavar='...', nargs=argparse.REMAINDER)
args = parser.parse_args()
# HTTPServer logs to sys.stderr - override it if we have --logfile
if args.logfile:
f = open(args.logfile, 'a', buffering=1)
sys.stderr = f
print >>sys.stderr, '* %s Hello-Web starting at %s' % (
time.asctime(), (args.bind_ip, args.bind_port))
# TODO autodetect ipv6/ipv4
httpd = HTTPServerV6( (args.bind_ip, args.bind_port), WebHello)
httpd.webhello_argv = args.argv_extra
httpd.serve_forever()
if __name__ == '__main__':
main()
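Once an instance is running (see the profile below), the service can be checked with a minimal client sketch; the IPv6 address is an example, and the port matches the one fixed in instance.cfg:

    import urllib2
    # replace the address with the ipv6-random value published by the instance
    print urllib2.urlopen('http://[2001:db8::1]:7777/').read()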
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
parts = parts =
directory directory
hello-world hello-world
hello-world-promise
publish-connection-parameter publish-connection-parameter
# Define egg directories to be the one from Software Release # Define egg directories to be the one from Software Release
...@@ -24,7 +25,7 @@ offline = true ...@@ -24,7 +25,7 @@ offline = true
# It will then authenticate to SlapOS Master and fetch the instance parameters. # It will then authenticate to SlapOS Master and fetch the instance parameters.
# The parameters are accessible from $${instance-parameter:configuration.name-of-parameter} # The parameters are accessible from $${instance-parameter:configuration.name-of-parameter}
# Always the same. Just copy/paste. # Always the same. Just copy/paste.
# See docstring of slapos.cookbook:slapconfiguration for more informations. # See docstring of slapos.cookbook:slapconfiguration for more information.
recipe = slapos.cookbook:slapconfiguration recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id} computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id} partition = $${slap_connection:partition_id}
...@@ -58,24 +59,48 @@ promise = $${:etc}/promise/ ...@@ -58,24 +59,48 @@ promise = $${:etc}/promise/
# Path of the log directory used by our service (see [hello-world]) # Path of the log directory used by our service (see [hello-world])
log = $${:var}/log log = $${:var}/log
# Create a simple shell script that will only output your name if you
# specified it as instance parameter. # Create a simple web server that says "hello <configuration.name>" to the web.
# Usually, of course, we use more useful commands, like web servers.
[hello-world] [hello-world]
# helloworld service is listening on:
# - global IPv6 address, and
# - fixed port
#
# NOTE because every computer partition is allocated its own global IPv6
# address, it is ok to fix the port - different hello-world instances will have
# different IPv6 addresses and they all will be accessible at the same time.
ipv6 = $${instance-parameter:ipv6-random}
port = 7777
# full URL - for convenience
url = http://[$${:ipv6}]:$${:port}
# the service will log here
logfile = $${directory:log}/hello-world.log
# Actual script that starts the service:
# This recipe will try to "exec" the command-line after separating parameters. # This recipe will try to "exec" the command-line after separating parameters.
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
# Notice that there is only one $ at ${dash:location}, it is because it comes from the Software Release buildout profile. command-line =
command-line = ${dash:location}/bin/dash -c 'echo "Hello $${instance-parameter:configuration.name}, it is $(date)." > $${directory:log}/log.log; sleep 1000000;' ${hello-web-bin:rendered} --logfile $${hello-world:logfile}
# Put this shell script in the "etc/service" directory. Every executable of this $${:ipv6} $${:port} $${instance-parameter:configuration.name}
# repository will be started and monitored by supervisord. If one service # Put this shell script in the "etc/service" directory. Each executable of this
# exits/crashes, it will trigger a "bang" and cause run of slapgrid for the # repository will be started and monitored by supervisord. If a service
# instance. # exits/crashes, it will trigger a "bang" and cause a re-run of the instance.
wrapper-path = $${directory:service}/hello-world wrapper-path = $${directory:service}/hello-world
# Promise that checks that the hello-world service is alive
[hello-world-promise]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promise}/hello-world
hostname = $${hello-world:ipv6}
port = $${hello-world:port}
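In essence, this promise only verifies that something accepts TCP connections on [ipv6]:port; a rough, hypothetical equivalent of such a check (not the actual check_port_listening implementation) is:

import socket
import sys

def port_is_listening(host, port):
    # IPv6 literals (they contain ':') need an AF_INET6 socket.
    family = socket.AF_INET6 if ':' in host else socket.AF_INET
    sock = socket.socket(family, socket.SOCK_STREAM)
    sock.settimeout(5)
    try:
        sock.connect((host, port))
        return True
    except socket.error:
        return False
    finally:
        sock.close()

if __name__ == '__main__':
    # Exit 0 when the service listens, non-zero otherwise - the convention promise scripts follow.
    sys.exit(0 if port_is_listening(sys.argv[1], int(sys.argv[2])) else 1)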
# Publish all the parameters needed for the user to connect to the instance. # Publish all the parameters needed for the user to connect to the instance.
# It can be anything: URL(s), password(s), or arbitrary parameters. # It can be anything: URL(s), password(s), or arbitrary parameters.
# Here we'll just echo back the entered name as instance parameter # Here we'll just echo back the entered name as instance parameter
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
name = Hello $${instance-parameter:configuration.name}! name = Hello $${instance-parameter:configuration.name}!
url = $${hello-world:url}
[buildout] [buildout]
extends = extends =
# "slapos" stack describes basic things needed for 99.9% of SlapOS Software # "slapos" stack describes basic things needed for 99.9% of SlapOS Software
# Releases # Releases
../../stack/slapos.cfg ../../stack/slapos.cfg
# Extend here component profiles, like openssl, apache, mariadb, curl... # Extend here component profiles, like openssl, apache, mariadb, curl...
# Or/and extend a stack (lamp, tomcat) that does most of the work for you # Or/and extend a stack (lamp, tomcat) that does most of the work for you
# In this example we only need the dash binary to run a simple "hello world" # In this example we don't need anything more than python, which is provided by
# shell script. # the above stack/slapos.cfg
../../component/dash/buildout.cfg # ../../component/component1/buildout.cfg
# ../../component/component2/buildout.cfg
parts = parts =
# Call installation of slapos.cookbook egg defined in stack/slapos.cfg (needed # Call installation of slapos.cookbook egg defined in stack/slapos.cfg (needed
...@@ -18,6 +19,10 @@ parts = ...@@ -18,6 +19,10 @@ parts =
# instance # instance
instance-profile instance-profile
# "build" python program (install + correct shebang for our python)
hello-web-bin
# Download instance.cfg.in (buildout profile used to deploy the instance), # Download instance.cfg.in (buildout profile used to deploy the instance),
# replace all ${foo:bar} parameters by real values, and change $${foo:bar} to # replace all ${foo:bar} parameters by real values, and change $${foo:bar} to
# ${foo:bar} # ${foo:bar}
...@@ -26,5 +31,23 @@ recipe = slapos.recipe.template ...@@ -26,5 +31,23 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg output = ${buildout:directory}/instance.cfg
# MD5 checksum can be skipped for development (easier to develop), but must be filled for production # MD5 checksum can be skipped for development (easier to develop), but must be filled for production
md5sum = ed94ac99ae1e596c0da5350da6ab6f52 md5sum = 968bea0fc81dc604a874c53648b7d13f
mode = 0644 mode = 0644
# install hello-web with correct python_executable
[hello-web-bin]
recipe = slapos.recipe.template:jinja2
filename = hello-web
md5sum = da4a93ff679d40c6682859476dcf4ce0
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:bin-directory}/${:filename}
mode = 0755
# XXX python_executable should be ${${buildout:python}:executable}
# but buildout cannot support such indirection.
#
# In real cases, python software is usually installed with zc.recipe.egg,
# which takes care of correctly specifying the python interpreter for
# entry points automatically.
context =
raw python_executable ${buildout:executable}
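To illustrate what this section does: rendering the template simply substitutes the interpreter path into the shebang line. A small sketch with a hypothetical path (the real value comes from ${buildout:executable}):

from jinja2 import Template

# Hypothetical values - buildout provides the real python_executable.
context = {'python_executable': '/srv/slapgrid/soft/bin/python2.7'}
source = '#!{{ python_executable }}\nprint "hello"\n'
print Template(source).render(**context)
# -> the first line of the rendered script is the shebang pointing at that python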
...@@ -92,4 +92,4 @@ feedparser = 5.1.3 ...@@ -92,4 +92,4 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -48,4 +48,4 @@ PyRSS2Gen = 1.1 ...@@ -48,4 +48,4 @@ PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
collective.recipe.template = 1.11 collective.recipe.template = 1.11
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
[buildout]
parts =
instance
publish-connection-parameter
## Monitoring part XXX we should not have to specify all parts like this
## Parts to add for monitoring
certificate-authority
cron
cron-entry-monitor
cron-entry-rss
deploy-index
deploy-settings-cgi
deploy-status-cgi
deploy-status-history-cgi
setup-static-files
certificate-authority
zero-parameters
public-symlink
cgi-httpd-wrapper
cgi-httpd-graceful-wrapper
monitor-promise
monitor-instance-log-access
## Monitor for ipython
monitor-current-log-access
monitor-deploy-set-password-cgi
extends = ${monitor-template:output}
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[slapconfiguration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
[instance-parameter]
port = 8888
host = $${slapconfiguration:ipv6-random}
cert_file = $${generate-certificate:cert_file}
key_file = $${generate-certificate:key_file}
logfile = $${directory:log}/ipython_notebook.log
notebook_dir = $${directory:notebook_dir}
[generate-certificate]
; TODO: there is a slapos recipe to generate certificates. Use it instead
recipe = plone.recipe.command
command =
if [ ! -e $${instance-parameter:key_file} ]
then
${openssl-output:openssl} req -x509 -nodes -days 3650 \
-subj "/C=AA/ST=X/L=X/O=Dis/CN=$${instance-parameter:host}" \
-newkey rsa:1024 -keyout $${instance-parameter:key_file} \
-out $${instance-parameter:cert_file}
fi
update-command = $${:command}
cert_file = $${directory:etc}/ipython_notebook_cert.crt
key_file = $${directory:etc}/ipython_notebook_cert.key
[instance]
recipe = slapos.cookbook:wrapper
command-line =
${buildout:bin-directory}/ipython notebook
--no-browser
--matplotlib=inline
--ip=$${instance-parameter:host}
--port=$${instance-parameter:port}
--port-retries=0
--certfile=$${instance-parameter:cert_file}
--keyfile=$${instance-parameter:key_file}
--notebook-dir=$${instance-parameter:notebook_dir}
--logfile=$${instance-parameter:logfile}
--config=$${ipython_notebook_config:rendered}
wrapper-path = $${directory:service}/ipython_notebook
parameters-extra = true
[ipython_notebook_config]
recipe = slapos.recipe.template:jinja2
template = ${ipython_notebook_config:location}/${ipython_notebook_config:filename}
rendered = $${directory:etc}/ipython_notebook_config.py
mode = 0744
context =
raw config_cfg $${buildout:directory}/knowledge0.cfg
[monitor-current-log-access]
< = monitor-directory-access
source = $${instance-parameter:logfile}
[monitor-deploy-set-password-cgi]
recipe = slapos.recipe.template:jinja2
template = ${ipython_notebook_set_password:location}/${ipython_notebook_set_password:filename}
rendered = $${monitor-directory:knowledge0-cgi}/$${:filename}
filename = ipython-notebook-password.cgi
mode = 0744
context =
raw config_cfg $${buildout:directory}/knowledge0.cfg
raw python_executable ${buildout:bin-directory}/ipython
key pwd monitor-directory:knowledge0-cgi
key this_file :filename
key httpd_graceful cgi-httpd-graceful-wrapper:rendered
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc
var = $${:home}/var
script = $${:etc}/run/
service = $${:etc}/service
promise = $${:etc}/promise/
log = $${:var}/log
notebook_dir = $${:var}/notebooks
[publish-connection-parameter]
recipe = slapos.cookbook:publish
url = https://[$${instance-parameter:host}]:$${instance-parameter:port}
monitor_url = $${monitor-parameters:url}
[buildout]
versions = versions
extends =
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/ipython/buildout.cfg
../../component/scipy/buildout.cfg
../../component/scikit-learn/buildout.cfg
../../component/pandas/buildout.cfg
../../component/openssl/buildout.cfg
parts =
monitor-eggs
slapos-cookbook
ipython_notebook
ipython_notebook_set_password
instance
[ipython_notebook]
; In the ipython notebook software, we use more eggs than in the minimal
; ipython notebook component
eggs +=
${scipy:egg}
${pandas:egg}
${scikit-learn:egg}
[ipython_notebook_config]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/${:filename}
download-only = true
md5sum = a5bc4ee8539109d1de7ab33b4c2c97ea
destination = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = ipython_notebook_config.jinja
mode = 0644
[ipython_notebook_set_password]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/${:filename}
download-only = true
md5sum = d7d4a7e19d55bf14007819258bf42100
destination = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = ipython_set_password.jinja
mode = 0644
[instance]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/instance.cfg
[versions]
PyRSS2Gen = 1.1
Pygments = 2.0.2
cns.recipe.symlink = 0.2.3
ipython = 3.1.0
matplotlib = 1.4.3
mistune = 0.5.1
nose = 1.3.7
pandas = 0.16.1
plone.recipe.command = 1.1
pyzmq = 14.6.0
scikit-learn = 0.16.1
scipy = 0.15.1
simpy = 3.0.7
slapos.recipe.template = 2.7
terminado = 0.5
tornado = 4.2
# Required by:
# dream==0.0.1
MySQL-python = 1.2.5
# Required by:
# tornado==4.2
backports.ssl-match-hostname = 3.4.0.2
# Required by:
# tornado==4.2
certifi = 2015.4.28
# Required by:
# matplotlib==1.4.3
mock = 1.0.1
# Required by:
# dream==0.0.1
numpy = 1.9.2
# Required by:
# terminado==0.5
ptyprocess = 0.5
# Required by:
# dream==0.0.1
pydot = 1.0.28
# Required by:
# matplotlib==1.4.3
# pandas==0.16.1
python-dateutil = 2.4.2
# Required by:
# dream==0.0.1
rpy2 = 2.6.0
# Required by:
# rpy2==2.6.0
singledispatch = 3.4.0.3
# Required by:
# dream==0.0.1
tablib = 0.10.0
# Required by:
# dream==0.0.1
xlrd = 0.9.3
# Required by:
# dream==0.0.1
xlwt = 1.0.0
# Required by:
# dream==0.0.1
zope.dottedname = 4.1.0
import ConfigParser
knowledge_0 = '{{ config_cfg }}'
c = get_config()
parser = ConfigParser.ConfigParser()
parser.read(knowledge_0)
if parser.has_option("ipython_notebook", "password"):
c.NotebookApp.password = parser.get("ipython_notebook", "password")
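The password hash read here is written into knowledge0.cfg by the set-password CGI shipped below; a condensed sketch of that write path, assuming the same IPython.lib.passwd helper and a hypothetical file name:

import ConfigParser
from IPython.lib import passwd

config_file = 'knowledge0.cfg'  # hypothetical path, for illustration only
parser = ConfigParser.ConfigParser()
parser.read(config_file)
if not parser.has_section('ipython_notebook'):
    parser.add_section('ipython_notebook')
# passwd() returns a salted hash suitable for c.NotebookApp.password
parser.set('ipython_notebook', 'password', passwd('secret'))
with open(config_file, 'w') as f:
    parser.write(f)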
#!{{ python_executable }}
import cgi
import cgitb
import ConfigParser
import os
import re
import subprocess
from IPython.lib import passwd
#cgitb.enable(display=0, logdir="/tmp/cgi.log")
cgitb.enable()
form = cgi.FieldStorage()
config_file = "{{ config_cfg }}"
if not os.path.exists(config_file):
print "Your software does <b>not</b> embed 0-knowledge. \
This interface is useless in this case</body></html>"
exit(0)
parser = ConfigParser.ConfigParser()
parser.read(config_file)
if not parser.has_section("ipython_notebook"):
parser.add_section("ipython_notebook")
if not parser.has_option("ipython_notebook", "password"):
parser.set("ipython_notebook", "password", "")
if "password" in form:
parser.set("ipython_notebook", "password", passwd(form["password"].value))
# subprocess.call('{{ httpd_graceful }}')
# TODO: we should restart ipython
with open(config_file, 'w') as file:
parser.write(file)
# TODO cleanup
print "<html><head>"
print "<link rel=\"stylesheet\" href=\"static/pure-min.css\">"
print "<link rel=\"stylesheet\" href=\"static/style.css\">"
print "</head><body>"
print "<h1>IPython Notebook Password :</h1>"
print "<form action=\"/index.cgi\" method=\"post\" class=\"pure-form-aligned\">"
print "<input type=\"hidden\" name=\"posting-script\" value=\"{{ pwd }}/{{ this_file }}\">"
print """<div class="pure-control-group">
<label for="password">Password*:</label>
<input placeholder="Set your password" type="password" name="password" id="password"></br>
</div><div class="pure-control-group">
<label for="password">Verify Password*:</label>
<input placeholder="Verify password" type="password" name="password_2" id="password_2"></br>
</div><p id="validate-status" style="color:red"></p>
<div class="pure-controls">
<button id="register-button" type="submit" class="pure-button pure-button-primary" disabled>Access</button></div>
</form>
<script type="text/javascript" src="static/jquery-1.10.2.min.js"></script>
<script type="text/javascript" src="static/monitor-register.js"></script>
</body></html>
"""
...@@ -11,6 +11,7 @@ extends = ...@@ -11,6 +11,7 @@ extends =
../../component/noVNC/buildout.cfg ../../component/noVNC/buildout.cfg
../../component/openssl/buildout.cfg ../../component/openssl/buildout.cfg
../../component/dcron/buildout.cfg ../../component/dcron/buildout.cfg
../../component/netcat/buildout.cfg
../../stack/slapos.cfg ../../stack/slapos.cfg
../../stack/nodejs.cfg ../../stack/nodejs.cfg
../../stack/resilient/buildout.cfg ../../stack/resilient/buildout.cfg
...@@ -49,6 +50,7 @@ eggs = ...@@ -49,6 +50,7 @@ eggs =
erp5.util erp5.util
cns.recipe.symlink cns.recipe.symlink
collective.recipe.template collective.recipe.template
plone.recipe.command
[http-proxy] [http-proxy]
# https://github.com/nodejitsu/node-http-proxy # https://github.com/nodejitsu/node-http-proxy
...@@ -85,7 +87,7 @@ command = ...@@ -85,7 +87,7 @@ command =
[template] [template]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 4c8f07da2217e54163c265fe6fe3d41d md5sum = cf67212d3155767d0d0d8a6d75d2d8ad
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
...@@ -93,7 +95,15 @@ mode = 0644 ...@@ -93,7 +95,15 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2 url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2
mode = 644 mode = 644
md5sum = ac69266206830226185e576fb6e4935a md5sum = 3e3354844b2052609e3c49eca03b607e
download-only = true
on-update = true
[template-kvm-cluster]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-kvm-cluster.cfg.jinja2.in
mode = 644
md5sum = cc72d7b89d8b474d7b4f2c9319b385d5
download-only = true download-only = true
on-update = true on-update = true
...@@ -108,7 +118,7 @@ on-update = true ...@@ -108,7 +118,7 @@ on-update = true
[template-kvm-resilient-test] [template-kvm-resilient-test]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-kvm-resilient-test.cfg.jinja2 url = ${:_profile_base_location_}/instance-kvm-resilient-test.cfg.jinja2
md5sum = 3af2cdbaa7773e639cfdf17db60d2aed md5sum = e3d275621420f0b8c081228caeb571f9
mode = 0644 mode = 0644
download-only = true download-only = true
on-update = true on-update = true
...@@ -157,3 +167,33 @@ url = ${:_profile_base_location_}/instance-frontend.cfg.in ...@@ -157,3 +167,33 @@ url = ${:_profile_base_location_}/instance-frontend.cfg.in
md5sum = cdb690495e9eb007d2b7d2f8e12f5c59 md5sum = cdb690495e9eb007d2b7d2f8e12f5c59
output = ${buildout:directory}/template-frontend.cfg output = ${buildout:directory}/template-frontend.cfg
mode = 0644 mode = 0644
[template-apache-conf]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/apache.conf.in
mode = 644
filename = apache.conf.in
md5sum = 91f05377aff35ffbac7f2687e90b5dcc
download-only = true
on-update = true
[template-content]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/template-content.in
mode = 644
filename = template-content.in
md5sum = 47d492dafe5cb314bdc49bf013d21ead
download-only = true
on-update = true
[template-httpd]
recipe = slapos.recipe.template:jinja2
filename = template-httpd.cfg
template = ${:_profile_base_location_}/instance-kvm-http.cfg.in
rendered = ${buildout:parts-directory}/${:_buildout_section_name_}/instance-kvm-http.cfg
md5sum = 84b96dfc78e8d2611bf7210b8b6bb9c5
context =
key apache_location apache:location
raw template_apache_conf ${template-apache-conf:location}/${template-apache-conf:filename}
...@@ -6,30 +6,38 @@ extends = ...@@ -6,30 +6,38 @@ extends =
parts += parts +=
slapos.cookbook-repository slapos.cookbook-repository
slapos.toolbox-repository slapos.toolbox-repository
slapos.core-repository
erp5.util-repository erp5.util-repository
check-recipe check-recipe
develop = develop =
${:parts-directory}/slapos.cookbook-repository ${:parts-directory}/slapos.cookbook-repository
${:parts-directory}/slapos.core-repository
${:parts-directory}/slapos.toolbox-repository ${:parts-directory}/slapos.toolbox-repository
${:parts-directory}/erp5.util-repository ${:parts-directory}/erp5.util-repository
[slapos.cookbook-repository] [slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/slapos.git repository = http://git.erp5.org/repos/slapos.git
branch = kvm branch = kvm-cluster
git-executable = ${git:location}/bin/git
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git git-executable = ${git:location}/bin/git
[slapos.toolbox-repository] [slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/slapos.toolbox.git repository = http://git.erp5.org/repos/slapos.toolbox.git
branch = kvmresiliency branch = master
git-executable = ${git:location}/bin/git git-executable = ${git:location}/bin/git
[erp5.util-repository] [erp5.util-repository]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git repository = http://git.erp5.org/repos/erp5.git
branch = scalability-master2 branch = master
git-executable = ${git:location}/bin/git git-executable = ${git:location}/bin/git
[check-recipe] [check-recipe]
...@@ -38,5 +46,12 @@ stop-on-error = true ...@@ -38,5 +46,12 @@ stop-on-error = true
update-command = ${:command} update-command = ${:command}
command = command =
grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link && grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.toolbox.egg-link && grep parts ${buildout:develop-eggs-directory}/slapos.toolbox.egg-link &&
grep parts ${buildout:develop-eggs-directory}/erp5.util.egg-link grep parts ${buildout:develop-eggs-directory}/erp5.util.egg-link
[versions]
slapos.cookbook =
slapos.core =
slapos.toolbox =
erp5.util =
\ No newline at end of file
...@@ -22,6 +22,13 @@ test = $${dynamic-template-kvm-resilient-test:rendered} ...@@ -22,6 +22,13 @@ test = $${dynamic-template-kvm-resilient-test:rendered}
frozen = ${instance-frozen:output} frozen = ${instance-frozen:output}
pull-backup = ${template-pull-backup:output} pull-backup = ${template-pull-backup:output}
# XXX - If this configuration is not generated by slapgrid, use empty values
[storage-configuration]
storage-home =
[network-information]
global-ipv4-network =
[slap-configuration] [slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id} computer = $${slap-connection:computer-id}
...@@ -29,6 +36,7 @@ partition = $${slap-connection:partition-id} ...@@ -29,6 +36,7 @@ partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url} url = $${slap-connection:server-url}
key = $${slap-connection:key-file} key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file} cert = $${slap-connection:cert-file}
storage-home = $${storage-configuration:storage-home}
[dynamic-template-kvm] [dynamic-template-kvm]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
...@@ -38,16 +46,22 @@ extensions = jinja2.ext.do ...@@ -38,16 +46,22 @@ extensions = jinja2.ext.do
context = context =
key develop_eggs_directory buildout:develop-eggs-directory key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory key eggs_directory buildout:eggs-directory
key global_ipv4_prefix network-information:global-ipv4-network
key slapparameter_dict slap-configuration:configuration key slapparameter_dict slap-configuration:configuration
key storage_dict slap-configuration:storage-dict
key tap_network_dict slap-configuration:tap-network-information-dict
raw curl_executable_location ${curl:location}/bin/curl raw curl_executable_location ${curl:location}/bin/curl
raw dash_executable_location ${dash:location}/bin/dash raw dash_executable_location ${dash:location}/bin/dash
raw dcron_executable_location ${dcron:location}/sbin/crond raw dcron_executable_location ${dcron:location}/sbin/crond
raw debian_amd64_netinst_location ${debian-amd64-netinst.iso:location}/${debian-amd64-netinst.iso:filename} raw debian_amd64_netinst_location ${debian-amd64-netinst.iso:location}/${debian-amd64-netinst.iso:filename}
raw novnc_location ${noVNC:location} raw novnc_location ${noVNC:location}
raw netcat_bin ${netcat:location}/bin/netcat
raw openssl_executable_location ${openssl:location}/bin/openssl raw openssl_executable_location ${openssl:location}/bin/openssl
raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64 raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64
raw qemu_img_executable_location ${kvm:location}/bin/qemu-img raw qemu_img_executable_location ${kvm:location}/bin/qemu-img
raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel
raw template_httpd_cfg ${template-httpd:rendered}
raw template_content ${template-content:location}/${template-content:filename}
raw websockify_executable_location ${buildout:directory}/bin/websockify raw websockify_executable_location ${buildout:directory}/bin/websockify
template-parts-destination = ${template-parts:destination} template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination} template-replicated-destination = ${template-replicated:destination}
......
{
"type": "object",
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Input Parameters",
"properties": {
"frontend": {
"title": "KVM frontend (web socket)",
"description": "Front end used to provide VNC.",
"properties": {
"frontend-instance-guid": {
"title": "Frontend Instance ID",
"description": "Unique identifier of the frontend instance, like \"SOFTINST-11031\".",
"type": "string",
"default": ""
},
"frontend-software-type": {
"title": "Frontend Software Type",
"description": "Type of the frontend instance, like \"frontend\".",
"type": "string",
"default": "frontend"
},
"frontend-software-url": {
"title": "Frontend Software URL",
"description": "Software Release URL of the frontend instance, like \"http://example.com/path/to/software.cfg\".",
"type": "string",
"format": "uri",
"default": "http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg"
}
},
"type": "object"
},
"slave-frontend": {
"title": "Web frontend",
"description": "Front end used to provide web access for internal services at the kvm.",
"properties": {
"instance-guid": {
"title": "Main Frontend Instance ID",
"description": "Unique identifier of the frontend instance, like \"SOFTINST-11031\".",
"type": "string",
"default": ""
},
"frontend-software-type": {
"title": "Frontend Software Type",
"description": "Type of the frontend instance, like \"frontend\".",
"type": "string",
"default": "custom-personal"
},
"frontend-software-url": {
"title": "Frontend Software URL",
"description": "Software Release URL of the frontend instance, like \"http://example.com/path/to/software.cfg\".",
"type": "string",
"format": "uri",
"default": "http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg"
},
"slave-frontend-dict": {
"title": "Slave Front end definition",
"description": "Slave Front end definition",
"patternProperties": {
".*": {
"properties": {
"domain": {
"title": "Name of the domain to be used",
"description": "Name of the domain to be used (example: mydomain.com). Subdomains of this domain will be used for the slave instances (example: instance12345.mydomain.com).",
"type": "string",
"default": ""
},
"url": {
"title": "URL of backend to use.",
"description": "URL of backend to use. This URL will be used to request frontend if parameter 'kvm-partition-name' is not set.",
"format": "uri",
"type": "string",
"default": ""
},
"enable-cache": {
"title": "Use cache for this slave frontend.",
"description": "Specify if slave frontend should use a squid to connect to backend.",
"type": "boolean",
"default": false
},
"type": {
"title": "Specify if slave frontend will redirect to a zope backend.",
"description": "If specified, Apache RewriteRule will use Zope's Virtual Host Daemon. Possible values: 'zope', 'default'.",
"type": "string",
"default": "default"
},
"zope-path": {
"title": "Path to the VirtualHostRoot of the zope.",
"description": "Only used if type is 'zope'. Will append the specified path to the VirtualHostRoot of the zope's VirtualHostMonster.",
"type": "string",
"default": ""
},
"https-only": {
"title": "Access website with https url only.",
"description": "Specify if website should be accessed using https only. If so, the frontend will redirect the user to https if accessed from http.",
"type": "boolean",
"default": false
},
"kvm-partition-name": {
"title": "Reference name in kvm instance definition to get url from.",
"description": "Compose url from kvm definition. Only work if 'use-nat' is true and 'service-port' is set. This will allow to get URL from defined nat-rules. Play the same as url, but help if you don't know kvm ipv6 yet.",
"type": "string",
"default": ""
},
"service-port": {
"title": "Port of service into the VM (require: kvm-name).",
"description": "This will allow to get URL from defined nat-rules. The port should exist in nat-rules of KVM you have referenced by 'kvm-partition-name'",
"type": "integer"
},
"url-scheme": {
"title": "Scheme of HTTP service into the VM (require: kvm-name).",
"description": "Say If HTTP service to run/or running into the Virtual Machine will use http or https. Possible values: http, https.",
"type": "string",
"enum": ["http", "https"],
"default": "http"
}
},
"type": "object"
}
},
"type": "object"
}
},
"type": "object"
},
"authorized-key": {
"title": "Public keys for virtual machines.",
"description": "Set the list of public keys to add in your virtual machine. The public key file will be available in the VM via url http://10.0.2.100/authorized_keys if you keep the NAT interface enabled",
"type": "array"
},
"kvm-partition-dict": {
"title": "kvm instances definition",
"description": "kvm instances definition",
"patternProperties": {
".*": {
"properties": {
"computer-guid": {
"title": "ID of the computer where to deploy this VM.",
"description": "Unique identifier of the computer, like \"COMP-1234\". By default, let Master choose a computer.",
"type": "string",
"default": ""
},
"ram-size": {
"title": "RAM size",
"description": "RAM size, in MB.",
"type": "integer",
"default": 1024,
"minimum": 128,
"multipleOf": 128,
"maximum": 16384
},
"disk-size": {
"title": "Disk size",
"description": "Disk size, in GB.",
"type": "integer",
"default": 10,
"minimum": 1,
"maximum": 1000
},
"disk-type": {
"title": "Disk type",
"description": "Type of QEMU disk drive.",
"type": "string",
"default": "virtio",
"enum": [
"ide",
"scsi",
"sd",
"mtd",
"floppy",
"pflash",
"virtio"
]
},
"cpu-count": {
"title": "CPU count",
"description": "Number of CPU cores.",
"type": "integer",
"minimum": 1,
"maximum": 8
},
"cpu-options": {
"title": "CPU Additional options: cores, threads, sockets, maxcpus.",
"description": "Additional options to use with cpu-count. Options are separated by coma: [cores=cores][,threads=threads][,sockets=sockets][,maxcpus=maxcpus]. Set this option if you know what you're doing.",
"type": "string"
},
"numa": {
"title": "Simulate a multi node NUMA system.",
"description": "Simulate a multi node NUMA system. If mem and cpus are omitted, resources are split equally. Each numa option are separated by space: node,nodeid=4,cpus=40-49,mem=64g node,nodeid=1,cpus=10-19,mem=128g. Set this option if you know what you're doing.",
"type": "string"
},
"nbd-host": {
"title": "NBD hostname or IP",
"description": "hostname (or IP) of the NBD server containing the boot image.",
"type": "string",
"format": [
"host-name",
"ip-address",
"ipv6"
],
"default": "debian.nbd.vifib.net"
},
"nbd-port": {
"title": "NBD port",
"description": "Port of the NBD server containing the boot image.",
"type": "integer",
"default": 1024,
"minimum": 1,
"maximum": 65535
},
"nbd2-host": {
"title": "Second NBD hostname or IP",
"description": "hostname (or IP) of the second NBD server (containing drivers for example).",
"type": "string",
"format": [
"host-name",
"ip-address",
"ipv6"
]
},
"nbd2-port": {
"title": "Second NBD port",
"description": "Port of the second NBD server containing the boot image.",
"type": "integer",
"minimum": 1,
"maximum": 65535
},
"virtual-hard-drive-url": {
"title": "Existing disk image URL",
"description": "If specified, will download an existing disk image (qcow2, raw, ...), and will use it as main virtual hard drive. Can be used to download and use an already installed and customized virtual hard drive.",
"format": "uri",
"type": "string"
},
"virtual-hard-drive-md5sum": {
"title": "Checksum of virtual hard drive",
"description": "MD5 checksum of virtual hard drive, used if virtual-hard-drive-url is specified.",
"type": "string"
},
"virtual-hard-drive-gzipped": {
"title": "Virtual hard drive to download is gzipped",
"description": "Define if virtual hard drive to download is gzipped using gzip. This help to reduce size of file to download.",
"type": "boolean",
"default": false
},
"external-disk-number": {
"title": "Number of additional disk to create for virtual machine",
"description": "Specify the number of additional disk to create for virtual machine in data folder of SlapOS Node. Requires instance_storage_home to be configured on SlapOS Node.",
"type": "integer",
"minimum": 0,
"default": 0
},
"external-disk-size": {
"title": "Size of additional disk to create for virtual machine, in Gigabytes",
"description": "Specify the size of additional disk to create for virtual machine in data folder of SlapOS Node. Requires instance_storage_home to be configured on SlapOS Node.",
"type": "integer",
"minimum": 10,
"maximum": 1000,
"default": 20
},
"external-disk-format": {
"title": "Type of external disk drive to create by QEMU.",
"description": "Type of QEMU disk drive, to create.",
"type": "string",
"default": "qcow2",
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop"]
},
"use-tap": {
"title": "Use QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
"type": "boolean",
"default": true
},
"use-nat": {
"title": "Use QEMU USER Mode networking",
"description": "Use QEMU user-mode network stack (NAT).",
"type": "boolean",
"default": true
},
"nat-rules": {
"title": "List of rules for NAT of QEMU user mode network stack.",
"description": "List of rules for NAT of QEMU user mode network stack, as comma-separated list of ports. For each port specified, it will redirect port x of the VM (example: 80) to the port x + 10000 of the public IPv6 (example: 10080). Defaults to \"22 80 443\".",
"type": "array",
"default": [
22,
80,
443
]
}
},
"type": "object"
}
},
"type": "object"
}
}
}
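For orientation, a hypothetical set of instance parameters matching this schema (all values invented) could look like the following Python dict:

parameters = {
    "frontend": {"frontend-software-type": "frontend"},
    "authorized-key": ["ssh-rsa AAAAB3_example_key user@example"],
    "kvm-partition-dict": {
        "kvm-vm1": {
            "ram-size": 2048,
            "disk-size": 20,
            "cpu-count": 2,
            "use-nat": True,
            "use-tap": True,
            "nat-rules": [22, 80, 443],
        },
    },
    "slave-frontend": {
        "slave-frontend-dict": {
            "site1": {
                "domain": "vm1.example.com",
                "kvm-partition-name": "kvm-vm1",
                "service-port": 80,
                "url-scheme": "http",
            },
        },
    },
}
# With use-nat enabled, port 80 of kvm-vm1 is exposed on the partition's
# public IPv6 at port 10080 (port + 10000), which the slave frontend then uses.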
{% set publish_dict = {} -%}
{% set part_list = [] -%}
{% set ipv6 = (ipv6 | list)[0] -%}
{% set frontend_dict = slapparameter_dict.get('frontend', {}) -%}
{% set slave_frontend_dict = slapparameter_dict.get('slave-frontend', {}) -%}
{% set slave_frontend_sr = slave_frontend_dict.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg') -%}
{% set slave_frontend_stype = slave_frontend_dict.get('software-type', 'custom-personal') -%}
{% set slave_frontend_iguid = slave_frontend_dict.get('instance-guid', '') -%}
{% set kvm_instance_dict = {} -%}
{% set kvm_hostname_list = [] -%}
[request-common]
recipe = slapos.cookbook:request
software-url = ${slap-connection:software-release-url}
server-url = ${slap-connection:server-url}
key-file = ${slap-connection:key-file}
cert-file = ${slap-connection:cert-file}
computer-id = ${slap-connection:computer-id}
partition-id = ${slap-connection:partition-id}
config-use-ipv6 = {{ dumps(slapparameter_dict.get('use-ipv6', False)) }}
# Request kvm instances
{% for instance_name, kvm_parameter_dict in slapparameter_dict.get('kvm-partition-dict', {'kvm-default': {}}).items() -%}
{% set section = 'request-' ~ instance_name -%}
{% set use_nat = kvm_parameter_dict.get('use-nat', True) -%}
[{{ section }}]
<= request-common
software-type = kvm
name = {{ instance_name }}
sla-computer_guid = {{ dumps(kvm_parameter_dict.get('computer-guid', '')) }}
config-frontend-instance-name = {{ instance_name ~ ' VNC Frontend' }}
config-frontend-software-type = {{ dumps(frontend_dict.get('software-type', 'frontend')) }}
config-frontend-software-url = {{ dumps(frontend_dict.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg')) }}
config-frontend-instance-guid = {{ dumps(frontend_dict.get('instance-guid', '')) }}
config-name = {{ instance_name }}
{% if slapparameter_dict.get('authorized-key', []) -%}
config-authorized-key = {{ slapparameter_dict.get('authorized-key') | join('##') }}
{% endif -%}
config-nbd-port = {{ dumps(kvm_parameter_dict.get('nbd-port', 1024)) }}
config-nbd-host = {{ dumps(kvm_parameter_dict.get('nbd-host', '')) }}
config-nbd2-port = {{ dumps(kvm_parameter_dict.get('nbd2-port', 1024)) }}
config-nbd2-host = {{ dumps(kvm_parameter_dict.get('nbd2-host', '')) }}
config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 1024)) }}
config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 10)) }}
config-disk-type = {{ dumps(kvm_parameter_dict.get('disk-type', 'virtio')) }}
config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 1)) }}
config-cpu-options = {{ dumps(kvm_parameter_dict.get('cpu-options', '')) }}
config-numa = {{ dumps(kvm_parameter_dict.get('numa', '')) }}
{% set nat_rules_list = kvm_parameter_dict.get('nat-rules', [22, 80, 443]) -%}
config-nat-rules = {{ nat_rules_list | join(' ') }}
config-publish-nat-url = True
config-use-nat = {{ use_nat }}
config-use-tap = {{ dumps(kvm_parameter_dict.get('use-tap', True)) }}
config-virtual-hard-drive-url = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-url', '')) }}
config-virtual-hard-drive-md5sum = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-md5sum', '')) }}
config-virtual-hard-drive-gzipped = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-gzipped', False)) }}
config-external-disk-number = {{ dumps(kvm_parameter_dict.get('external-disk-number', 0)) }}
config-external-disk-size = {{ dumps(kvm_parameter_dict.get('external-disk-size', 20)) }}
config-external-disk-format = {{ dumps(kvm_parameter_dict.get('external-disk-format', 'qcow2')) }}
config-enable-http-server = {{ dumps(kvm_parameter_dict.get('enable-http-server', True)) }}
config-httpd-port = {{ dumps(kvm_parameter_dict.get('httpd-port', 8081)) }}
# Enable a simple http server on ipv6 so all VMs can access it
config-document-host = ${http-server:host}
config-document-port = ${http-server:port}
config-document-path = ${http-server:path}
return =
backend-url
url
{% if use_nat -%}
{% for port in nat_rules_list -%}
{{ ' ' }}nat-rule-url-{{ port }}
{% endfor -%}
{% endif -%}
{% if kvm_parameter_dict.get('use-tap', True) -%}
{{ ' ' }}tap-ipv4
{% do publish_dict.__setitem__('lan-' ~ instance_name, '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% do kvm_hostname_list.append(instance_name ~ ' ' ~ '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% endif -%}
{% do publish_dict.__setitem__(instance_name ~ '-backend-url', '${' ~ section ~ ':connection-backend-url}') -%}
{% do publish_dict.__setitem__(instance_name ~ '-url', '${' ~ section ~ ':connection-url}') -%}
{% do kvm_instance_dict.__setitem__(instance_name, (kvm_parameter_dict.get('use-nat', True), nat_rules_list)) -%}
{% endfor %}
#request custom kvm frontend
{% for frontend_name, frontend_parameter_dict in slave_frontend_dict.get('slave-frontend-dict', {}).items() -%}
{% set name = frontend_name -%}
{% set url = frontend_parameter_dict.get('url', '') -%}
{% set error = '' -%}
{% if frontend_parameter_dict.get('kvm-partition-name', '') != '' -%}
{% set kvm_name = frontend_parameter_dict['kvm-partition-name'] -%}
{% set service_port = frontend_parameter_dict['service-port'] -%}
{% if kvm_name in kvm_instance_dict.keys() and not kvm_instance_dict[kvm_name][0] -%}
{% set error = "You should set parameter use-nat to 'true' for '" ~ kvm_name ~ "', or provide url to use for frontend." -%}
{% elif kvm_name in kvm_instance_dict.keys() and service_port in kvm_instance_dict[kvm_name][1] -%}
{% set url = '${request-' ~ kvm_name ~ ':connection-nat-rule-url-' ~ service_port ~ '}' -%}
{% set url = frontend_parameter_dict.get('url-scheme', 'http') ~ '://' ~ url -%}
{% else -%}
{% set error = kvm_name ~ " and/or port " ~ service_port ~ " doesn't match any KVM name and/or related nat-rules in your request parameters." -%}
{% endif -%}
{% endif -%}
{% set section = 'request-' ~ name ~ '-slave-frontend' -%}
[{{ section }}]
<= request-common
recipe = slapos.cookbook:request
software-url = {{ slave_frontend_sr }}
name = Frontend {{ name }}
software-type = {{ slave_frontend_stype }}
slave = true
config-url = {{ url }}
config-custom_domain = {{ dumps(frontend_parameter_dict.get('domain', '')) }}
config-enable_cache = {{ dumps(frontend_parameter_dict.get('enable-cache', False)) }}
config-https-only = {{ dumps(frontend_parameter_dict.get('https-only', False)) }}
{% if frontend_parameter_dict.get('type', '') -%}
config-type = {{ dumps(frontend_parameter_dict['type']) }}
{% if frontend_parameter_dict.get('path', '') -%}
config-path = {{ dumps(frontend_parameter_dict['path']) }}
{% endif -%}
{% endif -%}
return =
site_url
sla-instance_guid = {{ slave_frontend_iguid }}
{% do publish_dict.__setitem__(name ~ '-url', '${' ~ section ~ ':connection-site_url}') -%}
{% if error != '' -%}
{% do publish_dict.__setitem__('1_error', error) -%}
{% endif -%}
{% endfor %}
# Enable a simple http server on ipv6 so all VMs can access it
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
var = ${buildout:directory}/var
log = ${:var}/log
scripts = ${:etc}/run
services = ${:etc}/service
document = ${:srv}/document
ssl = ${:etc}/ssl
[http-ssl]
recipe = plone.recipe.command
command = "{{ openssl_executable_location }}" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:key}" -out "${:cert}"
key = ${directory:ssl}/key
cert = ${directory:ssl}/cert
update-command =
stop-on-error = true
[http-server]
recipe = slapos.cookbook:simplehttpserver
host = {{ ipv6 }}
port = 9002
base-path = ${directory:document}
wrapper = ${directory:services}/simple-http-server
log-file = ${directory:log}/http.log
cert-file = ${http-ssl:cert}
key-file = ${http-ssl:key}
[write-vm-hostname]
recipe = slapos.recipe.template:jinja2
template = {{ template_content }}
filename = hosts
rendered = ${http-server:root-dir}/${:filename}
context =
raw content_list {{ kvm_hostname_list | join('#') }}
raw sep #
[publish]
recipe = slapos.cookbook:publish
{% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }}
{% endfor %}
[buildout]
parts =
http-server
write-vm-hostname
publish
# Complete parts with sections
{{ part_list | join('\n ') }}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
\ No newline at end of file
[buildout]
parts =
httpd
httpd-promise
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
public = ${:srv}/public/
log = ${:var}/log
services = ${:etc}/service
promises = ${:etc}/promise
run = ${:var}/run
[apache-conf]
recipe = slapos.recipe.template:jinja2
template = {{ template_apache_conf }}
rendered = ${directory:etc}/apache.conf
#ipv6 = ${slap-network-information:global-ipv6}
ipv4 = ${slap-network-information:local-ipv4}
port = ${slap-parameter:httpd-port}
error-log = ${directory:log}/apache-error.log
access-log = ${directory:log}/apache-access.log
pid-file = ${directory:run}/apache.pid
index = ${directory:public}
context =
key port :port
key ip :ipv4
key access_log :access-log
key error_log :error-log
key pid_file :pid-file
key index_folder :index
[httpd]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/httpd
command-line = "{{ apache_location }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND
[httpd-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/apache-httpd
hostname = ${apache-conf:ipv4}
port = ${apache-conf:port}
\ No newline at end of file
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
"type": "integer", "type": "integer",
"default": 10, "default": 10,
"minimum": 1, "minimum": 1,
"maximum": 80 "maximum": 1000
}, },
"disk-type": { "disk-type": {
"title": "Disk type", "title": "Disk type",
...@@ -36,6 +36,16 @@ ...@@ -36,6 +36,16 @@
"minimum": 1, "minimum": 1,
"maximum": 8 "maximum": 8
}, },
"cpu-options": {
"title": "Additional options (cores, threads, sockets, maxcpus) to use with cpu-count.",
"description": "Additional options to use with cpu-count. Options are separated by coma: [cores=cores][,threads=threads][,sockets=sockets][,maxcpus=maxcpus]. Only set this option if you really know what you're doing.",
"type": "string"
},
"numa": {
"title": "Simulate a multi node NUMA system. If mem and cpus are omitted, resources are split equally.",
"description": "Simulate a multi node NUMA system. If mem and cpus are omitted, resources are split equally. Each numa option are separated by space: node,nodeid=4,cpus=40-49,mem=64g node,nodeid=1,cpus=10-19,mem=128g. Only set this option if you really know what you're doing.",
"type": "string"
},
"nbd-host": { "nbd-host": {
"title": "NBD hostname", "title": "NBD hostname",
...@@ -78,18 +88,58 @@ ...@@ -78,18 +88,58 @@
"description": "MD5 checksum of virtual hard drive, used if virtual-hard-drive-url is specified.", "description": "MD5 checksum of virtual hard drive, used if virtual-hard-drive-url is specified.",
"type": "string" "type": "string"
}, },
"virtual-hard-drive-gzipped": {
"title": "Define if virtual hard drive to download is gzipped",
"description": "Define if virtual hard drive to download is gzipped using gzip. This help to reduce size of file to download.",
"type": "boolean",
"default": false
},
"external-disk-number": {
"title": "Number of additional disk to create for virtual machine",
"description": "Specify the number of additional disk to create for virtual machine in data folder of SlapOS Node. Requires instance_storage_home to be configured on SlapOS Node.",
"type": "integer",
"minimum": 0,
"default": 0
},
"external-disk-size": {
"title": "Number of additional disk to create for virtual machine, in Gigabytes",
"description": "Specify the number of additional disk to create for virtual machine in data folder of SlapOS Node. Requires instance_storage_home to be configured on SlapOS Node.",
"type": "integer",
"minimum": 5,
"maximum": 1000,
"default": 20
},
"external-disk-format": {
"title": "Type of external disk drive to create by QEMU.",
"description": "Type of QEMU disk drive, to create.",
"type": "string",
"default": "qcow2",
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop"]
},
"use-tap": { "use-tap": {
"title": "Use QEMU TAP network interface", "title": "Use QEMU TAP network interface",
"description": "Use QEMU TAP network interface, requires a bridge on SlapOS Node. If false, use user-mode network stack (NAT).", "description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
"type": "boolean", "type": "boolean",
"default": false "default": false
}, },
"use-nat": {
"title": "Use QEMU USER Mode networking",
"description": "Use QEMU user-mode network stack (NAT).",
"type": "boolean",
"default": true
},
"nat-rules": { "nat-rules": {
"title": "List of rules for NAT of QEMU user mode network stack.", "title": "List of rules for NAT of QEMU user mode network stack.",
"description": "List of rules for NAT of QEMU user mode network stack, as comma-separated list of ports. For each port specified, it will redirect port x of the VM (example: 80) to the port x + 10000 of the public IPv6 (example: 10080). Defaults to \"22 80 443\". Ignored if \"use-tap\" parameter is enabled.", "description": "List of rules for NAT of QEMU user mode network stack, as comma-separated list of ports. For each port specified, it will redirect port x of the VM (example: 80) to the port x + 10000 of the public IPv6 (example: 10080). Defaults to \"22 80 443\". Ignored if \"use-tap\" parameter is enabled.",
"type": "string" "type": "string"
}, },
"authorized-key": {
"title": "Public keys to get from all virtual machines.",
"description": "Set the public keys to add in your virtual machine. Keys are separated with '##'. The public key file will be available in the VM via url http://10.0.2.100/authorized_keys if you keep the NAT interface enabled",
"type": "string"
},
"frontend-instance-guid": { "frontend-instance-guid": {
"title": "Frontend Instance ID", "title": "Frontend Instance ID",
"description": "Unique identifier of the frontend instance, like \"SOFTINST-11031\".", "description": "Unique identifier of the frontend instance, like \"SOFTINST-11031\".",
......
{ {
"type": "object", "type": "object",
"$schema": "http://json-schema.org/draft-04/schema", "$schema": "http://json-schema.org/draft-04/schema",
"items": { "allOf": [
"allOf": [ {
{ "$ref": "instance-kvm-input-schema.json#/"
"$ref": "instance-kvm-input-schema.json" },
} {
], "properties": {
"title": "Input Parameters", "-sla-0-computer_guid": {
"properties": { "title": "Target computer for main instance",
"-sla-0-computer_guid": { "description": "Target computer GUID for main instance.",
"title": "Target computer for main instance", "type": "string",
"description": "Target computer GUID for main instance.", "optional": true
"type": "string" },
}, "-sla-1-computer_guid": {
"-sla-1-computer_guid": { "title": "Target computer for first clone",
"title": "Target computer for first clone", "description": "Target computer for first clone and PBS.",
"description": "Target computer for first clone and PBS.", "type": "string",
"type": "string" "optional": true
}, },
"-sla-2-computer_guid": { "-sla-2-computer_guid": {
"title": "Target computer for second clone", "title": "Target computer for second clone",
"description": "Target computer for second clone and PBS.", "description": "Target computer for second clone and PBS.",
"type": "string" "type": "string",
}, "optional": true
"resiliency-backup-periodicity": { },
"title": "Periodicity of backup", "resiliency-backup-periodicity": {
"description": "Periodicity of backup, in cron format.", "title": "Periodicity of backup",
"type": "string" "description": "Periodicity of backup, in cron format.",
}, "type": "string",
"remove-backup-older-than": { "optional": true
"title": "Remove backups older than...", },
"description": "Remove all the backups in PBS that are older than specified value. It should be rdiff-backup-compatible.", "remove-backup-older-than": {
"type": "string", "title": "Remove backups older than...",
"default": "3B" "description": "Remove all the backups in PBS that are older than specified value. It should be rdiff-backup-compatible.",
}, "type": "string",
"resilient-clone-number": { "default": "3B",
"title": "Amount of backup(s) to create", "optional": true
"description": "Amount of backup(s) to create. Each backup consists of a Pull Backup Server and a clone.", },
"type": "integer", "resilient-clone-number": {
"default": 2 "title": "Amount of backup(s) to create",
}, "description": "Amount of backup(s) to create. Each backup consists of a Pull Backup Server and a clone.",
"ignore-known-hosts-file": { "type": "integer",
"title": "Ignore known_hosts file", "default": 2,
"description": "Set either to fill known_hosts file for ssh or not. Useful if main instance and PBS are using the same IP (slapos proxy, webrunner).", "optional": true
"type": "boolean", },
"default": false "ignore-known-hosts-file": {
"title": "Ignore known_hosts file",
"description": "Set either to fill known_hosts file for ssh or not. Useful if main instance and PBS are using the same IP (slapos proxy, webrunner).",
"type": "boolean",
"default": false,
"optional": true
}
} }
} }
} ]
} }
...@@ -66,5 +66,5 @@ return = ipv6 ...@@ -66,5 +66,5 @@ return = ipv6
sla-computer_guid = ${slap-connection:computer-id} sla-computer_guid = ${slap-connection:computer-id}
[slap-parameter] [slap-parameter]
virtual-hard-drive-url = https://softinst43236.host.vifib.net/data/public/fbd4ad.php?dl=true virtual-hard-drive-url = http://www.nexedi.org/static/slapos/kvm_resiliency_test/virtual.qcow2
virtual-hard-drive-md5sum = 465e1024447997e7b86ee2e5151e031b virtual-hard-drive-md5sum = 465e1024447997e7b86ee2e5151e031b
{% set enable_http = slapparameter_dict.get('enable-http-server', 'False') -%}
############################# #############################
# #
# Instanciate kvm # Instanciate kvm
...@@ -15,6 +16,17 @@ parts = ...@@ -15,6 +16,17 @@ parts =
cron cron
# cron-entry-monitor # cron-entry-monitor
frontend-promise frontend-promise
{% if slapparameter_dict.get('document-host', '') %}
cluster-url-path
{% endif -%}
{% if slapparameter_dict.get('enable-http-server', 'False') == 'True' %}
httpd
httpd-promise
publish-host-config
extends =
{{ template_httpd_cfg }}
{% endif -%}
eggs-directory = {{ eggs_directory }} eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }} develop-eggs-directory = {{ develop_eggs_directory }}
...@@ -33,6 +45,7 @@ promises = ${:etc}/promise ...@@ -33,6 +45,7 @@ promises = ${:etc}/promise
novnc-conf = ${:etc}/novnc novnc-conf = ${:etc}/novnc
run = ${:var}/run run = ${:var}/run
ca-dir = ${:srv}/ssl ca-dir = ${:srv}/ssl
public = ${:srv}/public/
cron-entries = ${:etc}/cron.d cron-entries = ${:etc}/cron.d
crontabs = ${:etc}/crontabs crontabs = ${:etc}/crontabs
cronstamps = ${:etc}/cronstamps cronstamps = ${:etc}/cronstamps
...@@ -41,6 +54,10 @@ cronstamps = ${:etc}/cronstamps ...@@ -41,6 +54,10 @@ cronstamps = ${:etc}/cronstamps
recipe = slapos.cookbook:generate.mac recipe = slapos.cookbook:generate.mac
storage-path = ${directory:srv}/mac storage-path = ${directory:srv}/mac
[create-tap-mac]
recipe = slapos.cookbook:generate.mac
storage-path = ${directory:srv}/tap_mac
[gen-passwd] [gen-passwd]
recipe = slapos.cookbook:generate.password recipe = slapos.cookbook:generate.password
storage-path = ${directory:srv}/passwd storage-path = ${directory:srv}/passwd
...@@ -77,25 +94,54 @@ socket-path = ${directory:var}/qmp_socket ...@@ -77,25 +94,54 @@ socket-path = ${directory:var}/qmp_socket
pid-file-path = ${directory:run}/pid_file pid-file-path = ${directory:run}/pid_file
smp-count = ${slap-parameter:cpu-count} smp-count = ${slap-parameter:cpu-count}
smp-options = ${slap-parameter:cpu-options}
ram-size = ${slap-parameter:ram-size} ram-size = ${slap-parameter:ram-size}
numa = ${slap-parameter:numa}
mac-address = ${create-mac:mac-address} mac-address = ${create-mac:mac-address}
tap-mac-address = ${create-tap-mac:mac-address}
# XXX-Cedric: should be named runner-wrapper-path and controller-wrapper-path # XXX-Cedric: should be named runner-wrapper-path and controller-wrapper-path
runner-path = ${directory:services}/kvm runner-path = ${directory:services}/kvm
controller-path = ${directory:scripts}/kvm_controller controller-path = ${directory:scripts}/kvm_controller
use-tap = ${slap-parameter:use-tap} use-tap = ${slap-parameter:use-tap}
use-nat = ${slap-parameter:use-nat}
nat-rules = ${slap-parameter:nat-rules} nat-rules = ${slap-parameter:nat-rules}
6tunnel-wrapper-path = ${directory:services}/6tunnel 6tunnel-wrapper-path = ${directory:services}/6tunnel
virtual-hard-drive-url = ${slap-parameter:virtual-hard-drive-url} virtual-hard-drive-url = ${slap-parameter:virtual-hard-drive-url}
virtual-hard-drive-md5sum = ${slap-parameter:virtual-hard-drive-md5sum} virtual-hard-drive-md5sum = ${slap-parameter:virtual-hard-drive-md5sum}
virtual-hard-drive-gzipped = ${slap-parameter:virtual-hard-drive-gzipped}
shell-path = {{ dash_executable_location }} shell-path = {{ dash_executable_location }}
qemu-path = {{ qemu_executable_location }} qemu-path = {{ qemu_executable_location }}
qemu-img-path = {{ qemu_img_executable_location }} qemu-img-path = {{ qemu_img_executable_location }}
6tunnel-path = {{ sixtunnel_executable_location }} 6tunnel-path = {{ sixtunnel_executable_location }}
etc-directory = ${directory:etc}
disk-storage-list =
{% for key, path in storage_dict.items() -%}
{{ ' ' ~ key ~ ' ' ~ path }}
{% endfor -%}
external-disk-number = ${slap-parameter:external-disk-number}
external-disk-size = ${slap-parameter:external-disk-size}
external-disk-format = ${slap-parameter:external-disk-format}
{% if slapparameter_dict.get('enable-http-server', 'False') == 'True' or ( slapparameter_dict.get('use-tap', 'False') == 'True' and tap_network_dict.has_key('ipv4') ) -%}
httpd-port = ${slap-parameter:httpd-port}
{% else -%}
httpd-port = 0
{% endif -%}
# Main instance document server info
{% if slapparameter_dict.get('document-host', '') and slapparameter_dict.get('document-port', '') -%}
cluster-doc-host = ${tunnel-cluster-url:ipv4}
cluster-doc-port = ${tunnel-cluster-url:ipv4-port}
{% else -%}
cluster-doc-host =
cluster-doc-port = 0
{% endif -%}
netcat-binary = {{ netcat_bin }}
[kvm-vnc-promise] [kvm-vnc-promise]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
...@@ -107,7 +153,16 @@ port = ${kvm-instance:vnc-port} ...@@ -107,7 +153,16 @@ port = ${kvm-instance:vnc-port}
# Check that disk image is not corrupted # Check that disk image is not corrupted
recipe = collective.recipe.template recipe = collective.recipe.template
input = inline:#!/bin/sh input = inline:#!/bin/sh
# Return code 0 is "OK"
# Return code 3 is "found leaks, but image is OK"
# http://git.qemu.org/?p=qemu.git;a=blob;f=qemu-img.c;h=4e9a7f5741c9cb863d978225829e68fefcae3947;hb=HEAD#l702
${kvm-instance:qemu-img-path} check ${kvm-instance:disk-path} ${kvm-instance:qemu-img-path} check ${kvm-instance:disk-path}
RETURN_CODE=$?
if [ $RETURN_CODE -eq 0 ] || [ $RETURN_CODE -eq 3 ]; then
exit 0
else
exit 1
fi
output = ${directory:promises}/kvm-disk-image-corruption output = ${directory:promises}/kvm-disk-image-corruption
mode = 700 mode = 700
...@@ -193,7 +248,7 @@ key-file = ${slap-connection:key-file} ...@@ -193,7 +248,7 @@ key-file = ${slap-connection:key-file}
cert-file = ${slap-connection:cert-file} cert-file = ${slap-connection:cert-file}
computer-id = ${slap-connection:computer-id} computer-id = ${slap-connection:computer-id}
partition-id = ${slap-connection:partition-id} partition-id = ${slap-connection:partition-id}
name = VNC Frontend name = ${slap-parameter:frontend-instance-name}
software-type = ${slap-parameter:frontend-software-type} software-type = ${slap-parameter:frontend-software-type}
slave = true slave = true
config-host = ${novnc-instance:ip} config-host = ${novnc-instance:ip}
...@@ -211,22 +266,127 @@ curl_path = {{ curl_executable_location }} ...@@ -211,22 +266,127 @@ curl_path = {{ curl_executable_location }}
[publish-connection-information] [publish-connection-information]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
ipv6 = ${slap-network-information:global-ipv6}
backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc_auto.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1&password=${kvm-instance:vnc-passwd} backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc_auto.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1&password=${kvm-instance:vnc-passwd}
url = ${request-slave-frontend:connection-url}/vnc_auto.html?host=${request-slave-frontend:connection-domainname}&port=${request-slave-frontend:connection-port}&encrypt=1&path=${request-slave-frontend:connection-resource}&password=${kvm-instance:vnc-passwd} url = ${request-slave-frontend:connection-url}/vnc_auto.html?host=${request-slave-frontend:connection-domainname}&port=${request-slave-frontend:connection-port}&encrypt=1&path=${request-slave-frontend:connection-resource}&password=${kvm-instance:vnc-passwd}
{% set disk_number = len(storage_dict) -%}
maximum-extra-disk-amount = {{ disk_number }}
{% set iface = 'eth0' -%}
{% if slapparameter_dict.get('use-nat', 'True') == 'True' -%}
{% set iface = 'eth1' -%}
# Publish NAT port mapping status # Publish NAT port mapping status
# XXX: hardcoded value from [slap-parameter] # XXX: hardcoded value from [slap-parameter]
{% set nat_rule_list = slapparameter_dict.get('nat-rules', '22 80 443') %} {% set nat_rule_list = slapparameter_dict.get('nat-rules', '22 80 443') %}
{% for port in nat_rule_list.split(' ') -%} {% for port in nat_rule_list.split(' ') -%}
{% set external_port = 10000 + port|int() -%} {% set external_port = 10000 + port|int() -%}
nat-rule-port-{{port}} = ${slap-network-information:global-ipv6} : {{external_port}} nat-rule-port-{{port}} = ${slap-network-information:global-ipv6} : {{external_port}}
{% endfor -%} {% if slapparameter_dict.get('publish-nat-url', False) -%}
nat-rule-url-{{port}} = [${slap-network-information:global-ipv6}]:{{external_port}}
{% endif -%}
{% endfor -%}
{% endif -%}
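# Illustrative example (not part of the recipe): with the default nat-rules
# "22 80 443", guest port 22 is exposed on host port 10022 (10000 + 22), so the
# VM could for instance be reached from outside with:
#   ssh -p 10022 <user>@<global-ipv6-of-this-partition>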
{% if slapparameter_dict.get('use-tap', 'False') == 'True' -%}
tap-ipv4 = ${slap-network-information:tap-ipv4}
{% endif -%}
{% set kvm_http = 'http://${slap-network-information:local-ipv4}:' ~ slapparameter_dict.get('httpd-port', 8081) -%}
{% if enable_http == 'True' %}
{% if slapparameter_dict.get('use-nat', 'True') == 'True' -%}
{% set kvm_http = 'http://10.0.2.100' -%}
{% endif %}
{% if slapparameter_dict.get('authorized-key', '') -%}
7_info = Get the public key file in your VM with the command: wget {{ kvm_http }}/${get-authorized-key:filename}
{% endif %}
{% endif %}
{% if slapparameter_dict.get('use-tap', 'False') == 'True' and tap_network_dict.has_key('ipv4') -%}
1_info = Use the configuration below to configure interface {{ iface }} in your VM.
2_info = ${network-config:ifconfig}
3_info = ${network-config:route-iface}
4_info = ${network-config:route-network}
5_info = ${network-config:route-default}
{% if enable_http == 'True' %}
6_info = Or run the following command in your VM: wget -O- {{ kvm_http }}/netconfig.sh | /bin/sh -
{% endif %}
[network-config]
recipe = plone.recipe.command
path = ${directory:public}/netconfig.sh
ifconfig = ifconfig {{ iface }} ${slap-network-information:tap-ipv4} netmask ${slap-network-information:tap-netmask}
route-iface = route add ${slap-network-information:tap-gateway} dev {{ iface }}
route-network = route add -net ${slap-network-information:tap-network} netmask ${slap-network-information:tap-netmask} gw ${slap-network-information:tap-gateway}
{% if iface == 'eth0' -%}
route-default = route add default gw ${slap-network-information:tap-gateway}
{% elif global_ipv4_prefix -%}
route-default = ip route add {{ global_ipv4_prefix }} via ${slap-network-information:tap-gateway} dev {{ iface }} src ${slap-network-information:tap-ipv4}
{% else -%}
route-default =
{% endif -%}
command =
echo "#!/bin/sh" > ${:path}
echo "" >> ${:path}
echo "${:ifconfig}" >> ${:path}
echo "${:route-iface}" >> ${:path}
echo "${:route-network}" >> ${:path}
echo "${:route-default}" >> ${:path}
update-command = ${:command}
{% endif -%}
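# For illustration only (addresses are hypothetical), with use-nat disabled the
# generated netconfig.sh would look like:
#   #!/bin/sh
#
#   ifconfig eth0 10.8.0.2 netmask 255.255.255.0
#   route add 10.8.0.1 dev eth0
#   route add -net 10.8.0.0 netmask 255.255.255.0 gw 10.8.0.1
#   route add default gw 10.8.0.1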
[get-authorized-key]
recipe = slapos.recipe.template:jinja2
template = {{ template_content }}
filename = authorized_keys
rendered = ${directory:public}/${:filename}
public-key = {{ slapparameter_dict.get('authorized-key', '') }}
context =
key content_list :public-key
raw sep ##
[publish-host-config]
recipe = plone.recipe.command
name = {{ slapparameter_dict.get('name', 'localhost') }}
{% if slapparameter_dict.get('use-tap', 'False') == 'True' and tap_network_dict.has_key('ipv4') -%}
local-ipv4 = ${slap-network-information:tap-ipv4}
{% else -%}
local-ipv4 = 127.0.0.1
{% endif -%}
path-host = ${directory:public}/hostname
path-ip = ${directory:public}/ipv4
command =
rm -f ${:path-host}
rm -f ${:path-ip}
echo "${:name}" > ${:path-host}
echo "${:local-ipv4}" > ${:path-ip}
update-command = ${:command}
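# Illustrative example: the two published files land in the public directory
# served over http, so (assuming use-nat) they could be read from inside the VM
# with:
#   wget -O- http://10.0.2.100/hostname
#   wget -O- http://10.0.2.100/ipv4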
# To access documents of the main instance (in case of kvm-cluster) through http
[cluster-url-path]
recipe = slapos.recipe.template:jinja2
template = {{ template_content }}
filename = cluster.hash
rendered = ${directory:public}/${:filename}
hash-url = https://10.0.2.101:443/{{ slapparameter_dict.get('document-path', '') }}
context =
key content_list :hash-url
raw sep #
# This 6to4 tunnel helps to access the document url over ipv4
[tunnel-cluster-url]
recipe = slapos.cookbook:ipv4toipv6
ipv6 = {{ slapparameter_dict.get('document-host', '') }}
ipv4 = ${slap-network-information:local-ipv4}
ipv6-port = {{ slapparameter_dict.get('document-port', '') }}
ipv4-port = 16936
shell-path = {{ dash_executable_location }}
6tunnel-path = {{ sixtunnel_executable_location }}
runner-path = ${directory:services}/6tunnel-cluster
[slap-parameter] [slap-parameter]
# Default values if not specified # Default values if not specified
frontend-software-type = frontend frontend-software-type = frontend
frontend-software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg frontend-software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg
frontend-instance-guid = frontend-instance-guid =
frontend-instance-name = VNC Frontend
nbd-port = 1024 nbd-port = 1024
nbd-host = nbd-host =
nbd2-port = 1024 nbd2-port = 1024
...@@ -237,9 +397,25 @@ disk-size = 10 ...@@ -237,9 +397,25 @@ disk-size = 10
disk-type = virtio disk-type = virtio
cpu-count = 1 cpu-count = 1
# cpu-options is a string: [cores=cores][,threads=threads][,sockets=sockets][,maxcpus=maxcpus]
cpu-options =
# list of numa options separated by spaces: node,nodeid=1,cpus=9-15 node,nodeid=2,cpus=1,3,7
numa =
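# Illustrative values only, following the formats documented above:
#   cpu-options = cores=2,threads=1,sockets=1
#   numa = node,nodeid=0,cpus=0-1 node,nodeid=1,cpus=2-3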
nat-rules = 22 80 443 nat-rules = 22 80 443
use-nat = True
use-tap = False use-tap = False
virtual-hard-drive-url = virtual-hard-drive-url =
virtual-hard-drive-md5sum = virtual-hard-drive-md5sum =
virtual-hard-drive-gzipped = False
external-disk-number = 0
external-disk-size = 20
external-disk-format = qcow2
# Helps to get some configuration files into the VM over http
enable-http-server = False
httpd-port = 8081
# For auto configuration, the public key file will be available in the VM at http://10.0.2.100/authorized_keys if use-nat = True
authorized-key =
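# Illustrative example: with enable-http-server = True, use-nat = True and an
# authorized-key provided, the key could be installed from inside the VM with:
#   wget -O- http://10.0.2.100/authorized_keys >> ~/.ssh/authorized_keys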
...@@ -8,6 +8,7 @@ develop-eggs-directory = ${buildout:develop-eggs-directory} ...@@ -8,6 +8,7 @@ develop-eggs-directory = ${buildout:develop-eggs-directory}
[switch-softwaretype] [switch-softwaretype]
recipe = slapos.cookbook:softwaretype recipe = slapos.cookbook:softwaretype
default = $${:kvm} default = $${:kvm}
kvm-cluster = $${dynamic-template-kvm-cluster:rendered}
kvm = $${dynamic-template-kvm:rendered} kvm = $${dynamic-template-kvm:rendered}
nbd = ${template-nbd:output} nbd = ${template-nbd:output}
frontend = ${template-frontend:output} frontend = ${template-frontend:output}
...@@ -22,6 +23,13 @@ test = $${dynamic-template-kvm-resilient-test:rendered} ...@@ -22,6 +23,13 @@ test = $${dynamic-template-kvm-resilient-test:rendered}
frozen = ${instance-frozen:output} frozen = ${instance-frozen:output}
pull-backup = ${template-pull-backup:output} pull-backup = ${template-pull-backup:output}
# XXX - If this configuration is not generated by slapgrid, use empty values
[storage-configuration]
storage-home =
[network-information]
global-ipv4-network =
[slap-configuration] [slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id} computer = $${slap-connection:computer-id}
...@@ -29,6 +37,36 @@ partition = $${slap-connection:partition-id} ...@@ -29,6 +37,36 @@ partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url} url = $${slap-connection:server-url}
key = $${slap-connection:key-file} key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file} cert = $${slap-connection:cert-file}
storage-home = $${storage-configuration:storage-home}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = $${buildout:directory}/$${:filename}
extensions = jinja2.ext.do
mode = 0644
extra-context =
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key ipv4 slap-configuration:ipv4
key ipv6 slap-configuration:ipv6
key global_ipv4_prefix network-information:global-ipv4-network
key tap_network_dict slap-configuration:tap-network-information-dict
key storage_dict slap-configuration:storage-dict
key slapparameter_dict slap-configuration:configuration
key computer_id slap-configuration:computer
raw openssl_executable_location ${openssl:location}/bin/openssl
$${:extra-context}
[dynamic-template-kvm-cluster-parameters]
[dynamic-template-kvm-cluster]
<= jinja2-template-base
template = ${template-kvm-cluster:location}/instance-kvm-cluster.cfg.jinja2.in
filename = template-kvm-cluster.cfg
extra-context =
section parameter_dict dynamic-template-kvm-cluster-parameters
raw template_content ${template-content:location}/${template-content:filename}
[dynamic-template-kvm] [dynamic-template-kvm]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
...@@ -38,16 +76,22 @@ extensions = jinja2.ext.do ...@@ -38,16 +76,22 @@ extensions = jinja2.ext.do
context = context =
key develop_eggs_directory buildout:develop-eggs-directory key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory key eggs_directory buildout:eggs-directory
key global_ipv4_prefix network-information:global-ipv4-network
key slapparameter_dict slap-configuration:configuration key slapparameter_dict slap-configuration:configuration
key storage_dict slap-configuration:storage-dict
key tap_network_dict slap-configuration:tap-network-information-dict
raw curl_executable_location ${curl:location}/bin/curl raw curl_executable_location ${curl:location}/bin/curl
raw dash_executable_location ${dash:location}/bin/dash raw dash_executable_location ${dash:location}/bin/dash
raw dcron_executable_location ${dcron:location}/sbin/crond raw dcron_executable_location ${dcron:location}/sbin/crond
raw debian_amd64_netinst_location ${debian-amd64-netinst.iso:location}/${debian-amd64-netinst.iso:filename} raw debian_amd64_netinst_location ${debian-amd64-netinst.iso:location}/${debian-amd64-netinst.iso:filename}
raw novnc_location ${noVNC:location} raw novnc_location ${noVNC:location}
raw netcat_bin ${netcat:location}/bin/netcat
raw openssl_executable_location ${openssl:location}/bin/openssl raw openssl_executable_location ${openssl:location}/bin/openssl
raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64 raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64
raw qemu_img_executable_location ${kvm:location}/bin/qemu-img raw qemu_img_executable_location ${kvm:location}/bin/qemu-img
raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel
raw template_httpd_cfg ${template-httpd:rendered}
raw template_content ${template-content:location}/${template-content:filename}
raw websockify_executable_location ${buildout:directory}/bin/websockify raw websockify_executable_location ${buildout:directory}/bin/websockify
template-parts-destination = ${template-parts:destination} template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination} template-replicated-destination = ${template-replicated:destination}
......
...@@ -5,6 +5,6 @@ extends = development.cfg ...@@ -5,6 +5,6 @@ extends = development.cfg
[template] [template]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-for-erp5testnode.cfg.in url = ${:_profile_base_location_}/instance-for-erp5testnode.cfg.in
md5sum = e8afd5aa5b41df79238e9a84984a6aa5 md5sum = e0af93ba7209cabd5db6d9afcb15c2aa
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
\ No newline at end of file
...@@ -2,46 +2,44 @@ ...@@ -2,46 +2,44 @@
extends = common.cfg extends = common.cfg
[versions] [versions]
# XXX - use websockify = 0.5.1 for compatibility with kvm frontend
websockify = 0.5.1
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
apache-libcloud = 0.15.1 apache-libcloud = 0.16.0
async = 0.6.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
collective.recipe.template = 1.11 collective.recipe.template = 1.11
ecdsa = 0.11 ecdsa = 0.11
erp5.util = 0.4.41 erp5.util = 0.4.42
gitdb = 0.5.4 gitdb = 0.6.4
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
slapos.recipe.download = 1.0.dev-r4053 slapos.recipe.download = 1.0.dev-r4053
slapos.toolbox = 0.40.4 slapos.recipe.template = 2.7
smmap = 0.8.2 slapos.toolbox = 0.46.1
websockify = 0.6.0 smmap = 0.9.0
z3c.recipe.scripts = 1.0.1 z3c.recipe.scripts = 1.0.1
# Required by: # Required by:
# websockify==0.6.0 # slapos.toolbox==0.46.1
numpy = 1.8.2 GitPython = 0.3.6
# Required by:
# slapos.toolbox==0.40.4
GitPython = 0.3.2.RC1
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.46.1
atomize = 0.2.0 atomize = 0.2.0
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.46.1
feedparser = 5.1.3 feedparser = 5.1.3
# Required by: # Required by:
# slapos.cookbook==0.87 # slapos.toolbox==0.46.1
jsonschema = 2.4.0 lockfile = 0.10.2
# Required by: # Required by:
# websockify==0.6.0 # websockify==0.5.1
numpy = 1.9.0 numpy = 1.9.1
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.46.1
paramiko = 1.15.1 paramiko = 1.15.2
{
"name": "KVM",
"description": "KVM",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"description": "Standalone KVM",
"request": "instance-kvm-input-schema.json",
"response": "instance-kvm-output-schema.json",
"index": 0
},
"kvm-resilient": {
"title": "Resilient",
"description": "Resilient KVM",
"request": "instance-kvm-resilient-input-schema.json",
"response": "instance-kvm-output-schema.json",
"index": 1
},
"kvm-cluster": {
"title": "Cluster",
"description": "Cluster KVM",
"serialisation": "json-in-xml",
"request": "instance-kvm-cluster-input-schema.json",
"response": "instance-kvm-output-schema.json",
"index": 2
}
}
}
ServerLimit 2
StartServers 1
MaxClients 2
LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so
LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
LoadModule headers_module modules/mod_headers.so
LoadModule autoindex_module modules/mod_autoindex.so
Listen {{ ip }}:{{ port }}
PidFile "{{ pid_file }}"
ServerAdmin admin@
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
ServerTokens Prod
ServerSignature Off
TraceEnable Off
ErrorLog "{{ error_log }}"
# Default apache log format with request time in microsecond at the end
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
CustomLog "{{ access_log }}" combined
SetEnvIf X-Forwarded-For "^.*\..*\..*\..*" forwarded
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Require all denied
</Directory>
DocumentRoot {{ index_folder }}
<Directory {{ index_folder }}>
Options Indexes FollowSymLinks
Require ip {{ ip }}
# Require env forwarded '{{ ip }}'
Require all denied
</Directory>
\ No newline at end of file
{% for content in content_list.split(sep) -%}
{{ content }}
{% endfor -%}
\ No newline at end of file
...@@ -6,6 +6,7 @@ extends = ...@@ -6,6 +6,7 @@ extends =
../../component/rsync/buildout.cfg ../../component/rsync/buildout.cfg
../../component/smartmontools/buildout.cfg ../../component/smartmontools/buildout.cfg
../../component/lmsensors/buildout.cfg ../../component/lmsensors/buildout.cfg
../../component/fluentd/buildout.cfg
parts = parts =
slapos-cookbook slapos-cookbook
...@@ -15,6 +16,7 @@ parts = ...@@ -15,6 +16,7 @@ parts =
rsync rsync
smartmontools smartmontools
lmsensors lmsensors
fluentd
[template] [template]
recipe = slapos.recipe.template recipe = slapos.recipe.template
...@@ -76,7 +78,7 @@ eggs = ...@@ -76,7 +78,7 @@ eggs =
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
z3c.recipe.scripts = 1.0.1 z3c.recipe.scripts = 1.0.1
apache-libcloud = 0.16.0 apache-libcloud = 0.16.0
...@@ -100,4 +102,4 @@ feedparser = 5.1.3 ...@@ -100,4 +102,4 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.45.1 # slapos.toolbox==0.45.1
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -43,4 +43,4 @@ mode = 0644 ...@@ -43,4 +43,4 @@ mode = 0644
[versions] [versions]
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
...@@ -64,4 +64,4 @@ PyRSS2Gen = 1.1 ...@@ -64,4 +64,4 @@ PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
collective.recipe.template = 1.11 collective.recipe.template = 1.11
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so
LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
#LoadModule dav_module modules/mod_dav.so
#LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
PidFile "{{ pid_file }}"
ServerAdmin admin@
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
ServerTokens Prod
ServerSignature Off
TraceEnable Off
ErrorLog "{{ error_log }}"
# Default apache log format with request time in microsecond at the end
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
CustomLog "{{ access_log }}" combined
{% if uri_scheme == 'https' -%}
# SSL Configuration
SSLCertificateFile {{ certificate }}
SSLCertificateKeyFile {{ key }}
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
SSLProtocol ALL -SSLv2
{% endif -%}
<Directory />
Options FollowSymLinks
AllowOverride None
Allow from all
</Directory>
Listen {{ ipv6 }}:{{ apache_port }}
<VirtualHost *:{{ apache_port }}>
{% if uri_scheme == 'https' -%}
SSLEngine On
SSLProxyEngine On
{% endif -%}
ProxyPass / http://{{ re6st_ipv4 }}:{{ re6st_port }}/
</VirtualHost>
\ No newline at end of file
[buildout]
parts =
cron-entry-logrotate
[cron]
recipe = slapos.cookbook:cron
cron-entries = ${logrotate-directory:cron-entries}
dcrond-binary = {{ dcron_location }}/sbin/crond
crontabs = ${logrotate-directory:crontabs}
cronstamps = ${logrotate-directory:cronstamps}
catcher = ${cron-simplelogger:wrapper}
binary = ${logrotate-directory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = ${logrotate-directory:bin}/cron_simplelogger
log = ${logrotate-directory:log}/cron.log
[logrotate]
recipe = slapos.cookbook:logrotate
logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:logrotate-backup}
logrotate-binary = {{ logrotate_location }}/usr/sbin/logrotate
gzip-binary = {{ gzip_location }}/bin/gzip
gunzip-binary = {{ gzip_location }}/bin/gunzip
wrapper = ${logrotate-directory:bin}/logrotate
conf = ${logrotate-directory:etc}/logrotate.conf
state-file = ${logrotate-directory:srv}/logrotate.status
[cron-entry-logrotate]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = logrotate
frequency = 0 0 * * *
command = ${logrotate:wrapper}
[logrotate-directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = ${:etc}/cron.d
cronstamps = ${:etc}/cronstamps
crontabs = ${:etc}/crontabs
logrotate-backup = ${:backup}/logrotate
logrotate-entries = ${:etc}/logrotate.d
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
backup = ${:srv}/backup
etc = ${buildout:directory}/etc
services = ${:etc}/service
log = ${buildout:directory}/var/log
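# For illustration, the resulting cron entry is roughly equivalent to the
# crontab line:
#   0 0 * * * <partition>/bin/logrotate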
{
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"ipv6-prefix": {
"title": "Ipv6 prefix to use to setup the new re6st network",
"description": "Prefix ipv6 used by re6st to setup network. It is something like 2001:db8:42::/48",
"type": "string"
},
"key-size": {
"title": "Number of bit to use for certificate generation",
"description": "Specify the size of certificate generated by re6st. by default, generate 2048-bit key length",
"type": "integer",
"minimum": 1024,
"default": 2048
},
"prefix-length": {
"title": "Default length of allocated prefixes.",
"description": "Default length of allocated prefixes.",
"type": "integer",
"default": 16
},
"anonymous-prefix-length": {
"title": "Length of allocated anonymous prefixes.",
"description": "Length of allocated anonymous prefixes. 0 is unset.",
"type": "integer",
"default": 0
},
"mailhost": {
"title": "SMTP host to send confirmation emails.",
"description": "SMTP host to send confirmation emails. Not needed if when token is requested from slave instances.",
"type": "string",
"default": "127.0.0.1"
},
"ipv4-net": {
"title": "Enable ipv4 (ip/NET P_LENGTH).",
"description": "Enable ipv4 (ip/NET P_LENGTH). Each node is assigned a subnet of length PLEN, inside network IP/N. Ex: 10.42.0.0/16 8",
"type": "string",
"default": ""
},
"client-count": {
"title": "Number of client tunnels to set up.",
"description": "Number of client tunnels to set up.",
"type": "integer",
"default": 10
},
"tunnel-refresh": {
"title": "Interval in seconds between two tunnel refresh.",
"description": "Interval in seconds between two tunnel refresh: the worst tunnel is closed if the number of client tunnels has reached its maximum number (client-count).",
"type": "integer",
"default": 300
},
"max-clients": {
"title": "Maximum number of accepted clients per OpenVPN server.",
"description": "Maximum number of accepted clients per OpenVPN server. (if unset or 0: client-count * 2, which actually represents the average number of tunnels to other peers)",
"type": "integer",
"default": 0
},
"hello": {
"title": "Hello interval in seconds, for both wired and wireless connections.",
"description": "Hello interval in seconds, for both wired and wireless connections. OpenVPN ping-exit option is set to 4 times the hello interval. It takes between 3 and 4 times the hello interval for Babel to re-establish connection with a node for which the direct connection has been cut.",
"type": "integer",
"default": 15
},
"min-protocol": {
"title": "Reject nodes that are too old.",
"description": "Reject nodes that are too old. Default is unset.",
"type": "integer",
"default": -1
},
"encrypt": {
"title": "Specify that tunnels should be encrypted.",
"description": "Specify that tunnels should be encrypted.",
"type": "boolean",
"default": false
}
}
}
\ No newline at end of file
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by Re6st Master instanciation",
"properties": {
"re6stry-url": {
"description": "ipv6 url to access your re6st registry service",
"type": "string"
}
},
"type": "object"
}
\ No newline at end of file
{% set python_bin = parameter_dict['python-executable'] -%}
{% set re6st_registry = parameter_dict['re6st-registry'] -%}
{% set re6stnet = parameter_dict['re6stnet'] -%}
{% set publish_dict = {} -%}
{% set part_list = [] -%}
{% set ipv6 = (ipv6_set | list)[0] -%}
{% set ipv4 = (ipv4_set | list)[0] -%}
{% set uri_scheme = slapparameter_dict.get('uri-scheme', 'http') -%}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
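{# Usage note (illustrative): writing a header as [{{ section('foo') }}] both
   names the buildout section "foo" and appends it to part_list, so it is later
   added to [buildout] parts at the bottom of this template. -#}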
[directory]
recipe = slapos.cookbook:mkdirectory
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
srv = ${buildout:directory}/srv
var = ${buildout:directory}/var
log = ${:var}/log
services = ${:etc}/service
script = ${:etc}/run
promises = ${:etc}/promise
run = ${:var}/run
ca-dir = ${:etc}/ssl
requests = ${:ca-dir}/requests
private = ${:ca-dir}/private
certs = ${:ca-dir}/certs
newcerts = ${:ca-dir}/newcerts
crl = ${:ca-dir}/crl
re6st = ${:srv}/res6stnet
[re6stnet-dirs]
recipe = slapos.cookbook:mkdirectory
registry = ${directory:re6st}/registry
log = ${directory:log}/re6stnet
conf = ${directory:etc}/re6stnet
ssl = ${:conf}/ssl
token = ${:conf}/token
run = ${directory:run}/re6stnet
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = {{ openssl_bin }}/openssl
ca-dir = ${directory:ca-dir}
requests-directory = ${directory:requests}
wrapper = ${directory:services}/certificate_authority
ca-private = ${directory:private}
ca-certs = ${directory:certs}
ca-newcerts = ${directory:newcerts}
ca-crl = ${directory:crl}
[apache-conf]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-apache-conf'] }}
rendered = ${directory:etc}/apache.conf
ipv6 = {{ ipv6 }}
port = 9026
error-log = ${directory:log}/apache-error.log
access-log = ${directory:log}/apache-access.log
pid-file = ${directory:run}/apache.pid
context =
key apache_port :port
key re6st_ipv4 re6st-registry:ipv4
key re6st_port re6st-registry:port
key access_log :access-log
key error_log :error-log
key pid_file :pid-file
raw certificate ${directory:certs}/apache.crt
raw key ${directory:private}/apache.key
raw ipv6 {{ ipv6 }}
raw uri_scheme {{ uri_scheme }}
{% set apache_wrapper = '${directory:services}/httpd' -%}
{% if uri_scheme == 'https' -%}
{% set apache_wrapper = '${directory:bin}/httpd_raw' -%}
{% endif -%}
[apache-httpd]
recipe = slapos.cookbook:wrapper
wrapper-path = {{ apache_wrapper }}
command-line = "{{ parameter_dict['apache-location'] }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND
{% if uri_scheme == 'https' %}
[{{ section('apache-ca') }}]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = ${apache-httpd:wrapper-path}
wrapper = ${directory:services}/httpd
key-file = ${certificate-authority:ca-private}/apache.key
cert-file = ${certificate-authority:ca-certs}/apache.crt
{% endif %}
[apache-httpd-graceful]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-wrapper'] }}
rendered = ${directory:script}/httpd-graceful
mode = 0700
context =
raw content {{ parameter_dict['apache-location'] }}/bin/httpd -Sf ${apache-conf:rendered}; if [ $? -eq 0 ]; then kill -USR1 $(cat ${apache-conf:pid-file}); fi
raw dash {{ dash_binary }}
[logrotate-apache]
< = logrotate-entry-base
name = apache
log = ${apache-conf:error-log} ${apache-conf:access-log}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf:pid-file} -s USR1
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
[re6st-registry-conf-dict]
port = 9201
ipv4 = {{ ipv4 }}
ipv6 = {{ ipv6 }}
db = ${re6stnet-dirs:registry}/registry.db
ca = ${re6stnet-dirs:ssl}/re6stnet.crt
key = ${re6stnet-dirs:ssl}/re6stnet.key
dh = ${re6stnet-dirs:ssl}/dh.pem
verbose = 2
mailhost = {{ slapparameter_dict.get('mailhost', '127.0.0.1') }}
prefix-length = {{ slapparameter_dict.get('prefix-length', 16) }}
anonymous-prefix-length = {{ slapparameter_dict.get('anonymous-prefix-length', 0) }}
logfile = ${re6stnet-dirs:log}/registry.log
run-dir = ${re6stnet-dirs:run}
ipv4-net = {{ slapparameter_dict.get('ipv4-net', '') }}
client-count = {{ slapparameter_dict.get('client-count', 10) }}
tunnel-refresh = {{ slapparameter_dict.get('tunnel-refresh', 300) }}
max-clients = {{ slapparameter_dict.get('max-clients', 0) }}
hello = {{ slapparameter_dict.get('hello', 15) }}
min-protocol = {{ slapparameter_dict.get('min-protocol', -1) }}
encrypt = {{ slapparameter_dict.get('encrypt', 'False') }}
[re6st-registry-conf]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-re6st-registry-conf'] }}
rendered = ${directory:etc}/re6st-registry.conf
context = section parameter_dict re6st-registry-conf-dict
[re6st-registry]
recipe = slapos.cookbook:re6stnet.registry
command = {{ re6st_registry }}
wrapper = ${directory:services}/re6st-registry
pid-file = ${directory:run}/registry.pid
manager-wrapper = ${directory:bin}/re6stManageToken
check-service-wrapper = ${directory:bin}/re6stCheckService
drop-service-wrapper = ${directory:bin}/re6stManageDeleteToken
revoke-service-wrapper = ${directory:bin}/re6stRevokeCertificate
openssl-bin = {{ openssl_bin }}/openssl
python-bin = {{ python_bin }}
ipv6-prefix = {{ slapparameter_dict.get('ipv6-prefix', '2001:db8:24::/48') }}
key-size = {{ slapparameter_dict.get('key-size', 2048) }}
conf-dir = ${re6stnet-dirs:conf}
token-dir = ${re6stnet-dirs:token}
#Re6st config
config-file = ${re6st-registry-conf:rendered}
port = ${re6st-registry-conf-dict:port}
ipv4 = ${re6st-registry-conf-dict:ipv4}
db-path = ${re6st-registry-conf-dict:db}
key-file = ${re6st-registry-conf-dict:key}
cert-file = ${re6st-registry-conf-dict:ca}
dh-file = ${re6st-registry-conf-dict:dh}
slave-instance-list = ${slap-parameter:slave_instance_list}
environment =
PATH={{ openssl_bin }}
[re6stnet-manage]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:script}/re6st-token-manager
command-line = "{{ python_bin }}" ${re6st-registry:manager-wrapper}
[cron-entry-re6st-check]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-check-token
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:check-service-wrapper}
[cron-entry-re6st-revoke]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-revoke-cert
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:revoke-service-wrapper}
[cron-entry-re6st-drop]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-drop-token
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:drop-service-wrapper}
[logrotate-entry-re6stnet]
< = logrotate-entry-base
name = re6stnet
log = ${re6st-registry-conf-dict:logfile}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${re6st-registry:pid-file} -s USR1
[re6st-registry-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/re6st-registry
hostname = ${re6st-registry:ipv4}
port = ${re6st-registry:port}
[apache-registry-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/apache-re6st-registry
hostname = ${apache-conf:ipv6}
port = ${apache-conf:port}
{% do publish_dict.__setitem__('re6stry-url', uri_scheme ~ '://[${apache-conf:ipv6}]:${apache-conf:port}') -%}
{% do publish_dict.__setitem__('re6stry-local-url', 'http://${re6st-registry:ipv4}:${re6st-registry:port}/') -%}
{% do publish_dict.__setitem__('slave-amount', '${re6st-registry:slave-amount}') -%}
[publish]
recipe = slapos.cookbook:publish
{% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }}
{% endfor -%}
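# For illustration, with uri-scheme = https the published values would look like:
#   re6stry-url = https://[<ipv6>]:9026
#   re6stry-local-url = http://<ipv4>:9201/
#   slave-amount = <number of registered slaves>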
[buildout]
extends =
{{ logrotate_cfg }}
parts =
certificate-authority
logrotate-apache
logrotate-entry-re6stnet
re6stnet-manage
cron-entry-logrotate
cron-entry-re6st-check
cron-entry-re6st-drop
cron-entry-re6st-revoke
apache-httpd
apache-httpd-graceful
publish
re6st-registry-promise
apache-registry-promise
# Complete parts with sections
{{ part_list | join('\n ') }}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[slap-parameter]
slave_instance_list = {}
[buildout]
parts = switch-softwaretype
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename}
extra-context =
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key ipv6_set slap-configuration:ipv6
key ipv4_set slap-configuration:ipv4
key slapparameter_dict slap-configuration:configuration
key computer_id slap-configuration:computer
raw logrotate_cfg {{ template_logrotate_base }}
raw dash_binary {{ dash_location }}/bin/dash
raw openssl_bin {{ openssl_location}}/bin
${:extra-context}
[dynamic-template-re6stnet-parameters]
bin-directory = {{ bin_directory }}
python-executable = {{ python_with_eggs }}
re6st-registry = {{ bin_directory }}/re6st-registry
re6stnet = {{ bin_directory }}/re6stnet
template-apache-conf = {{ template_apache_conf }}
template-wrapper = {{ template_wrapper }}
apache-location = {{ apache_location }}
template-re6st-registry-conf = {{ template_re6st_registry_conf }}
[dynamic-template-re6stnet]
< = jinja2-template-base
template = {{ template_re6stnet }}
filename = instance-re6stnet.cfg
extensions = jinja2.ext.do
extra-context =
section parameter_dict dynamic-template-re6stnet-parameters
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${dynamic-template-re6stnet:rendered}
registry = ${:default}
port {{ parameter_dict['port'] }}
4 {{ parameter_dict['ipv4'] }}
# 6 {{ parameter_dict['ipv6'] }}
db {{ parameter_dict['db'] }}
ca {{ parameter_dict['ca'] }}
key {{ parameter_dict['key'] }}
dh {{ parameter_dict['dh'] }}
{% if parameter_dict.get('ipv4-net', '') -%}
ipv4 {{ parameter_dict['ipv4-net'] }}
{% endif -%}
client-count {{ parameter_dict['client-count'] }}
tunnel-refresh {{ parameter_dict['tunnel-refresh'] }}
hello {{ parameter_dict['hello'] }}
mailhost {{ parameter_dict['mailhost'] }}
prefix-length {{ parameter_dict['prefix-length'] }}
anonymous-prefix-length {{ parameter_dict['anonymous-prefix-length'] }}
logfile {{ parameter_dict['logfile'] }}
verbose {{ parameter_dict['verbose'] }}
run {{ parameter_dict['run-dir'] }}
{% if parameter_dict.get('min-protocol', '-1') != '-1' -%}
min-protocol {{ parameter_dict['min-protocol'] }}
{% endif -%}
{% if parameter_dict.get('encrypt', 'False') in ('true', 'True', '1') -%}
encrypt
{% endif -%}
{% if parameter_dict.get('max-clients') != '0' -%}
max-clients {{ parameter_dict['max-clients'] }}
{% endif -%}
\ No newline at end of file
[buildout]
extends =
../../component/re6stnet/buildout.cfg
../../component/dash/buildout.cfg
../../component/git/buildout.cfg
../../component/dcron/buildout.cfg
../../component/gzip/buildout.cfg
../../component/openssl/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/apache/buildout.cfg
../../stack/slapos.cfg
develop =
${:parts-directory}/re6stnet-repository
parts =
slapos-cookbook
eggs
dash
babeld
re6stnet-develop
re6stnet
template
slapos.cookbook-repository
check-recipe
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
slapos.toolbox
scripts =
slapos-kill
[extra-eggs]
recipe = zc.recipe.egg
interpreter = pythonwitheggs
eggs =
${lxml-python:egg}
${python-cffi:egg}
${python-cryptography:egg}
pyOpenSSL
miniupnpc
re6stnet
[re6stnet-repository]
repository = http://git.erp5.org/repos/re6stnet.git
branch = re6st-slapos
revision = 8130c7380cbf3f13bdc4e5ecb8e792fb7d2b7b2e
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/slapos.git
branch = re6st-master
git-executable = ${git:location}/bin/git
[download-base]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
mode = 644
[template-jinja2-base]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:directory}/${:filename}
# XXX: extra-context is needed because we cannot append to a key of an extended
# section.
extra-context =
context =
key bin_directory buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
${:extra-context}
[template]
< = template-jinja2-base
filename = template.cfg
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 1f39ec8d7d91c0ba2b4ad56f27ebac39
extra-context =
key apache_location apache:location
key dash_location dash:location
key logrotate_location logrotate:location
key openssl_location openssl:location
key template_apache_conf template-apache-conf:target
key template_re6stnet template-re6stnet:target
key template_re6st_registry_conf template-re6st-registry-conf:target
key template_logrotate_base template-logrotate-base:rendered
key template_wrapper template-wrapper:target
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
[template-re6stnet]
< = download-base
filename = instance-re6stnet.cfg.in
md5sum = a5000a513877bdab10f160ac0aaac95f
[template-logrotate-base]
< = template-jinja2-base
filename = instance-logrotate-base.cfg
md5sum = f28fbd310944f321ccb34b2a34c82005
extra-context =
key dcron_location dcron:location
key gzip_location gzip:location
key logrotate_location logrotate:location
[template-apache-conf]
< = download-base
filename = apache.conf.in
md5sum = 6fcf417f6b9651b1ed442f00c094f50c
[template-re6st-registry-conf]
< = download-base
filename = re6st-registry.conf.in
md5sum = 5dc218f887faeffc466e41c7d6191e49
[template-wrapper]
< = download-base
filename = wrapper.in
md5sum = 69e63cb58267335e21da772bd867657e
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/re6stnet.egg-link
[versions]
apache-libcloud = 0.17.0
ecdsa = 0.13
gitdb = 0.6.4
plone.recipe.command = 1.1
pycrypto = 2.6.1
slapos.recipe.template = 2.7
slapos.toolbox = 0.47.3
smmap = 0.9.0
# Required by:
# slapos.toolbox==0.47.3
GitPython = 0.3.6
# Required by:
# slapos.toolbox==0.47.3
atomize = 0.2.0
# Required by:
# apache-libcloud==0.17.0
backports.ssl-match-hostname = 3.4.0.2
# Required by:
# slapos.toolbox==0.47.3
feedparser = 5.1.3
# Required by:
# slapos.toolbox==0.47.3
lockfile = 0.10.2
# Required by:
# re6stnet===0-413.gbec6b3c.dirty
miniupnpc = 1.9
# Required by:
# slapos.toolbox==0.47.3
paramiko = 1.15.2
# Required by:
# slapos.toolbox==0.47.3
rpdb = 0.1.5
{
"name": "RE6STNET",
"description": "Master instance of re6st (Resilient, Scalable, IPv6 Network application)",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"description": "Re6st registry",
"request": "instance-re6stnet-input-schema.json",
"response": "instance-re6stnet-output-schema.json",
"index": 0
},
"registry": {
"title": "registry",
"description": "Re6st registry",
"request": "instance-re6stnet-resilient-input-schema.json",
"response": "instance-re6stnet-output-schema.json",
"index": 1
}
}
}
\ No newline at end of file
#!{{ dash }}
{{ content }}
\ No newline at end of file
...@@ -47,4 +47,4 @@ mode = 0644 ...@@ -47,4 +47,4 @@ mode = 0644
[versions] [versions]
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
...@@ -12,4 +12,4 @@ Pygments = 1.6 ...@@ -12,4 +12,4 @@ Pygments = 1.6
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
collective.recipe.template = 1.10 collective.recipe.template = 1.10
slapos.recipe.download = 1.0.dev-r4053 slapos.recipe.download = 1.0.dev-r4053
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
...@@ -83,4 +83,4 @@ mode = 640 ...@@ -83,4 +83,4 @@ mode = 640
Pygments = 1.6 Pygments = 1.6
collective.recipe.template = 1.10 collective.recipe.template = 1.10
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
...@@ -29,7 +29,6 @@ parts = ...@@ -29,7 +29,6 @@ parts =
eggs eggs
instance-runner-import instance-runner-import
instance-runner-export instance-runner-export
slapos-cookbook
template-slapos-cfg template-slapos-cfg
# XXX: we have to manually add this for resilience # XXX: we have to manually add this for resilience
rdiff-backup rdiff-backup
...@@ -39,21 +38,21 @@ parts = ...@@ -39,21 +38,21 @@ parts =
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
md5sum = 8a47421ac6158b4ee476acab212c67d9 md5sum = bb7e0bf9959c4437ff1e23e645315ccf
mode = 0644 mode = 0644
[template-runner] [template-runner]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner.cfg url = ${:_profile_base_location_}/instance-runner.cfg
output = ${buildout:directory}/template-runner.cfg.in output = ${buildout:directory}/template-runner.cfg.in
md5sum = 64a2a9748829f7ecbf89dd0cb455c4c7 md5sum = 67fd0af225c6c4f93f539f82dd23a2db
mode = 0644 mode = 0644
[template-runner-import-script] [template-runner-import-script]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/runner-import.sh.jinja2 url = ${:_profile_base_location_}/template/runner-import.sh.jinja2
download-only = true download-only = true
md5sum = c0d05a26b06ce172efaad03c52ef92ca md5sum = d7f88b58b2508ce5af42ea7d7241626e
filename = runner-import.sh.jinja2 filename = runner-import.sh.jinja2
mode = 0644 mode = 0644
...@@ -61,14 +60,14 @@ mode = 0644 ...@@ -61,14 +60,14 @@ mode = 0644
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-import.cfg.in url = ${:_profile_base_location_}/instance-runner-import.cfg.in
output = ${buildout:directory}/instance-runner-import.cfg output = ${buildout:directory}/instance-runner-import.cfg
md5sum = a85d054b3e2ae9243d8d188c897dc121 md5sum = a442695f6875ae0b65439c88db1644fb
mode = 0644 mode = 0644
[template-runner-export-script] [template-runner-export-script]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/runner-export.sh.jinja2 url = ${:_profile_base_location_}/template/runner-export.sh.jinja2
download-only = true download-only = true
md5sum = 0f290b46c0b89ff84aee5c10477e07ca md5sum = 9dc934fe5015ff53869830a833266192
filename = runner-export.sh.jinja2 filename = runner-export.sh.jinja2
mode = 0644 mode = 0644
...@@ -76,7 +75,7 @@ mode = 0644 ...@@ -76,7 +75,7 @@ mode = 0644
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-export.cfg.in url = ${:_profile_base_location_}/instance-runner-export.cfg.in
output = ${buildout:directory}/instance-runner-export.cfg output = ${buildout:directory}/instance-runner-export.cfg
md5sum = 521bad4c571b5b2dc3eee6090802de95 md5sum = 53be4bd07b2af826d9c80eea4185f7cb
mode = 0644 mode = 0644
[template-resilient] [template-resilient]
...@@ -90,7 +89,7 @@ mode = 0644 ...@@ -90,7 +89,7 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-resilient-test.cfg.jinja2 url = ${:_profile_base_location_}/instance-resilient-test.cfg.jinja2
download-only = true download-only = true
md5sum = b95b230bcdd7d4eaf9452f73156b27e8 md5sum = fb8c45e5c35548331fb06c4633ec592a
filename = instance-resilient-test.cfg.jinja2 filename = instance-resilient-test.cfg.jinja2
mode = 0644 mode = 0644
...@@ -106,7 +105,7 @@ mode = 0644 ...@@ -106,7 +105,7 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/httpd_conf.in url = ${:_profile_base_location_}/httpd_conf.in
download-only = true download-only = true
md5sum = 94bc5d6f1afc4e075d530fed496f47c2 md5sum = b5d095f54f714d17dff12c0c5fe4afb7
filename = httpd_conf.in filename = httpd_conf.in
mode = 0644 mode = 0644
...@@ -121,7 +120,7 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_} ...@@ -121,7 +120,7 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
[template-slapos-cfg] [template-slapos-cfg]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/${:filename} url = ${:_profile_base_location_}/template/${:filename}
md5sum = ca57a8bf1d4da20344fab602a69b807f md5sum = c490563b831fc959a09b4b233263a6c6
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = slapos.cfg.in filename = slapos.cfg.in
download-only = true download-only = true
...@@ -172,45 +171,27 @@ destination = ${buildout:parts-directory}/monitor-template-cors-domain-cgi ...@@ -172,45 +171,27 @@ destination = ${buildout:parts-directory}/monitor-template-cors-domain-cgi
filename = cors-domain.jinja filename = cors-domain.jinja
mode = 0644 mode = 0644
[monitor-check-webrunner-internal-instance]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/${:filename}
download-only = true
#md5sum = 4c44d617d5bfd1db8695200e896480c0
destination = ${buildout:parts-directory}/${:filename}
filename = monitor-check-webrunner-internal-instances.py
mode = 0644
[eggs] [eggs]
recipe = z3c.recipe.scripts recipe = z3c.recipe.scripts
eggs = eggs =
${lxml-python:egg}
collective.recipe.environment collective.recipe.environment
cns.recipe.symlink cns.recipe.symlink
erp5.util erp5.util
hexagonit.recipe.download
inotifyx
lock-file lock-file
lxml
netaddr
plone.recipe.command plone.recipe.command
pytz
slapos.cookbook
slapos.libnetworkcache
slapos.recipe.build slapos.recipe.build
slapos.toolbox[flask_auth] slapos.toolbox[flask_auth]
slapos.core
supervisor
xml_marshaller
Gunicorn Gunicorn
${slapos-cookbook:eggs}
# Add slapos.libnetworkcache to path of slapos.core so that slaprunner can build SRs using cache
[slapos-cookbook]
eggs =
${lxml-python:egg}
slapos.cookbook
cliff
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
slapos.libnetworkcache
[extra-eggs] [extra-eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
......
...@@ -69,3 +69,7 @@ command = ...@@ -69,3 +69,7 @@ command =
grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link
# grep parts ${buildout:develop-eggs-directory}/erp5.util.egg-link # grep parts ${buildout:develop-eggs-directory}/erp5.util.egg-link
[versions]
slapos.cookbook =
slapos.core =
slapos.toolbox =
...@@ -90,7 +90,7 @@ RewriteCond %{QUERY_STRING} service=git-receive-pack [OR] ...@@ -90,7 +90,7 @@ RewriteCond %{QUERY_STRING} service=git-receive-pack [OR]
RewriteCond %{REQUEST_URI} /git-receive-pack$ RewriteCond %{REQUEST_URI} /git-receive-pack$
<LocationMatch "^/git/"> <LocationMatch "^/git/">
SetEnv GIT_PROJECT_ROOT:{{- parameters.project_private_folder -}} SetEnv GIT_PROJECT_ROOT:{{ parameters.project_private_folder }}
Order Deny,Allow Order Deny,Allow
Deny from env=AUTHREQUIRED Deny from env=AUTHREQUIRED
...@@ -101,7 +101,7 @@ RewriteCond %{REQUEST_URI} /git-receive-pack$ ...@@ -101,7 +101,7 @@ RewriteCond %{REQUEST_URI} /git-receive-pack$
</LocationMatch> </LocationMatch>
<LocationMatch "^/git-public/"> <LocationMatch "^/git-public/">
SetEnv GIT_PROJECT_ROOT:{{- parameters.project_public_folder -}} SetEnv GIT_PROJECT_ROOT:{{ parameters.project_public_folder }}
Order Deny,Allow Order Deny,Allow
Deny from env=AUTHREQUIRED Deny from env=AUTHREQUIRED
......
...@@ -54,7 +54,7 @@ name = Resilient Instance (Root Instance) ...@@ -54,7 +54,7 @@ name = Resilient Instance (Root Instance)
{% for key, value in slapparameter_dict.get('cluster', {}).iteritems() -%} {% for key, value in slapparameter_dict.get('cluster', {}).iteritems() -%}
config-{{ key }} = {{ dumps(value) }} config-{{ key }} = {{ dumps(value) }}
{% endfor -%} {% endfor -%}
config-resiliency-backup-periodicity = */6 * * * * config-resiliency-backup-periodicity = */10 * * * *
config-resilient-clone-number = 1 config-resilient-clone-number = 1
config-ignore-known-hosts-file = true config-ignore-known-hosts-file = true
# XXX hardcoded # XXX hardcoded
......
...@@ -16,13 +16,14 @@ parts += ...@@ -16,13 +16,14 @@ parts +=
publish-connection-informations publish-connection-informations
slaprunner-promise slaprunner-promise
slaprunner-frontend-promise slaprunner-frontend-promise
slaprunner-supervisord-wrapper
dropbear-promise dropbear-promise
runtestsuite runtestsuite
shellinabox-promise shellinabox-promise
symlinks symlinks
shellinabox shellinabox
slapos-cfg slapos-cfg
slapos-repo-config slapos-repo
cron-entry-backup cron-entry-backup
cron-entry-prepare-software cron-entry-prepare-software
deploy-instance-parameters deploy-instance-parameters
...@@ -49,7 +50,9 @@ parts += ...@@ -49,7 +50,9 @@ parts +=
bash-profile bash-profile
## Monitor for runner ## Monitor for runner
monitor-current-log-access monitor-current-log-access
monitor-deploy-cors-domain-cgi
monitor-check-resilient-feed-file monitor-check-resilient-feed-file
monitor-check-webrunner-internal-instance
[exporter] [exporter]
......
...@@ -14,18 +14,20 @@ parts += ...@@ -14,18 +14,20 @@ parts +=
dropbear-server-add-authorized-key dropbear-server-add-authorized-key
sshkeys-authority sshkeys-authority
slaprunner-promise slaprunner-promise
slaprunner-supervisord-wrapper
dropbear-promise dropbear-promise
runtestsuite runtestsuite
shellinabox-promise shellinabox-promise
shellinabox shellinabox
symlinks symlinks
slapos-cfg slapos-cfg
slapos-repo-config slapos-repo
cron-entry-prepare-software cron-entry-prepare-software
deploy-instance-parameters deploy-instance-parameters
instance-software-type instance-software-type
bash-profile bash-profile
supervisord-wrapper supervisord-wrapper
importer-consistency-promise
# have to repeat the next one, as it's not inherited from pbsready-import # have to repeat the next one, as it's not inherited from pbsready-import
import-on-notification import-on-notification
## Monitoring part ## Monitoring part
...@@ -52,6 +54,14 @@ parts += ...@@ -52,6 +54,14 @@ parts +=
## Monitor for import runner ## Monitor for import runner
monitor-latest-restored-backup monitor-latest-restored-backup
# For the needs of the importer, we run the full slaprunner.
# In case both exporter and importer (aka main instance and clone instance)
# run with the same IP (usually for testing purposes),
# run slaprunner using different ports.
[slaprunner]
proxy_port = 50000
runner_port = 50005
[importer] [importer]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
template = ${template-runner-import-script:location}/${template-runner-import-script:filename} template = ${template-runner-import-script:location}/${template-runner-import-script:filename}
...@@ -59,12 +69,30 @@ rendered = $${directory:bin}/$${slap-parameter:namebase}-importer ...@@ -59,12 +69,30 @@ rendered = $${directory:bin}/$${slap-parameter:namebase}-importer
# backward compatibility for resilient stack # backward compatibility for resilient stack
wrapper = $${:rendered} wrapper = $${:rendered}
mode = 700 mode = 700
restore-exit-code-file=$${directory:srv}/importer-exit-code-file
context = context =
key backend_url slaprunner:access-url key backend_url slaprunner:access-url
key proxy_host slaprunner:ipv4
section directory directory section directory directory
raw shell_binary ${dash:location}/bin/dash raw shell_binary ${dash:location}/bin/dash
raw rsync_binary ${rsync:location}/bin/rsync raw rsync_binary ${rsync:location}/bin/rsync
raw curl_binary ${curl:location}/bin/curl raw restore_exit_code_file $${:restore-exit-code-file}
[importer-consistency-promise]
# Test that the importer script and "after-import" subscripts:
# 1/ Have been run in the last 24 hours
# 2/ Have succeeded
recipe = collective.recipe.template
input = inline: #!/bin/sh
EXIT_CODE_FILE=$(find "$${importer:restore-exit-code-file}" -mtime -1)
if [ -z "$EXIT_CODE_FILE" ]; then
echo "Consistency check is too old."
exit 1
fi
EXIT_CODE=$(cat $EXIT_CODE_FILE)
exit $EXIT_CODE
output = $${directory:promises}/importer-consistency-promise
mode = 755
[slap-parameter] [slap-parameter]
auto-deploy-instance = false auto-deploy-instance = false
......
...@@ -92,6 +92,7 @@ ...@@ -92,6 +92,7 @@
"exclusiveMinimum": true "exclusiveMinimum": true
}, },
"cpu-usage-ratio": { "cpu-usage-ratio": {
"title": "CPU Usage Ratio",
"description": "Ratio of the CPU use for compilation, if value is set to n, compilation will use number-of-cpu/n of cpus (need instance restart)", "description": "Ratio of the CPU use for compilation, if value is set to n, compilation will use number-of-cpu/n of cpus (need instance restart)",
"type": "integer", "type": "integer",
"default" : 4 "default" : 4
......
{
"type": "object",
"$schema": "http://json-schema.org/draft-04/schema",
"allOf": [
{
"$ref": "instance-runner-input-schema.json#/"
},
{
"properties": {
"-sla-0-computer_guid": {
"title": "Target computer for main instance",
"description": "Target computer GUID for main instance.",
"type": "string",
"optional": true
},
"-sla-1-computer_guid": {
"title": "Target computer for first clone",
"description": "Target computer for first clone and PBS.",
"type": "string",
"optional": true
},
"-sla-2-computer_guid": {
"title": "Target computer for second clone",
"description": "Target computer for second clone and PBS.",
"type": "string",
"optional": true
},
"resiliency-backup-periodicity": {
"title": "Periodicity of backup",
"description": "Periodicity of backup, in cron format.",
"type": "string",
"optional": true
},
"remove-backup-older-than": {
"title": "Remove backups older than...",
"description": "Remove all the backups in PBS that are older than specified value. It should be rdiff-backup-compatible.",
"type": "string",
"default": "3B",
"optional": true
},
"resilient-clone-number": {
"title": "Amount of backup(s) to create",
"description": "Amount of backup(s) to create. Each backup consists of a Pull Backup Server and a clone.",
"type": "integer",
"default": 2,
"optional": true
},
"ignore-known-hosts-file": {
"title": "Ignore known_hosts file",
"description": "Set either to fill known_hosts file for ssh or not. Useful if main instance and PBS are using the same IP (slapos proxy, webrunner).",
"type": "boolean",
"default": false,
"optional": true
}
}
}
]
}
\ No newline at end of file
...@@ -13,15 +13,17 @@ parts = ...@@ -13,15 +13,17 @@ parts =
publish-connection-informations publish-connection-informations
slaprunner-promise slaprunner-promise
slaprunner-frontend-promise slaprunner-frontend-promise
slaprunner-supervisord-wrapper
dropbear-promise dropbear-promise
runtestsuite runtestsuite
shellinabox-promise shellinabox-promise
symlinks symlinks
shellinabox shellinabox
slapos-cfg slapos-cfg
slapos-repo-config slapos-repo
cron-entry-prepare-software cron-entry-prepare-software
deploy-instance-parameters deploy-instance-parameters
instance-software
instance-software-type instance-software-type
minishell-cwd minishell-cwd
bash-profile bash-profile
...@@ -51,6 +53,7 @@ parts = ...@@ -51,6 +53,7 @@ parts =
## Monitor for runner ## Monitor for runner
monitor-current-log-access monitor-current-log-access
monitor-deploy-cors-domain-cgi monitor-deploy-cors-domain-cgi
monitor-check-webrunner-internal-instance
extends = ${monitor-template:output} extends = ${monitor-template:output}
...@@ -77,7 +80,7 @@ cert-file = $${slap-connection:cert-file} ...@@ -77,7 +80,7 @@ cert-file = $${slap-connection:cert-file}
computer-id = $${slap-connection:computer-id} computer-id = $${slap-connection:computer-id}
partition-id = $${slap-connection:partition-id} partition-id = $${slap-connection:partition-id}
{%- if slapparameter_dict.get('custom-frontend-instance-guid') -%} {% if slapparameter_dict.get('custom-frontend-instance-guid') -%}
sla-instance_guid = $${slap-parameter:frontend-instance-guid} sla-instance_guid = $${slap-parameter:frontend-instance-guid}
{% endif -%} {% endif -%}
...@@ -105,11 +108,12 @@ custom-frontend-url = https://$${request-custom-frontend:connection-domain} ...@@ -105,11 +108,12 @@ custom-frontend-url = https://$${request-custom-frontend:connection-domain}
# Create all needed directories # Create all needed directories
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/ home = $${buildout:directory}
var = $${buildout:directory}/var/ etc = $${:home}/etc/
srv = $${buildout:directory}/srv/ var = $${:home}/var/
bin = $${buildout:directory}/bin/ srv = $${:home}/srv/
tmp = $${buildout:directory}/tmp/ bin = $${:home}/bin/
tmp = $${:home}/tmp/
sshkeys = $${:srv}/sshkeys sshkeys = $${:srv}/sshkeys
services = $${:etc}/service/ services = $${:etc}/service/
...@@ -120,7 +124,7 @@ run = $${:var}/run/ ...@@ -120,7 +124,7 @@ run = $${:var}/run/
backup = $${:srv}/backup/ backup = $${:srv}/backup/
promises = $${:etc}/promise/ promises = $${:etc}/promise/
test = $${:etc}/test/ test = $${:etc}/test/
nginx-data = $${directory:srv}/nginx nginx-data = $${:srv}/nginx
ca-dir = $${:srv}/ssl ca-dir = $${:srv}/ssl
project = $${:srv}/runner/project project = $${:srv}/runner/project
...@@ -189,6 +193,17 @@ software_info_json = $${runnerdirectory:home}/software_info.json ...@@ -189,6 +193,17 @@ software_info_json = $${runnerdirectory:home}/software_info.json
instance_info_json = $${runnerdirectory:home}/instance_info.json instance_info_json = $${runnerdirectory:home}/instance_info.json
path = $${shell:path} path = $${shell:path}
#---------------------------
#--
#-- supervisord managing slaprunner instance processes
[slaprunner-supervisord-wrapper]
recipe = slapos.cookbook:wrapper
# XXX hardcoded locations
command-line = $${buildout:directory}/bin/slapos node supervisord --cfg $${directory:etc}/slapos.cfg -n
wrapper-path = $${directory:services}/slaprunner-supervisord
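# The wrapper above runs "slapos node supervisord ... -n" from the services
# directory, so the supervisord that manages the instances deployed inside the
# runner is itself started and watched like any other partition service (see
# also forbid_supervisord_automatic_launch in slapos.cfg.in).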
[test-runner] [test-runner]
<= slaprunner <= slaprunner
slapos.cfg = $${directory:etc}/slapos-test.cfg slapos.cfg = $${directory:etc}/slapos-test.cfg
...@@ -204,7 +219,7 @@ auto_deploy = True ...@@ -204,7 +219,7 @@ auto_deploy = True
[runtestsuite] [runtestsuite]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:directory}/bin/slaprunnertest command-line = ${buildout:directory}/bin/slaprunnertest
wrapper-path = $${directory:bin}/runStandaloneTestSuite wrapper-path = $${directory:bin}/runTestSuite
environment = RUNNER_CONFIG=$${slapos-cfg:rendered} environment = RUNNER_CONFIG=$${slapos-cfg:rendered}
# Deploy dropbear (minimalist SSH server) # Deploy dropbear (minimalist SSH server)
...@@ -262,7 +277,6 @@ scgi_temp_path = $${directory:tmp}/scgi_temp_path ...@@ -262,7 +277,6 @@ scgi_temp_path = $${directory:tmp}/scgi_temp_path
nb_workers = 2 nb_workers = 2
# Network # Network
local-ip = $${slap-network-information:local-ipv4} local-ip = $${slap-network-information:local-ipv4}
port = 30001
global-ip = $${slap-network-information:global-ipv6} global-ip = $${slap-network-information:global-ipv6}
global-port = $${slaprunner:runner_port} global-port = $${slaprunner:runner_port}
# Backend # Backend
...@@ -355,7 +369,7 @@ path_pid = $${directory:run}/gunicorn.pid ...@@ -355,7 +369,7 @@ path_pid = $${directory:run}/gunicorn.pid
[gunicorn-launcher] [gunicorn-launcher]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = $${gunicorn:bin_gunicorn} slapos.runner:app -p $${gunicorn:path_pid} -b unix:$${gunicorn:socket} -e RUNNER_CONFIG=$${slaprunner:slapos.cfg} --error-logfile $${directory:log}/$${:error-log-file} --log-level error --preload command-line = $${gunicorn:bin_gunicorn} slapos.runner.run:app -p $${gunicorn:path_pid} -b unix:$${gunicorn:socket} -e RUNNER_CONFIG=$${slaprunner:slapos.cfg} --error-logfile $${directory:log}/$${:error-log-file} --log-level error --preload
error-log-file = gunicorn-error.log error-log-file = gunicorn-error.log
wrapper-path = $${gunicorn:bin_launcher} wrapper-path = $${gunicorn:bin_launcher}
environment = PATH=$${environ:PATH}:${git:location}/bin/ environment = PATH=$${environ:PATH}:${git:location}/bin/
...@@ -558,12 +572,7 @@ repository = $${slap-parameter:slapos-repository} ...@@ -558,12 +572,7 @@ repository = $${slap-parameter:slapos-repository}
git-executable = ${git:location}/bin/git git-executable = ${git:location}/bin/git
develop = true develop = true
location = $${directory:project}/slapos location = $${directory:project}/slapos
branch = $${slap-parameter:slapos-reference}
[slapos-repo-config]
recipe = plone.recipe.command
stop-on-error = true
command = cd $${slapos-repo:location} && ${git:location}/bin/git checkout $${slap-parameter:slapos-reference} && SR=$${slap-parameter:slapos-software} && if [ -n "$SR" ] && [ ! -f "$${directory:etc}/.project" ]; then echo workspace/slapos/$${slap-parameter:slapos-software}/ > $${directory:etc}/.project; fi
update-command = true
[prepare-software] [prepare-software]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
...@@ -599,6 +608,11 @@ stop-on-error = true ...@@ -599,6 +608,11 @@ stop-on-error = true
software-type-path = $${directory:etc}/.software_type.xml software-type-path = $${directory:etc}/.software_type.xml
command = if [ ! -f $${:software-type-path} -a "$${slap-parameter:slapos-software-type}" != "" ]; then echo "$${slap-parameter:slapos-software-type}" > $${:software-type-path}; fi command = if [ ! -f $${:software-type-path} -a "$${slap-parameter:slapos-software-type}" != "" ]; then echo "$${slap-parameter:slapos-software-type}" > $${:software-type-path}; fi
[instance-software]
recipe = plone.recipe.command
stop-on-error = true
command = SR=$${slap-parameter:slapos-software} && if [ -n "$SR" ] && [ ! -f "$${directory:etc}/.project" ]; then echo workspace/slapos/$${slap-parameter:slapos-software}/ > $${directory:etc}/.project; fi
[slap-configuration] [slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id} computer = $${slap-connection:computer-id}
...@@ -628,6 +642,11 @@ context = ...@@ -628,6 +642,11 @@ context =
raw path $PATH:${nano:location}/bin:${vim:location}/bin:${screen:location}/bin:${git:location}/bin:${curl:location}/bin:${python2.7:location}/bin raw path $PATH:${nano:location}/bin:${vim:location}/bin:${screen:location}/bin:${git:location}/bin:${curl:location}/bin:${python2.7:location}/bin
key workdir runnerdirectory:home key workdir runnerdirectory:home
#---------------------------
#--
#-- supervisord managing slaprunner automation features
[supervisord] [supervisord]
autorestart = false autorestart = false
autostart = false autostart = false
...@@ -696,6 +715,7 @@ path = $${directory:promises}/supervisord ...@@ -696,6 +715,7 @@ path = $${directory:promises}/supervisord
hostname = $${slaprunner:ipv4} hostname = $${slaprunner:ipv4}
port = $${supervisord:port} port = $${supervisord:port}
[monitor-current-log-access] [monitor-current-log-access]
< = monitor-directory-access < = monitor-directory-access
source = $${directory:log} source = $${directory:log}
...@@ -715,6 +735,13 @@ context = ...@@ -715,6 +735,13 @@ context =
key this_file :filename key this_file :filename
key httpd_graceful cgi-httpd-graceful-wrapper:rendered key httpd_graceful cgi-httpd-graceful-wrapper:rendered
[monitor-check-webrunner-internal-instance]
recipe = slapos.recipe.template:jinja2
template = ${monitor-check-webrunner-internal-instance:location}/${monitor-check-webrunner-internal-instance:filename}
rendered = $${monitor-directory:monitor-custom-scripts}/$${:filename}
filename = monitor-check-webrunner-internal-instance.py
mode = 0744
[monitor-httpd-cors] [monitor-httpd-cors]
recipe = plone.recipe.command recipe = plone.recipe.command
command = if [ ! -f $${:location} ]; then touch $${:location}; fi command = if [ ! -f $${:location} ]; then touch $${:location}; fi
......
...@@ -59,7 +59,7 @@ context = ...@@ -59,7 +59,7 @@ context =
mode = 0644 mode = 0644
[slap-configuration] [slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id} computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id} partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url} url = $${slap-connection:server-url}
......
...@@ -9,39 +9,46 @@ extends = common.cfg ...@@ -9,39 +9,46 @@ extends = common.cfg
[versions] [versions]
Flask-Auth = 0.85 Flask-Auth = 0.85
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
apache-libcloud = 0.15.1 apache-libcloud = 0.17.0
async = 0.6.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
collective.recipe.template = 1.11 collective.recipe.template = 1.11
ecdsa = 0.11 ecdsa = 0.13
erp5.util = 0.4.41 erp5.util = 0.4.42
gitdb = 0.5.4 gitdb = 0.6.4
gunicorn = 19.1.1 gunicorn = 19.3.0
plone.recipe.command = 1.1 plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
slapos.recipe.build = 0.12
slapos.recipe.download = 1.0.dev-r4053 slapos.recipe.download = 1.0.dev-r4053
slapos.toolbox = 0.40.4 slapos.recipe.template = 2.7
smmap = 0.8.2 slapos.toolbox = 0.48
smmap = 0.9.0
z3c.recipe.scripts = 1.0.1 z3c.recipe.scripts = 1.0.1
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.48
GitPython = 0.3.2.RC1 GitPython = 1.0.1
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.48
atomize = 0.2.0 atomize = 0.2.0
# Required by: # Required by:
# slapos.toolbox==0.40.4 # apache-libcloud==0.17.0
feedparser = 5.1.3 backports.ssl-match-hostname = 3.4.0.2
# Required by: # Required by:
# slapos.cookbook==0.87 # slapos.toolbox==0.48
jsonschema = 2.4.0 feedparser = 5.2.0.post1
# Required by: # Required by:
# slapos.toolbox==0.40.4 # slapos.toolbox==0.48
paramiko = 1.15.1 lockfile = 0.10.2
# Required by:
# slapos.toolbox==0.48
paramiko = 1.15.2
# Required by:
# slapos.toolbox==0.48
rpdb = 0.1.5
#!/usr/bin/python
import os
import subprocess
import sys


def runPromise(promise_path):
    promise_relative_path = promise_path.replace(os.path.expanduser('~'), '~')
    print 'Running promise %s...' % promise_relative_path
    promise_process = subprocess.Popen(promise_path, stderr=subprocess.PIPE)
    stdout, stderr = promise_process.communicate()
    return_code = promise_process.returncode
    if return_code == 0:
        print 'Success.'
        return True
    else:
        sys.stderr.write('Failure while running promise %s. %s\n' % (promise_relative_path, stderr))
        return False


def getPromisePathListFromPartitionPath(partition_path):
    promise_directory_path = os.path.join(partition_path, 'etc/promise')
    try:
        promise_name_list = os.listdir(promise_directory_path)
        return [os.path.join(promise_directory_path, promise_name)
                for promise_name in promise_name_list]
    except OSError:
        return []


def main():
    # XXX hardcoded
    partition_root_path = os.path.expanduser('~/srv/runner/instance')
    success = True
    for partition_name in os.listdir(partition_root_path):
        partition_path = os.path.join(partition_root_path, partition_name)
        for promise_path in getPromisePathListFromPartitionPath(partition_path):
            result = runPromise(promise_path)
            if not result:
                success = False
    if not success:
        sys.exit(1)


if __name__ == '__main__':
    main()
...@@ -2,20 +2,45 @@ ...@@ -2,20 +2,45 @@
LC_ALL=C LC_ALL=C
export LC_ALL export LC_ALL
umask 077 umask 077
srv_directory={{ directory['srv'] }}
sync_element () { sync_element () {
path=$1 path=$1
backup_path=$2 backup_path=$2
shift 2 shift 2
element_list=$* element_list=$*
# Concatenate the exclude file of each partition of webrunner
# to create a global exclude file.
exclude_content="instance/supervisord.socket"
for partition in $srv_directory/runner/instance/slappart*
do
exclude_file="$partition/srv/exporter.exclude"
if [ -e "$exclude_file" ]; then
partition_exclude_content_relative=$(cat "$exclude_file")
# For every pattern of the local exclude file, prefix it with the
# partition's path inside the backup (word splitting is intended here)
for line in $partition_exclude_content_relative
do
if [ ! -z "$line" ]; then
exclude_content="$exclude_content\ninstance/$(basename $partition)/$line"
fi
done
fi
done
echo "$exclude_content" > $srv_directory/exporter.exclude
for element in $element_list for element in $element_list
do do
echo "Changing current directory to $path."
cd $path; cd $path;
if [ -f $element ] || [ -d $element ]; then if [ -f $element ] || [ -d $element ]; then
{{ rsync_binary }} -rlptgov --safe-links --delete $element $backup_path; echo "Running {{ rsync_binary }} -rlptgov --safe-links --exclude-from=$srv_directory/exporter.exclude --delete --delete-excluded $element $backup_path"
{{ rsync_binary }} -rlptgov --safe-links --exclude-from=$srv_directory/exporter.exclude --delete --delete-excluded $element $backup_path;
fi fi
done done
} }
sync_element {{ directory['srv'] }}/runner {{ directory['backup'] }}/runner/ instance project proxy.db sync_element $srv_directory/runner {{ directory['backup'] }}/runner/ instance project proxy.db
# We sync .* apart # We sync .* apart
date +%s -u > {{ directory['etc'] }}/.resilient-timestamp date +%s -u > {{ directory['etc'] }}/.resilient-timestamp
cp -r {{ directory['etc'] }}/.??* {{ directory['backup'] }}/etc/ cp -r {{ directory['etc'] }}/.??* {{ directory['backup'] }}/etc/
......
#!{{ shell_binary }} #!{{ shell_binary }}
set -e
LC_ALL=C LC_ALL=C
export LC_ALL export LC_ALL
umask 077 umask 077
srv_directory={{ directory['srv'] }}
restore_element () { restore_element () {
backup_path=$1 backup_path=$1
restore_path=$2 restore_path=$2
...@@ -11,21 +13,92 @@ restore_element () { ...@@ -11,21 +13,92 @@ restore_element () {
do do
cd $backup_path; cd $backup_path;
if [ -f $element ] || [ -d $element ]; then if [ -f $element ] || [ -d $element ]; then
{{ rsync_binary }} -av --delete $backup_path/$element $restore_path; {{ rsync_binary }} -av --delete --exclude="*.sock" --exclude="*.pid" --exclude=".installed.cfg" --exclude=".installed-switch-softwaretype.cfg" $backup_path/$element $restore_path;
fi fi
done done
} }
write_backup_proof () { write_backup_proof () {
cd {{ directory['backup'] }} cd {{ directory['backup'] }}
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > {{ directory['srv'] }}/proof.signature find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > $srv_directory/proof.signature
diff -ruw {{ directory['backup'] }} {{ directory['srv'] }}/proof.signature > {{ directory['srv'] }}/backup.diff diff -ruw {{ directory['backup'] }} $srv_directory/proof.signature > $srv_directory/backup.diff || true # diff exits with code 1 when files are different
} }
# For now we just make the diff before # For now we just make the diff before
write_backup_proof write_backup_proof
restore_element {{ directory['backup'] }}/runner/ {{ directory['srv'] }}/runner instance project proxy.db restore_element {{ directory['backup'] }}/runner/ $srv_directory/runner instance project proxy.db
restore_element {{ directory['backup'] }}/etc/ {{ directory['etc'] }} config.json ssh restore_element {{ directory['backup'] }}/etc/ {{ directory['etc'] }} config.json ssh
cp -r {{ directory['backup'] }}/etc/.??* {{ directory['etc'] }}; cp -r {{ directory['backup'] }}/etc/.??* {{ directory['etc'] }};
{{ curl_binary }} --insecure -vg6L --max-time 5 {{ backend_url }}/isSRReady;
# Invoke arbitrary script to perform specific restoration
# procedure.
RESTORE_EXIT_CODE=0
runner_import_restore=$srv_directory/runner-import-restore
if [ ! -e "$runner_import_restore" ]; then
touch $runner_import_restore
chmod +x $runner_import_restore
fi
echo "Running $runner_import_restore script..."
$srv_directory/runner-import-restore || RESTORE_EXIT_CODE=$?
echo "Updating slapproxy database, software release and instances..."
HOME="{{ directory['home'] }}"
# XXX Hardcoded
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
export MAKEFLAGS=-j4
SLAPOS="{{ directory['bin'] }}/slapos"
# XXX hardcoded
SQLITE3="$HOME/software_release/parts/sqlite3/bin/sqlite3"
DATABASE="$HOME/srv/runner/proxy.db"
# Change slapproxy database to point instances to new software release
# XXX hardcoded
PARTITION=$(basename $HOME)
OLD_SOFTWARE_RELEASE=$($SQLITE3 $DATABASE "select software_release from partition11 where reference='slappart0';")
SOFTWARE_RELEASE=$(echo $OLD_SOFTWARE_RELEASE | sed -e 's/\(.*\)\(slappart\|test0-\)[0-9][0-9]\?/\1'"$PARTITION"'/')
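# Illustration (hypothetical paths): with PARTITION=slappart4, an old value of
#   /srv/slapgrid/slappart12/srv/runner/project/slapos/software/erp5/software.cfg
# is rewritten by the sed expression above to
#   /srv/slapgrid/slappart4/srv/runner/project/slapos/software/erp5/software.cfg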
$SQLITE3 $DATABASE "update partition11 set software_release='$SOFTWARE_RELEASE' where software_release NOT NULL;"
$SQLITE3 $DATABASE "update software11 set url='$SOFTWARE_RELEASE' where url='$OLD_SOFTWARE_RELEASE';" || $SQLITE3 $DATABASE "delete from software11 where url='$OLD_SOFTWARE_RELEASE';"
# Change slapproxy database to have all instances stopped
$SQLITE3 $DATABASE "update partition11 set slap_state='stopped';"
set -x
# Run slapproxy on different port (in case of running inside of erp5testnode
# with only one IP and port 50000 already taken by slapproxy of main instance)
HOST="{{ proxy_host }}"
PORT="50001"
URL="http://$HOST:$PORT"
$SLAPOS proxy start --cfg $HOME/etc/slapos.cfg --port $PORT >/dev/null 2>&1 &
SLAPPROXY_PID=$!
trap "kill $SLAPPROXY_PID" EXIT TERM INT
sleep 5
echo "Building newest software..."
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1
# Remove defined scripts to force buildout to recreate them to have updated paths
rm $srv_directory/runner/instance/slappart*/srv/runner-import-restore || true
echo "Running slapos node instance..."
# XXX hardcoded
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
# Invoke defined scripts for each partition inside of slaprunner
for partition in $srv_directory/runner/instance/slappart*/
do
script=$partition/srv/runner-import-restore
if [ -e "$script" ]; then
echo "Running $script script..."
$script || RESTORE_EXIT_CODE=$?
fi
done
# Change back slapproxy database to have all instances started
$SQLITE3 $DATABASE "update partition11 set slap_state='started';"
# Write exit code to an arbitrary file that will be checked by promise/monitor
RESTORE_EXIT_CODE_FILE="{{ restore_exit_code_file }}"
echo $RESTORE_EXIT_CODE > $RESTORE_EXIT_CODE_FILE
exit $RESTORE_EXIT_CODE
...@@ -5,6 +5,7 @@ master_url = http://{{ slaprunner['ipv4'] }}:{{ slaprunner['proxy_port'] }} ...@@ -5,6 +5,7 @@ master_url = http://{{ slaprunner['ipv4'] }}:{{ slaprunner['proxy_port'] }}
computer_id = slaprunner computer_id = slaprunner
maximal_delay = 0 maximal_delay = 0
root_check = {{ slaprunner['root_check'] }} root_check = {{ slaprunner['root_check'] }}
forbid_supervisord_automatic_launch = true
[slapformat] [slapformat]
partition_amount = {{ slaprunner['partition-amount'] }} partition_amount = {{ slaprunner['partition-amount'] }}
......
...@@ -59,7 +59,7 @@ eggs = collective.recipe.template ...@@ -59,7 +59,7 @@ eggs = collective.recipe.template
collective.recipe.template = 1.11 collective.recipe.template = 1.11
plone.recipe.command = 1.1 plone.recipe.command = 1.1
slapos.recipe.build = 0.13 slapos.recipe.build = 0.13
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
# Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test. # Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test.
[buildout] [buildout]
......
...@@ -45,6 +45,7 @@ command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link ...@@ -45,6 +45,7 @@ command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[template-jinja2-base] [template-jinja2-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 640
template = ${:_profile_base_location_}/${:filename}.in template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:directory}/${:filename} rendered = ${buildout:directory}/${:filename}
# XXX: extra-context is needed because we cannot append to a key of an extended # XXX: extra-context is needed because we cannot append to a key of an extended
......
# Wendelin with components taken from latest git versions
[buildout]
extends = software.cfg
# wendelin.core
parts -= wendelin.core
parts += wendelin.core-dev
# also tell erp5 to use -dev eggs instead of released ones
[eggs]
eggs -= ${wendelin.core:egg}
eggs += ${wendelin.core-dev:egg}
[buildout]
versions = versions
extends =
../../software/ipython_notebook/software.cfg
../../component/fluentd/buildout.cfg
../../component/matplotlib/buildout.cfg
../../component/ipython/buildout.cfg
../../component/pandas/buildout.cfg
../../component/wendelin.core/buildout.cfg
../../component/msgpack-python/buildout.cfg
../../component/scipy/buildout.cfg
../../component/scikit-learn/buildout.cfg
../../software/erp5/software.cfg
parts +=
wendelin
scipy
scikit-learn
pandas
msgpack-python
ipython
wendelin.core
matplotlib
fluentd
ipython_notebook
[eggs]
initialization =
import scipy.stats # load our own libstdc++ explicitly at the very beginning
extra-paths +=
${wendelin:location}
eggs +=
${scikit-learn:egg}
${scipy:egg}
${pandas:egg}
${msgpack-python:egg}
${wendelin.core:egg}
${ipython:egg}
${matplotlib:egg}
[erp5_repository_list]
repository_id_list += wendelin
[local-bt5-repository]
# we need to override it
list = ${erp5:location}/bt5 ${erp5:location}/product/ERP5/bootstrap ${wendelin:location}/bt5/
[wendelin]
<= erp5
repository = https://lab.nexedi.cn/nexedi/wendelin.git
branch = master
[versions]
scikit-learn = 0.16.1
scipy = 0.15.1
pandas = 0.16.1
msgpack-python = 0.4.6
numpy = 1.9.2
wendelin.core = 0.3
ipython = 3.1.0
matplotlib = 1.4.3
\ No newline at end of file
...@@ -88,4 +88,4 @@ feedparser = 5.1.3 ...@@ -88,4 +88,4 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -75,19 +75,18 @@ parts = ...@@ -75,19 +75,18 @@ parts =
[versions] [versions]
Paste = 1.7.5.1 Paste = 2.0.2
PasteScript = 1.7.5 PasteScript = 2.0.2
WSGIUtils = 0.7 WSGIUtils = 0.7
plone.recipe.command = 1.1
python-magic = 0.4.6 python-magic = 0.4.6
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
# Required by: # Required by:
# PasteScript==1.7.5 # PasteScript==2.0
# cloudooo==1.2.5-dev # cloudooo==1.2.5.dev0
PasteDeploy = 1.5.2 PasteDeploy = 1.5.2
# Required by: # Required by:
# cloudooo==1.2.5-dev # cloudooo==1.2.5.dev0
erp5.util = 0.4.41 erp5.util = 0.4.42
...@@ -18,6 +18,7 @@ extends = ...@@ -18,6 +18,7 @@ extends =
../../component/gzip/buildout.cfg ../../component/gzip/buildout.cfg
../../component/haproxy/buildout.cfg ../../component/haproxy/buildout.cfg
../../component/hookbox/buildout.cfg ../../component/hookbox/buildout.cfg
../../component/findutils/buildout.cfg
../../component/librsvg/buildout.cfg ../../component/librsvg/buildout.cfg
../../component/imagemagick/buildout.cfg ../../component/imagemagick/buildout.cfg
../../component/inkscape/buildout.cfg ../../component/inkscape/buildout.cfg
...@@ -68,6 +69,7 @@ parts = ...@@ -68,6 +69,7 @@ parts =
apache apache
apache-antiloris apache-antiloris
file file
findutils
graphviz graphviz
haproxy haproxy
jsl jsl
...@@ -97,7 +99,6 @@ parts = ...@@ -97,7 +99,6 @@ parts =
wget wget
# Buildoutish # Buildoutish
patched-eggs
eggs eggs
testrunner testrunner
test_suite_runner test_suite_runner
...@@ -133,6 +134,7 @@ parts = ...@@ -133,6 +134,7 @@ parts =
[template-jinja2-base] [template-jinja2-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 640
template = ${:_profile_base_location_}/${:filename}.in template = ${:_profile_base_location_}/${:filename}.in
rendered = ${buildout:directory}/${:filename} rendered = ${buildout:directory}/${:filename}
# XXX: extra-context is needed because we cannot append to a key of an extended # XXX: extra-context is needed because we cannot append to a key of an extended
...@@ -145,22 +147,17 @@ context = ...@@ -145,22 +147,17 @@ context =
key slapos_core_version versions:slapos.core key slapos_core_version versions:slapos.core
${:extra-context} ${:extra-context}
[mariadb-resiliency-after-import-script]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template/instance-mariadb-resiliency-after-import-script.sh.in
md5sum = a9851920bb22ae58c2eb9dc3e955250c
mode = 755
[template-mariadb] [template-mariadb]
< = template-jinja2-base recipe = slapos.recipe.build:download
filename = instance-mariadb.cfg url = ${:_profile_base_location_}/instance-mariadb.cfg.in
md5sum = e0c923a8195b5a36c7df766c1fb4bfdb md5sum = a5f9888bd882331c35d48f05e468a0a2
extra-context = mode = 640
key coreutils_location coreutils:location
key dcron_location dcron:location
key gettext_location gettext:location
key grep_location grep:location
key gzip_location gzip:location
key logrotate_location logrotate:location
key mariadb_location mariadb:location
key perl_location perl:location
key perl_siteprefix perl:siteprefix
key sed_location sed:location
key xtrabackup_location xtrabackup:location
[template-zope] [template-zope]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
...@@ -171,7 +168,7 @@ mode = 640 ...@@ -171,7 +168,7 @@ mode = 640
[template-kumofs] [template-kumofs]
< = template-jinja2-base < = template-jinja2-base
filename = instance-kumofs.cfg filename = instance-kumofs.cfg
md5sum = 40817014a41497bceb696e512436e670 md5sum = 627369560a030b006dbdd8f10ff6694a
extra-context = extra-context =
key dash_location dash:location key dash_location dash:location
key dcron_location dcron:location key dcron_location dcron:location
...@@ -182,13 +179,13 @@ extra-context = ...@@ -182,13 +179,13 @@ extra-context =
[template-tidstorage] [template-tidstorage]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-tidstorage.cfg.in url = ${:_profile_base_location_}/instance-tidstorage.cfg.in
md5sum = 2bc13fc6cd52c7b8e2b4ddb99a69974b md5sum = 20ee9db93c57425319cd4b385d327d39
mode = 640 mode = 640
[template-cloudooo] [template-cloudooo]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-cloudoo.cfg.in url = ${:_profile_base_location_}/instance-cloudoo.cfg.in
md5sum = c93c881267beadf840dc0b339a3a466a md5sum = 79eb68a9c5073535e8a98897385828a8
mode = 640 mode = 640
[template-zope-conf] [template-zope-conf]
...@@ -202,7 +199,7 @@ mode = 640 ...@@ -202,7 +199,7 @@ mode = 640
# XXX: "template.cfg" is hardcoded in instanciation recipe # XXX: "template.cfg" is hardcoded in instanciation recipe
filename = template.cfg filename = template.cfg
template = ${:_profile_base_location_}/instance.cfg.in template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 2ed28d874b5494521cb874a2474af7c5 md5sum = 0d777bfc8377e5b8ddc2e32a10f45b9b
extra-context = extra-context =
key apache_location apache:location key apache_location apache:location
key aspell_location aspell:location key aspell_location aspell:location
...@@ -217,9 +214,11 @@ extra-context = ...@@ -217,9 +214,11 @@ extra-context =
key dmtx_utils_location dmtx-utils:location key dmtx_utils_location dmtx-utils:location
key erp5_location erp5:location key erp5_location erp5:location
key file_location file:location key file_location file:location
key findutils_location findutils:location
key fontconfig_location fontconfig:location key fontconfig_location fontconfig:location
key fonts_location fonts:location key fonts_location fonts:location
key freetype_location freetype:location key freetype_location freetype:location
key gettext_location gettext:location
key git_location git:location key git_location git:location
key glib_location glib:location key glib_location glib:location
key glu_location glu:location key glu_location glu:location
...@@ -245,8 +244,11 @@ extra-context = ...@@ -245,8 +244,11 @@ extra-context =
key local_bt5_repository local-bt5-repository:list key local_bt5_repository local-bt5-repository:list
key logrotate_location logrotate:location key logrotate_location logrotate:location
key mariadb_location mariadb:location key mariadb_location mariadb:location
key mariadb_resiliency_after_import_script mariadb-resiliency-after-import-script:target
key mesa_location mesa:location key mesa_location mesa:location
key openssl_location openssl:location key openssl_location openssl:location
key perl_location perl:location
key perl_siteprefix perl:siteprefix
key poppler_location poppler:location key poppler_location poppler:location
key sed_location sed:location key sed_location sed:location
key stunnel_location stunnel:location key stunnel_location stunnel:location
...@@ -254,7 +256,7 @@ extra-context = ...@@ -254,7 +256,7 @@ extra-context =
key template_erp5_cluster template-erp5-cluster:target key template_erp5_cluster template-erp5-cluster:target
key template_erp5_single template-erp5-single:target key template_erp5_single template-erp5-single:target
key template_kumofs template-kumofs:rendered key template_kumofs template-kumofs:rendered
key template_mariadb template-mariadb:rendered key template_mariadb template-mariadb:target
key template_memcached template-memcached:rendered key template_memcached template-memcached:rendered
key template_tidstorage template-tidstorage:target key template_tidstorage template-tidstorage:target
key template_varnish template-varnish:target key template_varnish template-varnish:target
...@@ -266,12 +268,13 @@ extra-context = ...@@ -266,12 +268,13 @@ extra-context =
key wget_location wget:location key wget_location wget:location
key xdamage_location xdamage:location key xdamage_location xdamage:location
key xfixes_location xfixes:location key xfixes_location xfixes:location
key xtrabackup_location xtrabackup:location
key zlib_location zlib:location key zlib_location zlib:location
[template-memcached] [template-memcached]
< = template-jinja2-base < = template-jinja2-base
filename = instance-memcached.cfg filename = instance-memcached.cfg
md5sum = 346c864c1f119360eddb5e163f16d4f3 md5sum = de63a79e6812854c44e961f5ac3b465d
extra-context = extra-context =
key dash_location dash:location key dash_location dash:location
key dcron_location dcron:location key dcron_location dcron:location
...@@ -288,13 +291,13 @@ mode = 640 ...@@ -288,13 +291,13 @@ mode = 640
[template-erp5-cluster] [template-erp5-cluster]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-erp5-cluster.cfg.in url = ${:_profile_base_location_}/instance-erp5-cluster.cfg.in
md5sum = 1fafb23019043c069bf6e43d61010a54 md5sum = 2ff90bb68761b4930f92321f3b6ba590
mode = 640 mode = 640
[template-varnish] [template-varnish]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-varnish.cfg.in url = ${:_profile_base_location_}/instance-varnish.cfg.in
md5sum = ae0465591c22c0cb316c1706965c6b18 md5sum = 98158d0b349e3579a20ab520dfa9ebb7
mode = 640 mode = 640
[bt5-repository] [bt5-repository]
...@@ -384,22 +387,6 @@ initialization = ...@@ -384,22 +387,6 @@ initialization =
repository_id_list = list(reversed('''${erp5_repository_list:repository_id_list}'''.split())) repository_id_list = list(reversed('''${erp5_repository_list:repository_id_list}'''.split()))
sys.path[0:0] = ['/'.join(['''${buildout:parts-directory}''', x]) for x in repository_id_list] sys.path[0:0] = ['/'.join(['''${buildout:parts-directory}''', x]) for x in repository_id_list]
[patched-eggs]
recipe = minitage.recipe.egg
eggs =
Acquisition
Products.DCWorkflow
ZODB3
Acquisition-patches = ${:_profile_base_location_}/../../component/egg-patch/Acquisition/aq_dynamic.patch
Acquisition-patch-options = -p1
Acquisition-patch-binary = ${patch:location}/bin/patch
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method.patch
Products.DCWorkflow-patch-options = -p1
Products.DCWorkflow-patch-binary = ${patch:location}/bin/patch
ZODB3-patches = ${:_profile_base_location_}/../../component/egg-patch/ZODB3-3.10.5.patch
ZODB3-patch-options = -p1
ZODB3-patch-binary = ${patch:location}/bin/patch
[eggs] [eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
...@@ -415,6 +402,7 @@ eggs = ...@@ -415,6 +402,7 @@ eggs =
Pympler Pympler
SOAPpy SOAPpy
chardet chardet
collective.recipe.template
coverage coverage
elementtree elementtree
erp5diff erp5diff
...@@ -422,6 +410,7 @@ eggs = ...@@ -422,6 +410,7 @@ eggs =
interval interval
ipdb ipdb
Jinja2 Jinja2
jsonschema
mechanize mechanize
paramiko paramiko
ply ply
...@@ -506,6 +495,15 @@ scripts = ...@@ -506,6 +495,15 @@ scripts =
extra-paths = extra-paths =
${erp5:location} ${erp5:location}
# patches for eggs
patch-binary = ${patch:location}/bin/patch
Acquisition-patches = ${:_profile_base_location_}/../../component/egg-patch/Acquisition/aq_dynamic.patch
Acquisition-patch-options = -p1
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method.patch
Products.DCWorkflow-patch-options = -p1
ZODB3-patches = ${:_profile_base_location_}/../../component/egg-patch/ZODB3-3.10.5.patch
ZODB3-patch-options = -p1
[zodbanalyze] [zodbanalyze]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = eggs =
...@@ -541,16 +539,16 @@ scripts = ...@@ -541,16 +539,16 @@ scripts =
[versions] [versions]
# patched eggs # patched eggs
Acquisition = 2.13.8-ZMinitagePatched-AqDynamic Acquisition = 2.13.8+SlapOSPatched001
Products.DCWorkflow = 2.2.4-ZMinitagePatched-WorkflowMethod Products.DCWorkflow = 2.2.4+SlapOSPatched001
ZODB3 = 3.10.5-ZMinitagePatched-ZODB33105 ZODB3 = 3.10.5+SlapOSPatched001
# specify dev version to be sure that an old released version is not used # specify dev version to be sure that an old released version is not used
cloudooo = 1.2.5-dev cloudooo = 1.2.5-dev
# use newer version than specified in ZTK # use newer version than specified in ZTK
PasteDeploy = 1.5.2 PasteDeploy = 1.5.2
Pygments = 2.0.1 Pygments = 2.0.2
coverage = 3.7.1 coverage = 3.7.1
# test_UserManagerInterfaces in testERP5Security fails with 1.10.0. # test_UserManagerInterfaces in testERP5Security fails with 1.10.0.
...@@ -587,59 +585,58 @@ zope.app.testing = 3.8.1 ...@@ -587,59 +585,58 @@ zope.app.testing = 3.8.1
# Pinned versions # Pinned versions
MySQL-python = 1.2.5 MySQL-python = 1.2.5
Pillow = 2.6.1 Pillow = 2.8.2
Products.CMFActionIcons = 2.1.3 Products.CMFActionIcons = 2.1.3
Products.DCWorkflowGraph = 0.4.1 Products.DCWorkflowGraph = 0.4.1
Products.ExternalEditor = 1.1.0 Products.ExternalEditor = 1.1.0
Products.GenericSetup = 1.7.5 Products.GenericSetup = 1.7.5
Products.LongRequestLogger = 1.1.0 Products.LongRequestLogger = 1.1.0
Products.MimetypesRegistry = 2.0.6 Products.MimetypesRegistry = 2.0.7
Products.PluginRegistry = 1.3 Products.PluginRegistry = 1.3
Products.TIDStorage = 5.4.9 Products.TIDStorage = 5.4.9
PyPDF2 = 1.23 PyPDF2 = 1.24
PyXML = 0.8.5 PyXML = 0.8.5
Pympler = 0.3.1 Pympler = 0.4.1
StructuredText = 2.11.1 StructuredText = 2.11.1
WSGIUtils = 0.7 WSGIUtils = 0.7
apache-libcloud = 0.16.0 apache-libcloud = 0.17.0
astroid = 1.2.1 astroid = 1.3.6
async = 0.6.1
chardet = 2.3.0 chardet = 2.3.0
collective.recipe.template = 1.11
csp-eventlet = 0.7.0 csp-eventlet = 0.7.0
ecdsa = 0.11 ecdsa = 0.13
elementtree = 1.2.7-20070827-preview elementtree = 1.2.6.post20050316
erp5diff = 0.8.1.5 erp5diff = 0.8.1.7
eventlet = 0.15.2 eventlet = 0.17.4
five.formlib = 1.0.4 five.formlib = 1.0.4
five.localsitemanager = 2.0.5 five.localsitemanager = 2.0.5
gitdb = 0.6.0 gitdb = 0.6.4
greenlet = 0.4.5 greenlet = 0.4.6
http-parser = 0.8.3 http-parser = 0.8.3
httplib2 = 0.9 httplib2 = 0.9.1
huBarcode = 1.0.0 huBarcode = 1.0.0
interval = 1.0.0 interval = 1.0.0
ipdb = 0.8 ipdb = 0.8.1
ipython = 2.3.1 ipython = 3.1.0
logilab-common = 0.63.0 logilab-common = 0.63.2
minitage.paste = 1.4.6 numpy = 1.9.2
minitage.recipe.egg = 1.107
numpy = 1.9.1
plone.recipe.command = 1.1 plone.recipe.command = 1.1
ply = 3.4 ply = 3.6
polib = 1.0.5 polib = 1.0.6
pprofile = 1.7.2 pprofile = 1.7.3
pycountry = 1.10 pycountry = 1.10
pyflakes = 0.8.1 pycrypto = 2.6.1
pylint = 1.3.1 pyflakes = 0.9.1
python-ldap = 2.4.17 pylint = 1.4.3
python-ldap = 2.4.19
python-magic = 0.4.6 python-magic = 0.4.6
python-memcached = 1.53 python-memcached = 1.54
qrcode = 5.1 qrcode = 5.1
restkit = 4.2.2 restkit = 4.2.2
rtjp-eventlet = 0.3.2 rtjp-eventlet = 0.3.2
slapos.recipe.template = 2.5 slapos.recipe.template = 2.7
slapos.toolbox = 0.43.0 slapos.toolbox = 0.48
smmap = 0.8.3 smmap = 0.9.0
socketpool = 0.5.3 socketpool = 0.5.3
spyne = 2.11.0 spyne = 2.11.0
suds = 0.4 suds = 0.4
...@@ -647,57 +644,44 @@ threadframe = 0.2 ...@@ -647,57 +644,44 @@ threadframe = 0.2
timerserver = 2.0.2 timerserver = 2.0.2
urlnorm = 1.1.2 urlnorm = 1.1.2
uuid = 1.30 uuid = 1.30
validictory = 1.0.0a2 validictory = 1.0.0
xupdate-processor = 0.4 xupdate-processor = 0.4
# Required by: # Required by:
# slapos.toolbox==0.43.0 # slapos.toolbox==0.48
GitPython = 0.3.2.1 GitPython = 1.0.1
# Required by: # Required by:
# Products.CMFCore==2.2.8 # Products.CMFCore==2.2.8
Products.ZSQLMethods = 2.13.4 Products.ZSQLMethods = 2.13.4
# Required by: # Required by:
# slapos.toolbox==0.43.0 # slapos.toolbox==0.48
atomize = 0.2.0 atomize = 0.2.0
# Required by: # Required by:
# cloudooo==1.2.5-dev # apache-libcloud==0.17.0
erp5.util = 0.4.41 backports.ssl-match-hostname = 3.4.0.2
# Required by: # Required by:
# slapos.toolbox==0.43.0 # slapos.toolbox==0.48
feedparser = 5.1.3 feedparser = 5.2.0.post1
# Required by: # Required by:
# SOAPpy==0.12.0nxd001 # SOAPpy===0.12.0nxd001
fpconst = 0.7.2 fpconst = 0.7.2
# Required by: # Required by:
# minitage.recipe.egg==1.107 # slapos.toolbox==0.48
iniparse = 0.4 lockfile = 0.10.2
# Required by:
# minitage.core==2.0.57
minitage = 2.0.67
# Required by:
# minitage.recipe.common==1.90
# minitage.recipe.egg==1.107
minitage.core = 2.0.57
# Required by:
# minitage.recipe.egg==1.107
minitage.recipe.common = 1.90
# Required by: # Required by:
# minitage.recipe.egg==1.107 # slapos.toolbox==0.48
ordereddict = 1.1 paramiko = 1.15.2
# Required by: # Required by:
# slapos.toolbox==0.43.0 # slapos.toolbox==0.48
paramiko = 1.15.1 rpdb = 0.1.5
# Required by: # Required by:
# zope.app.testing==3.8.1 # zope.app.testing==3.8.1
......
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
parts = parts =
publish-cloudooo-connection-information publish-cloudooo-connection-information
cloudooo-instance cloudooo-instance
resiliency-exclude-file
promise promise
promise-openoffice promise-openoffice
eggs-directory = {{ eggs_directory }} eggs-directory = {{ eggs_directory }}
...@@ -49,6 +50,12 @@ url-list = {{ json.get('font_url_list', []) | join(' ') }} ...@@ -49,6 +50,12 @@ url-list = {{ json.get('font_url_list', []) | join(' ') }}
service-folder = ${directory:service} service-folder = ${directory:service}
onetimedownload_path = {{ bin_directory }}/onetimedownload onetimedownload_path = {{ bin_directory }}/onetimedownload
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: **
output = ${directory:srv}/exporter.exclude
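# The single "**" pattern excludes the whole partition from the rsync backup
# performed by the resilient webrunner exporter (presumably because cloudooo
# keeps no persistent state that needs to be restored).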
[promise] [promise]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/cloudooo path = ${directory:promise}/cloudooo
......
...@@ -29,6 +29,7 @@ partition-id = ${slap-connection:partition-id} ...@@ -29,6 +29,7 @@ partition-id = ${slap-connection:partition-id}
<=request-common <=request-common
name = MariaDB DataBase name = MariaDB DataBase
software-type = mariadb software-type = mariadb
config-mariadb-json = ${slap-parameter:mariadb-json}
sla-computer_guid = ${slap-parameter:mariadb-computer-guid} sla-computer_guid = ${slap-parameter:mariadb-computer-guid}
[request-cloudooo] [request-cloudooo]
......
...@@ -6,6 +6,7 @@ parts = ...@@ -6,6 +6,7 @@ parts =
logrotate-entry-kumofs logrotate-entry-kumofs
cron cron
cron-entry-logrotate cron-entry-logrotate
resiliency-exclude-file
promise-kumofs-server promise-kumofs-server
promise-kumofs-server-listen promise-kumofs-server-listen
promise-kumofs-gateway promise-kumofs-gateway
...@@ -115,6 +116,12 @@ name = logrotate ...@@ -115,6 +116,12 @@ name = logrotate
frequency = 0 0 * * * frequency = 0 0 * * *
command = ${logrotate:wrapper} command = ${logrotate:wrapper}
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: **
output = ${rootdirectory:srv}/exporter.exclude
# Deploy zope promises scripts # Deploy zope promises scripts
[promise-template] [promise-template]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
......
{% if software_type == slap_software_type -%}
{% set json = json_module.loads(slapparameter_dict.get('mariadb-json', '{}')) -%}
{% set backup_periodicity = json.get('backup-periodicity', '0 22 * * *') -%}
{% set bin_directory = parameter_dict['buildout-bin-directory'] -%}
[buildout] [buildout]
parts = parts =
publish-mariadb-url publish-mariadb-url
...@@ -7,7 +12,10 @@ parts = ...@@ -7,7 +12,10 @@ parts =
cron cron
cron-entry-logrotate cron-entry-logrotate
cron-entry-mariadb-backup cron-entry-mariadb-backup
cron-entry-mariadb-backup-expire
binary-link binary-link
resiliency-exclude-file
resiliency-after-import-script
promise promise
eggs-directory = {{ eggs_directory }} eggs-directory = {{ eggs_directory }}
...@@ -18,14 +26,44 @@ offline = true ...@@ -18,14 +26,44 @@ offline = true
recipe = slapos.cookbook:publishurl recipe = slapos.cookbook:publishurl
url = mysql://${mariadb-instance:user}:${mariadb-instance:password}@${mariadb-instance:ip}:${mariadb-instance:port}/${mariadb-instance:database} url = mysql://${mariadb-instance:user}:${mariadb-instance:password}@${mariadb-instance:ip}:${mariadb-instance:port}/${mariadb-instance:database}
[binary-wrap-base]
recipe = slapos.cookbook:wrapper
# Note: --defaults-file must be the first argument, otherwise wrapped binary
# will reject it.
command-line = "{{ mariadb_location }}/bin/${:command}" --defaults-file="${mariadb-instance:conf-file}"
wrapper-path = ${rootdirectory:bin}/${:command}
parameters-extra = true
[binary-wrap-mysqldump]
< = binary-wrap-base
command = mysqldump
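# For illustration: the generated ${rootdirectory:bin}/mysqldump wrapper calls
# the mariadb mysqldump binary with --defaults-file pointing at this instance's
# configuration and, because parameters-extra is enabled, appends any extra
# arguments it receives (such as the options used by the backup cron entry below).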
[cron-entry-mariadb-backup] [cron-entry-mariadb-backup]
<= cron
recipe = slapos.cookbook:cron.d recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = mariadb-backup name = mariadb-backup
frequency = 0 0 * * * frequency = {{ backup_periodicity }}
command = ${mariadb-instance:backup-script} {# When binlogs are enabled:
# flush-logs: used so no manipulation on binlogs is needed to restore from
# full + binlogs. The first binlog after a dump starts from dump snapshot and
# can be fully restored.
# master-data: use value "2" as we are not in a replication case
#}
command = "${binary-wrap-mysqldump:wrapper-path}" -u root --all-databases --single-transaction --flush-logs --master-data=2 --socket=${mariadb-instance:socket} | {{ gzip_location }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ coreutils_location }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
{# KEEP GLOB PATTERN IN SYNC with generated filenames above
# YYYYmmddHHMMSS -#}
file-glob = ??????????????.sql.gz
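{# Illustration only (hypothetical file and socket names): a full dump plus the
   binlogs written after it can be restored with something like
     zcat 20150601220000.sql.gz | mysql -u root --socket=<socket>
     mysqlbinlog <binlog-directory>/binlog.000042 | mysql -u root --socket=<socket>
   --master-data=2 stores the binlog file/position matching the dump as a
   comment inside the dump, which indicates which binlog to replay first. -#}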
[cron-entry-mariadb-backup-expire]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = mariadb-backup-expire
frequency = {{ backup_periodicity }}
command = {{ findutils_location }}/bin/find "${directory:mariadb-backup-full}" -maxdepth 1 -name "${cron-entry-mariadb-backup:file-glob}" -daystart -mtime +7 -delete
[mariadb-instance] [mariadb-instance]
# XXX: remove backup part of this recipe. Backup part is now done in previous sections.
# Keeping it is harmless because it is supposed to be launched by cron entry, which has been removed.
recipe = slapos.cookbook:generic.mysql recipe = slapos.cookbook:generic.mysql
# Options # Options
...@@ -39,6 +77,11 @@ test-database = erp5_test ...@@ -39,6 +77,11 @@ test-database = erp5_test
mysql-test-database-base = testdb mysql-test-database-base = testdb
mysql-test-user-base = testuser mysql-test-user-base = testuser
# Backup
binlog-path = ${directory:mariadb-backup-incremental}/binlog
# XXX: binlog rotation happens along with other log's rotation
binlog-expire-days = 7
# Paths # Paths
wrapper = ${basedirectory:services}/mariadb wrapper = ${basedirectory:services}/mariadb
update-wrapper = ${basedirectory:services}/mariadb_update update-wrapper = ${basedirectory:services}/mariadb_update
...@@ -153,6 +196,21 @@ link-binary = ...@@ -153,6 +196,21 @@ link-binary =
{{ sed_location }}/bin/sed {{ sed_location }}/bin/sed
{{ mariadb_location }}/bin/mysql {{ mariadb_location }}/bin/mysql
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: srv/mariadb/**
output = ${rootdirectory:srv}/exporter.exclude
[resiliency-after-import-script]
# Generate after import script used by importer instance of webrunner
recipe = collective.recipe.template
input = {{ mariadb_resiliency_after_import_script }}
output = ${rootdirectory:srv}/runner-import-restore
mode = 755
dash = {{ dash_location }}/bin/dash
[rootdirectory] [rootdirectory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc etc = ${buildout:directory}/etc
...@@ -188,3 +246,4 @@ port = ${mariadb-instance:port} ...@@ -188,3 +246,4 @@ port = ${mariadb-instance:port}
[slap-parameter] [slap-parameter]
test-database-amount = 100 test-database-amount = 100
{%- endif %}
...@@ -9,6 +9,7 @@ parts = ...@@ -9,6 +9,7 @@ parts =
logrotate-entry-kumofs logrotate-entry-kumofs
cron cron
cron-entry-logrotate cron-entry-logrotate
resiliency-exclude-file
promise-kumofs-server promise-kumofs-server
promise-kumofs-server-listen promise-kumofs-server-listen
promise-kumofs-gateway promise-kumofs-gateway
...@@ -120,6 +121,12 @@ name = logrotate ...@@ -120,6 +121,12 @@ name = logrotate
frequency = 0 0 * * * frequency = 0 0 * * *
command = ${logrotate:wrapper} command = ${logrotate:wrapper}
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: **
output = ${rootdirectory:srv}/exporter.exclude
# Deploy zope promises scripts # Deploy zope promises scripts
[promise-template] [promise-template]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
......
...@@ -8,6 +8,7 @@ Note: all port counters are pre-incremented. No idea why base port is skipped. ...@@ -8,6 +8,7 @@ Note: all port counters are pre-incremented. No idea why base port is skipped.
{% set current_apache_port = apache_port_base | int -%} {% set current_apache_port = apache_port_base | int -%}
{% set current_haproxy_port = haproxy_port_base | int -%} {% set current_haproxy_port = haproxy_port_base | int -%}
{% set json = json_module.loads(slapparameter_dict['json']) -%} {% set json = json_module.loads(slapparameter_dict['json']) -%}
{% set backup_periodicity = json.get('backup-periodicity', 'daily') -%}
{% set bin_directory = parameter_dict['buildout-bin-directory'] -%} {% set bin_directory = parameter_dict['buildout-bin-directory'] -%}
{# {#
XXX: This template only supports exactly one IPv4 and one IPv6 per XXX: This template only supports exactly one IPv4 and one IPv6 per
...@@ -105,6 +106,7 @@ instance-tests = ${:instance}/tests ...@@ -105,6 +106,7 @@ instance-tests = ${:instance}/tests
log = ${:var}/log log = ${:var}/log
logrotate-backup = ${:backup}/logrotate logrotate-backup = ${:backup}/logrotate
logrotate-entries = ${:etc}/logrotate.d logrotate-entries = ${:etc}/logrotate.d
promise = ${:etc}/promise
run = ${:var}/run run = ${:var}/run
services = ${:etc}/run services = ${:etc}/run
srv = ${buildout:directory}/srv srv = ${buildout:directory}/srv
...@@ -113,6 +115,7 @@ tmp = ${buildout:directory}/tmp ...@@ -113,6 +115,7 @@ tmp = ${buildout:directory}/tmp
var = ${buildout:directory}/var var = ${buildout:directory}/var
zodb = ${:srv}/zodb zodb = ${:srv}/zodb
zodb-backup = ${:backup}/zodb zodb-backup = ${:backup}/zodb
zodb-recovery-test = ${:srv}/zodbrecoverytest
############################# #############################
# Binary symlinking # Binary symlinking
...@@ -145,6 +148,60 @@ link-binary = ...@@ -145,6 +148,60 @@ link-binary =
{{ parameter_dict['poppler'] }}/bin/pdfunite {{ parameter_dict['poppler'] }}/bin/pdfunite
{{ parameter_dict['dmtx-utils'] }}/bin/dmtxwrite {{ parameter_dict['dmtx-utils'] }}/bin/dmtxwrite
#############################
# Resiliency
#############################
# Used for ERP5 resiliency or (more probably)
# webrunner resiliency with erp5 inside.
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: srv/zodb/**
output = ${directory:srv}/exporter.exclude
[resiliency-after-import-script]
# Generate after import script used by importer instance of webrunner
recipe = collective.recipe.template
input = inline: #!/bin/sh
# DO NOT RUN THIS SCRIPT ON PRODUCTION INSTANCE
# OR ZODB DATA WILL BE ERASED.
# This script will restore the repozo backup to the real
# zodb location. It is launched by the clone (importer) instance of webrunner
# in the end of the import script.
# Its exit code is collected by the importer script, which records
# the status of the restoration (success or failure).
zodb_directory="${directory:zodb}"
zodb_backup_directory="${directory:zodb-backup}"
repozo="${tidstorage:repozo-binary}"
EXIT_CODE=0
{% for zeo_id, zeo_configuration_list in json['zeo'].iteritems() -%}
{% for zeo_slave in zeo_configuration_list -%}
storage_name="{{ zeo_slave['storage-name'] }}"
zodb_path="$storage_name.fs"
pid_file=${zeo-instance-{{ zeo_id }}:pid-path}
if [ -e "$pid_file" ]; then
pid=$(cat "$pid_file" 2>/dev/null)
if kill -0 "$pid" 2>/dev/null; then
echo "Zeo is already running with pid $pid. Aborting."
exit 1
fi
fi
echo "Removing $zodb_path..."
echo "Restoring $storage_name into $zodb_path..."
$repozo --recover --output="$zodb_directory/$zodb_path" --repository="$zodb_backup_directory/$storage_name"
CURRENT_EXIT_CODE=$?
if [ ! "$CURRENT_EXIT_CODE"="0" ]; then
EXIT_CODE="$CURRENT_EXIT_CODE"
echo "$storage_name Backup restoration failed."
fi
{% endfor -%}
{% endfor -%}
exit $EXIT_CODE
output = ${directory:srv}/runner-import-restore
mode = 755
############################# #############################
# CA # CA
############################# #############################
...@@ -504,7 +561,7 @@ repozo-wrapper = ${buildout:bin-directory}/tidstorage-repozo ...@@ -504,7 +561,7 @@ repozo-wrapper = ${buildout:bin-directory}/tidstorage-repozo
< = cron-base < = cron-base
recipe = slapos.cookbook:cron.d recipe = slapos.cookbook:cron.d
name = tidstorage name = tidstorage
frequency = 0 0 * * * frequency = {{ backup_periodicity }}
command = ${tidstorage:repozo-wrapper} command = ${tidstorage:repozo-wrapper}
[logrotate-entry-tidstorage] [logrotate-entry-tidstorage]
...@@ -524,6 +581,8 @@ parts = ...@@ -524,6 +581,8 @@ parts =
cron-entry-logrotate cron-entry-logrotate
certificate-authority certificate-authority
erp5-certificate-authority erp5-certificate-authority
resiliency-exclude-file
resiliency-after-import-script
tidstorage tidstorage
cron-entry-tidstorage-backup cron-entry-tidstorage-backup
logrotate-entry-tidstorage logrotate-entry-tidstorage
......
...@@ -6,6 +6,7 @@ parts = ...@@ -6,6 +6,7 @@ parts =
varnish-instance varnish-instance
cron cron
cron-entry-logrotate cron-entry-logrotate
resiliency-exclude-file
{# When web_checker related parameter is given, web_checker will be enabled.#} {# When web_checker related parameter is given, web_checker will be enabled.#}
{% if web_checker_parameter is defined %} {% if web_checker_parameter is defined %}
web-checker web-checker
...@@ -111,6 +112,12 @@ sharedscripts = true ...@@ -111,6 +112,12 @@ sharedscripts = true
notifempty = true notifempty = true
create = true create = true
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: **
output = ${rootdirectory:srv}/exporter.exclude
[basedirectory] [basedirectory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
services = ${rootdirectory:etc}/run services = ${rootdirectory:etc}/run
......
...@@ -124,6 +124,33 @@ extra-context = ...@@ -124,6 +124,33 @@ extra-context =
# Must match the key id in [switch-softwaretype] which uses this section. # Must match the key id in [switch-softwaretype] which uses this section.
raw software_type varnish raw software_type varnish
[dynamic-template-mariadb-parameters]
buildout-bin-directory = {{ buildout_bin_directory }}
[dynamic-template-mariadb]
< = jinja2-template-base
template = {{ template_mariadb }}
filename = instance-mariadb.cfg
extra-context =
section parameter_dict dynamic-template-mariadb-parameters
raw coreutils_location {{ coreutils_location }}
raw dash_location {{ dash_location }}
raw dcron_location {{ dcron_location }}
raw findutils_location {{ findutils_location }}
raw gettext_location {{ gettext_location }}
raw grep_location {{ grep_location }}
raw gzip_location {{ gzip_location }}
raw logrotate_location {{ logrotate_location }}
raw mariadb_location {{ mariadb_location }}
raw mariadb_resiliency_after_import_script {{ mariadb_resiliency_after_import_script }}
raw perl_location {{ perl_location }}
raw perl_siteprefix {{ perl_siteprefix }}
raw sed_location {{ sed_location }}
raw xtrabackup_location {{ xtrabackup_location }}
import json_module json
# Must match the key id in [switch-softwaretype] which uses this section.
raw software_type mariadb
[dynamic-template-zope-parameters] [dynamic-template-zope-parameters]
apache = {{ apache_location }} apache = {{ apache_location }}
aspell = {{ aspell_location }} aspell = {{ aspell_location }}
...@@ -227,7 +254,7 @@ kumofs = {{ template_kumofs }} ...@@ -227,7 +254,7 @@ kumofs = {{ template_kumofs }}
memcached = {{ template_memcached }} memcached = {{ template_memcached }}
cloudooo = ${dynamic-template-cloudooo:rendered} cloudooo = ${dynamic-template-cloudooo:rendered}
zope = ${dynamic-template-zope:rendered} zope = ${dynamic-template-zope:rendered}
mariadb = {{ template_mariadb }} mariadb = ${dynamic-template-mariadb:rendered}
tidstorage = ${dynamic-template-tidstorage:rendered} tidstorage = ${dynamic-template-tidstorage:rendered}
varnish = ${dynamic-template-varnish:rendered} varnish = ${dynamic-template-varnish:rendered}
......
#!${:dash}
# DO NOT RUN THIS SCRIPT ON PRODUCTION INSTANCE
# OR MYSQL DATA WILL BE ERASED.
# This script will import the dump of the mysql database to the real
# database. It is launched by the clone (importer) instance of webrunner
# at the end of the import script.
# Depending on the output, it will create a file containing
# the status of the restoration (success or failure)
set -e
mysql_executable="${mariadb-instance:mysql-binary}"
mysqldump_executable="${binary-wrap-mysqldump:wrapper-path}"
mariadb_data_directory="${directory:mariadb-data}"
mariadb_backup_directory="${directory:mariadb-backup-full}"
instance_directory="${buildout:directory}"
pid_file="${mariadb-instance:pid-file}"
binlog_path="${mariadb-instance:binlog-path}"
# Make sure mariadb is not already running
if [ -e "$pid_file" ]; then
  pid=$(cat "$pid_file" 2>/dev/null)
  if [ -n "$pid" ] && kill -0 "$pid" 2>/dev/null; then
echo "Mariadb is already running with pid $pid. Aborting."
exit 1
fi
fi
echo "Deleting existing database..."
rm -r $mariadb_data_directory/* >/dev/null 2>&1 || true
echo "Adapting binlog database to new paths..."
new_binlog_directory="$(dirname $binlog_path)"
binlog_index_file="$new_binlog_directory/binlog.index"
old_binlog_directory="$(dirname $(head -n 1 $binlog_index_file))"
sed -e "s|$old_binlog_directory|$new_binlog_directory|g" $binlog_index_file > $binlog_index_file
echo "Starting mariadb..."
# XXX hardcoded
$instance_directory/etc/run/mariadb &
mysqld_pid=$!
trap "kill $mysqld_pid" EXIT TERM INT
sleep 30
# If mysql has stopped, abort
if ! [ -d /proc/$mysqld_pid ]; then
echo "mysqld exited, aborting."
exit 1
fi
$instance_directory/etc/run/mariadb_update &
mariadb_update_pid=$!
sleep 60
# If mariadb_update is still running, abort
if [ -d /proc/$mariadb_update_pid ]; then
echo "mariadb_update still running after timeout, aborting."
kill $mariadb_update_pid
exit 1
fi
echo "Importing data..."
# Use latest dump XXX can contain funny characters
dump=$(ls -r $mariadb_backup_directory | head -1)
zcat "$mariadb_backup_directory/$dump" | $mysql_executable -u root --socket="$instance_directory/var/run/mariadb.sock"
RESTORE_EXIT_CODE=$?
if [ $RESTORE_EXIT_CODE -eq 0 ]; then
echo 'Backup restoration successfully completed.'
else
echo 'Backup restoration failed.'
fi
exit $RESTORE_EXIT_CODE
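The script above only reports the outcome through its messages and exit code; the status file mentioned in its header comment has to be written by whatever launches it on the importer side. A sketch of such a wrapper (hypothetical paths, not the actual importer code):

import subprocess
import time

def run_restore(script_path, status_path):
    # Run the restore script and record success/failure with a timestamp.
    returncode = subprocess.call([script_path])
    status = 'success' if returncode == 0 else 'failure'
    with open(status_path, 'w') as f:
        f.write('%s %s\n' % (time.strftime('%Y-%m-%d %H:%M:%S'), status))
    return returncode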
...@@ -63,4 +63,4 @@ feedparser = 5.1.1 ...@@ -63,4 +63,4 @@ feedparser = 5.1.1
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -206,4 +206,4 @@ feedparser = 5.1.3 ...@@ -206,4 +206,4 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
...@@ -203,7 +203,7 @@ feedparser = 5.1.3 ...@@ -203,7 +203,7 @@ feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox==0.40.2 # slapos.toolbox==0.40.2
paramiko = 1.15.1 paramiko = 1.15.2
# Required by: # Required by:
# slapos.recipe.maarch==0.4 # slapos.recipe.maarch==0.4
......
...@@ -48,7 +48,7 @@ mode = 0644 ...@@ -48,7 +48,7 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
download-only = true download-only = true
md5sum = cb2f15850d3dc82459a0044adb4416cf md5sum = 5b12e864f1762d7984f7d4863d0b795d
destination = ${buildout:parts-directory}/monitor-template-monitor-bin destination = ${buildout:parts-directory}/monitor-template-monitor-bin
filename = monitor.py.in filename = monitor.py.in
mode = 0644 mode = 0644
...@@ -57,7 +57,7 @@ mode = 0644 ...@@ -57,7 +57,7 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
download-only = true download-only = true
md5sum = 2d48f8b8e01fa0fdde964ed1c1547f05 md5sum = 93e1dda50cb71bfe29966b2946c02dd1
filename = cgi-httpd.conf.in filename = cgi-httpd.conf.in
mode = 0644 mode = 0644
...@@ -119,7 +119,7 @@ mode = 0644 ...@@ -119,7 +119,7 @@ mode = 0644
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
download-only = true download-only = true
md5sum = 5f1b93ccdea7c3031aef396154c64938 md5sum = 6c84a826778cb059754623f39b33651b
destination = ${buildout:parts-directory}/monitor-template-rss-bin destination = ${buildout:parts-directory}/monitor-template-rss-bin
filename = status2rss.py filename = status2rss.py
mode = 0644 mode = 0644
......
PidFile "{{ httpd_configuration.get('pid-file') }}" PidFile "{{ httpd_configuration.get('pid-file') }}"
StartServers 1
ServerLimit 1
ThreadLimit 4
ThreadsPerChild 4
ServerName example.com ServerName example.com
ServerAdmin someone@email ServerAdmin someone@email
<IfDefine !MonitorPort> <IfDefine !MonitorPort>
......
#!{{ python_executable }} #!{{ python_executable }}
import datetime
import json import json
import os import os
import subprocess import subprocess
...@@ -36,30 +35,32 @@ option_list = [ ...@@ -36,30 +35,32 @@ option_list = [
] ]
class Popen(subprocess.Popen): class Popen(subprocess.Popen):
__timeout = None
def timeout(self, delay, delay_before_kill=5):
if self.__timeout is not None: self.__timeout.cancel()
self.__timeout = threading.Timer(delay, self.stop, [delay_before_kill])
self.__timeout.start()
def waiter():
self.wait()
self.__timeout.cancel()
threading.Thread(target=waiter).start()
def stop(self, delay_before_kill=5):
if self.__timeout is not None: self.__timeout.cancel()
self.terminate()
t = threading.Timer(delay_before_kill, self.kill)
t.start()
r = self.wait()
t.cancel()
return r
def init_db(): def set_timeout(self, timeout):
db = sqlite3.connect(db_path) self.set_timeout = None # assert we're not called twice
c = db.cursor() event = threading.Event()
c.executescript(""" event.__killed = False # we just need a mutable
def t():
# do not call wait() or poll() because they're not thread-safe
if not event.wait(timeout) and self.returncode is None:
# race condition if waitpid completes just before the signal sent ?
self.terminate()
event.__killed = True
if event.wait(5):
return
if self.returncode is None:
self.kill() # same race as for terminate ?
t = threading.Thread(target=t)
t.daemon = True
t.start()
def killed():
event.set()
t.join()
return event.__killed
return killed
def init_db(db):
db.executescript("""
CREATE TABLE IF NOT EXISTS status ( CREATE TABLE IF NOT EXISTS status (
timestamp INTEGER UNIQUE, timestamp INTEGER UNIQUE,
status VARCHAR(255)); status VARCHAR(255));
...@@ -69,8 +70,6 @@ CREATE TABLE IF NOT EXISTS individual_status ( ...@@ -69,8 +70,6 @@ CREATE TABLE IF NOT EXISTS individual_status (
element VARCHAR(255), element VARCHAR(255),
output TEXT); output TEXT);
""") """)
db.commit()
db.close()
def getListOfScripts(directory): def getListOfScripts(directory):
""" """
...@@ -109,59 +108,43 @@ def runServices(directory): ...@@ -109,59 +108,43 @@ def runServices(directory):
def runScripts(directory): def runScripts(directory):
scripts = getListOfScripts(directory)
# XXX script_timeout could be passed as parameters # XXX script_timeout could be passed as parameters
script_timeout = 60 # in seconds script_timeout = 60 # in seconds
result = {} result = {}
for script in scripts: with open(os.devnull, 'r+') as f:
command = [os.path.join(promise_dir, script)] for script in getListOfScripts(directory):
script = os.path.basename(command[0]) command = os.path.join(promise_dir, script),
result[script] = '' script = os.path.basename(script)
result[script] = ''
process_handler = Popen(command,
cwd=instance_path, p = Popen(command, cwd=instance_path,
env=None if sys.platform == 'cygwin' else {}, env=None if sys.platform == 'cygwin' else {},
stdout=subprocess.PIPE, stdin=f, stdout=f, stderr=subprocess.PIPE)
stderr=subprocess.PIPE, killed = p.set_timeout(script_timeout)
stdin=subprocess.PIPE) stderr = p.communicate()[1]
process_handler.stdin.flush() if killed():
process_handler.stdin.close() result[script] = "Time Out"
process_handler.stdin = None elif p.returncode:
process_handler.timeout(script_timeout)
process_handler.wait()
if process_handler.poll() is None:
process_handler.terminate()
result[script] = "Time Out"
elif process_handler.poll() != 0:
stderr = process_handler.communicate()[1]
if stderr is not None:
result[script] = stderr.strip() result[script] = stderr.strip()
return result return result
def writeFiles(monitors): def writeFiles(monitors):
timestamp = int(time.time()) timestamp = int(time.time())
date = datetime.datetime.now().ctime()
init_db()
db = sqlite3.connect(db_path) db = sqlite3.connect(db_path)
fail = False init_db(db)
status = SUCCESS
for key, value in monitors.iteritems(): for key, value in monitors.iteritems():
element_status = SUCCESS if value:
if value != "" : element_status = status = FAILURE
fail = True else:
element_status = FAILURE element_status = SUCCESS
db.execute("insert into individual_status(timestamp, element, output, status) values (?, ?, ?, ?)", (timestamp, key, value, element_status)) db.execute("insert into individual_status(timestamp, element, output, status) values (?, ?, ?, ?)", (timestamp, key, value, element_status))
db.commit()
status = SUCCESS
if fail:
status = FAILURE
db.execute("insert into status(timestamp, status) values (?, ?)", (timestamp, status)) db.execute("insert into status(timestamp, status) values (?, ?)", (timestamp, status))
db.commit() db.commit()
db.close() db.close()
monitors['datetime'] = date monitors['datetime'] = time.ctime(timestamp)
open(monitoring_file_json, "w+").write(json.dumps(monitors)) json.dump(monitors, open(monitoring_file_json, "w+"))
def main(): def main():
parser = OptionParser(option_list=option_list) parser = OptionParser(option_list=option_list)
...@@ -183,10 +166,6 @@ def main(): ...@@ -183,10 +166,6 @@ def main():
print json.dumps(monitors) print json.dumps(monitors)
else: else:
writeFiles(monitors) writeFiles(monitors)
if len(monitors) == 0:
exit(0)
else:
exit(1)
if __name__ == "__main__": if __name__ == "__main__":
......
...@@ -34,9 +34,12 @@ for row in rows: ...@@ -34,9 +34,12 @@ for row in rows:
event_time = datetime.datetime.fromtimestamp(line_timestamp).strftime('%Y-%m-%d %H:%M:%S') event_time = datetime.datetime.fromtimestamp(line_timestamp).strftime('%Y-%m-%d %H:%M:%S')
individual_rows = db.execute("select status, element, output from individual_status where timestamp=?", (line_timestamp,))
description = '\n'.join(['%s: %s %s' % row for row in individual_rows])
rss_item = PyRSS2Gen.RSSItem( rss_item = PyRSS2Gen.RSSItem(
title = status, title = status,
description = "%s: %s" % (event_time, status), description = "%s: %s\n%s" % (event_time, status, description),
link = LINK, link = LINK,
pubDate = event_time, pubDate = event_time,
guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (event_time, status))) guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (event_time, status)))
......
...@@ -38,7 +38,7 @@ eggs = collective.recipe.template ...@@ -38,7 +38,7 @@ eggs = collective.recipe.template
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready.cfg.in url = ${:_profile_base_location_}/pbsready.cfg.in
output = ${buildout:directory}/pbsready.cfg output = ${buildout:directory}/pbsready.cfg
md5sum = e89d8378cc610704b518a89b095d3a19 md5sum = 7d7cb70cb76a41f853e74cc34a9ef428
mode = 0644 mode = 0644
[pbsready-import] [pbsready-import]
...@@ -47,7 +47,7 @@ mode = 0644 ...@@ -47,7 +47,7 @@ mode = 0644
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-import.cfg.in url = ${:_profile_base_location_}/pbsready-import.cfg.in
output = ${buildout:directory}/pbsready-import.cfg output = ${buildout:directory}/pbsready-import.cfg
md5sum = a5570ecfeff7a9d1b5f8be08db4feefe md5sum = dd13497575d13b92c3abb0a633777e2c
mode = 0644 mode = 0644
[pbsready-export] [pbsready-export]
...@@ -56,20 +56,20 @@ mode = 0644 ...@@ -56,20 +56,20 @@ mode = 0644
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-export.cfg.in url = ${:_profile_base_location_}/pbsready-export.cfg.in
output = ${buildout:directory}/pbsready-export.cfg output = ${buildout:directory}/pbsready-export.cfg
md5sum = 879fff114d1dbf1f58774ccbce9bdd22 md5sum = bfd71e454140cf13179d408e10f95bf8
mode = 0644 mode = 0644
[template-pull-backup] [template-pull-backup]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg.in url = ${:_profile_base_location_}/instance-pull-backup.cfg.in
output = ${buildout:directory}/instance-pull-backup.cfg output = ${buildout:directory}/instance-pull-backup.cfg
md5sum = 3866b0d4d2872f693b7d9519a668e6bc md5sum = 0e6a95e7a9b38d402f94c11b7d10397e
mode = 0644 mode = 0644
[template-replicated] [template-replicated]
recipe = slapos.recipe.download recipe = slapos.recipe.download
url = ${:_profile_base_location_}/template-replicated.cfg.in url = ${:_profile_base_location_}/template-replicated.cfg.in
md5sum = c781ae17375c26b08b2a11bd9b800db8 md5sum = 0641a65142fe0f624bf7c112081c2527
mode = 0644 mode = 0644
destination = ${buildout:directory}/template-replicated.cfg.in destination = ${buildout:directory}/template-replicated.cfg.in
...@@ -92,7 +92,7 @@ output = ${buildout:directory}/instance-frozen.cfg ...@@ -92,7 +92,7 @@ output = ${buildout:directory}/instance-frozen.cfg
[resilient-web-takeover-cgi-script-download] [resilient-web-takeover-cgi-script-download]
recipe = slapos.recipe.download recipe = slapos.recipe.download
url = ${:_profile_base_location_}/resilient-web-takeover-cgi-script.py.in url = ${:_profile_base_location_}/resilient-web-takeover-cgi-script.py.in
md5sum = e6262c5cf9b1c4d1ea4d959fdcbe3070 md5sum = 3aa7624af1196062d7d01946d4de9f0e
mode = 0644 mode = 0644
destination = ${buildout:directory}/resilient-web-takeover-cgi-script.py.in destination = ${buildout:directory}/resilient-web-takeover-cgi-script.py.in
...@@ -121,11 +121,6 @@ mode = 0644 ...@@ -121,11 +121,6 @@ mode = 0644
find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz
[versions] [versions]
# Pin Jinja2 to 2.6, as 2.7 breaks current code
Jinja2 = 2.6
# ... And newer s.r.template requires Jinja2 >= 2.7
slapos.recipe.template = 2.4.2
rdiff-backup = 1.3.4nxd2 rdiff-backup = 1.3.4nxd2
slapos.cookbook = 0.92
...@@ -83,6 +83,7 @@ notifier-callbacks = $${basedirectory:notifier}/callbacks ...@@ -83,6 +83,7 @@ notifier-callbacks = $${basedirectory:notifier}/callbacks
[equeue] [equeue]
recipe = slapos.cookbook:equeue recipe = slapos.cookbook:equeue
socket = $${basedirectory:run}/equeue.sock socket = $${basedirectory:run}/equeue.sock
lockfile = $${basedirectory:run}/equeue.lock
log = $${basedirectory:log}/equeue.log log = $${basedirectory:log}/equeue.log
database = $${rootdirectory:srv}/equeue.db database = $${rootdirectory:srv}/equeue.db
wrapper = $${basedirectory:services}/equeue wrapper = $${basedirectory:services}/equeue
......
...@@ -5,7 +5,6 @@ extends = ${pbsready:output} ...@@ -5,7 +5,6 @@ extends = ${pbsready:output}
# Explicitly define extended parts from pbsready # Explicitly define extended parts from pbsready
# then add local parts # then add local parts
parts = parts =
resiliency
logrotate logrotate
logrotate-entry-cron logrotate-entry-cron
logrotate-entry-equeue logrotate-entry-equeue
......
...@@ -5,7 +5,6 @@ extends = ${pbsready:output} ...@@ -5,7 +5,6 @@ extends = ${pbsready:output}
# Explicitly define extended parts from pbsready # Explicitly define extended parts from pbsready
# then add local parts # then add local parts
parts = parts =
resiliency
logrotate logrotate
logrotate-entry-cron logrotate-entry-cron
logrotate-entry-equeue logrotate-entry-equeue
...@@ -18,6 +17,7 @@ parts = ...@@ -18,6 +17,7 @@ parts =
dropbear-server-pbs-authorized-key dropbear-server-pbs-authorized-key
notifier notifier
resiliency-takeover-script
resilient-web-takeover-cgi-script resilient-web-takeover-cgi-script
resilient-web-takeover-httpd-wrapper resilient-web-takeover-httpd-wrapper
resilient-web-takeover-httpd-promise resilient-web-takeover-httpd-promise
...@@ -48,6 +48,20 @@ recipe = slapos.cookbook:notifier.callback ...@@ -48,6 +48,20 @@ recipe = slapos.cookbook:notifier.callback
on-notification-id = $${slap-parameter:on-notification} on-notification-id = $${slap-parameter:on-notification}
callback = $${importer:wrapper} callback = $${importer:wrapper}
###########
# Generate the takeover script
###########
[resiliency-takeover-script]
recipe = slapos.cookbook:addresiliency
wrapper-takeover = $${rootdirectory:bin}/takeover
takeover-triggered-file-path = $${rootdirectory:srv}/takeover_triggered
# Add path of file created by takeover script when takeover is triggered
# Takeover script will create this file
# equeue process will watch for file existence.
[equeue]
takeover-triggered-file-path = $${resiliency-takeover-script:takeover-triggered-file-path}
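The comment above describes the contract: the takeover script creates the flag file, and the equeue process only has to check for its existence before doing any further work on the clone. A sketch of that guard (illustrative, not the slapos.cookbook equeue implementation):

import os

takeover_triggered_file = 'srv/takeover_triggered'  # hypothetical path

def process_callback(callback):
    # Once takeover happened, this clone became the main instance:
    # stop importing so the restored data is not overwritten.
    if os.path.exists(takeover_triggered_file):
        return False
    callback()
    return True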
########### ###########
# Deploy a webserver allowing to do takeover from a web browser. # Deploy a webserver allowing to do takeover from a web browser.
########### ###########
......
[buildout] [buildout]
parts = parts =
resiliency
logrotate logrotate
logrotate-entry-cron logrotate-entry-cron
logrotate-entry-equeue logrotate-entry-equeue
...@@ -126,18 +125,6 @@ frequency = daily ...@@ -126,18 +125,6 @@ frequency = daily
rotate-num = 30 rotate-num = 30
#----------------
#--
#-- Resiliency script for the node takeover
[resiliency]
recipe = slapos.cookbook:addresiliency
wrapper-takeover = takeover
services = $${basedirectory:services}
bin = $${rootdirectory:bin}
etc = $${rootdirectory:etc}
#---------------- #----------------
#-- #--
#-- Sets up an rdiff-backup server (with a dropbear server for ssh) #-- Sets up an rdiff-backup server (with a dropbear server for ssh)
...@@ -157,6 +144,7 @@ rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup ...@@ -157,6 +144,7 @@ rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup
[equeue] [equeue]
recipe = slapos.cookbook:equeue recipe = slapos.cookbook:equeue
socket = $${basedirectory:run}/equeue.sock socket = $${basedirectory:run}/equeue.sock
lockfile = $${basedirectory:run}/equeue.lock
log = $${basedirectory:log}/equeue.log log = $${basedirectory:log}/equeue.log
database = $${rootdirectory:srv}/equeue.db database = $${rootdirectory:srv}/equeue.db
wrapper = $${basedirectory:services}/equeue wrapper = $${basedirectory:services}/equeue
......
#!${buildout:executable} #!${buildout:executable}
equeue_database = '${equeue:database}'
equeue_lockfile = '${equeue:lockfile}'
takeover_script = '${resiliency-takeover-script:wrapper-takeover}'
import cgi import cgi
import cgitb import cgitb
import datetime
import gdbm
import os import os
import shutil
import subprocess import subprocess
import sys import sys
import tempfile
cgitb.enable() cgitb.enable()
def getLatestBackupDate():
"""
Get the date of the latest successful backup.
"""
# Create a copy of the db (locked by equeue process)
temporary_directory = tempfile.mkdtemp()
equeue_database_copy = os.path.join(temporary_directory, 'equeue.db')
shutil.copyfile(equeue_database, equeue_database_copy)
db = gdbm.open(equeue_database_copy)
# Usually, there is only one callback (so only one key
# in the db), but if there are several:
# Take the "oldest" one (oldest value).
  last_backup = float(db[db.keys()[0]])
for callback in db.keys():
timestamp = float(db[callback])
if timestamp < last_backup:
last_backup = timestamp
return datetime.datetime.fromtimestamp(last_backup)
def isBackupInProgress():
"""
Check if backup is in progress (importer script is running)
by checking if equeue lockfile exists.
"""
# XXX: check if file is valid
return os.path.exists(equeue_lockfile)
print "Content-Type: text/html" print "Content-Type: text/html"
print print
form = cgi.FieldStorage() form = cgi.FieldStorage()
if "password" not in form: if "password" not in form:
...@@ -17,12 +53,14 @@ if "password" not in form: ...@@ -17,12 +53,14 @@ if "password" not in form:
<p>Calling takeover will stop and freeze the current main instance, and make this clone instance the new main instance, replacing the old one.</p> <p>Calling takeover will stop and freeze the current main instance, and make this clone instance the new main instance, replacing the old one.</p>
<p><b>Warning: submit the form only if you understand what you are doing.</b></p> <p><b>Warning: submit the form only if you understand what you are doing.</b></p>
<p>Note: the password asked here can be found within the parameters of your SlapOS instance page.</p> <p>Note: the password asked here can be found within the parameters of your SlapOS instance page.</p>
<p>Last valid backup: %s</p>
<p>Importer script(s) of backup in progress: %s</p>
<form action="/"> <form action="/">
Password: <input type="text" name="password"> Password: <input type="text" name="password">
<input type="submit" value="Take over" style="background: red;"> <input type="submit" value="Take over" style="background: red;">
</form> </form>
</body> </body>
</html>""" </html>""" % (getLatestBackupDate().strftime('%Y-%m-%d %H:%M:%S'), isBackupInProgress())
sys.exit(0) sys.exit(0)
if form['password'].value != '${:password}': if form['password'].value != '${:password}':
...@@ -31,6 +69,6 @@ if form['password'].value != '${:password}': ...@@ -31,6 +69,6 @@ if form['password'].value != '${:password}':
sys.exit(1) sys.exit(1)
# XXX hardcoded location # XXX hardcoded location
result = subprocess.check_output([os.path.expanduser("~/bin/takeover")], stderr=subprocess.STDOUT) result = subprocess.check_output([takeover_script], stderr=subprocess.STDOUT)
print 'Success.' print 'Success.'
print '<pre>%s</pre>' % result print '<pre>%s</pre>' % result
...@@ -70,7 +70,7 @@ name = {{namebase}}{{id}} ...@@ -70,7 +70,7 @@ name = {{namebase}}{{id}}
software-url = ${slap-connection:software-release-url} software-url = ${slap-connection:software-release-url}
software-type = {{typeimport}} software-type = {{typeimport}}
return = ssh-public-key ssh-url notification-url ip return = ssh-public-key ssh-url notification-url ip takeover-url takeover-password
pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-push pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-push
...@@ -99,6 +99,8 @@ sla-{{ key }} = {{ value }} ...@@ -99,6 +99,8 @@ sla-{{ key }} = {{ value }}
[publish-connection-informations] [publish-connection-informations]
feed-url-{{namebase}}-{{id}}-push = ${request-pbs-{{namebase}}-{{id}}:connection-feeds-url}${request-{{namebase}}-pseudo-replicating-{{id}}:pbs-notification-id} feed-url-{{namebase}}-{{id}}-push = ${request-pbs-{{namebase}}-{{id}}:connection-feeds-url}${request-{{namebase}}-pseudo-replicating-{{id}}:pbs-notification-id}
takeover-{{namebase}}-{{id}}-url = ${request-{{namebase}}-pseudo-replicating-{{id}}:connection-takeover-url}
takeover-{{namebase}}-{{id}}-password = ${request-{{namebase}}-pseudo-replicating-{{id}}:connection-takeover-password}
{% endfor -%} {% endfor -%}
......
...@@ -20,9 +20,11 @@ extensions += ...@@ -20,9 +20,11 @@ extensions +=
# Use shacache and lxml # Use shacache and lxml
extends = extends =
../component/python-2.7/buildout.cfg ../component/git/buildout.cfg
../component/lxml-python/buildout.cfg ../component/lxml-python/buildout.cfg
../component/python-2.7/buildout.cfg
../component/python-cffi/buildout.cfg ../component/python-cffi/buildout.cfg
../component/python-cliff/buildout.cfg
../component/python-cryptography/buildout.cfg ../component/python-cryptography/buildout.cfg
# Separate from site eggs # Separate from site eggs
...@@ -84,10 +86,11 @@ recipe = zc.recipe.egg ...@@ -84,10 +86,11 @@ recipe = zc.recipe.egg
eggs = eggs =
${lxml-python:egg} ${lxml-python:egg}
${python-cffi:egg} ${python-cffi:egg}
${python-cliff:egg}
${python-cryptography:egg} ${python-cryptography:egg}
pyOpenSSL pyOpenSSL
slapos.cookbook slapos.cookbook
cliff slapos.libnetworkcache
hexagonit.recipe.download hexagonit.recipe.download
inotifyx inotifyx
netaddr netaddr
...@@ -100,105 +103,106 @@ eggs = ...@@ -100,105 +103,106 @@ eggs =
[versions] [versions]
# Use SlapOS patched zc.buildout # Use SlapOS patched zc.buildout
zc.buildout = 1.7.1-dev-SlapOS-003 zc.buildout = 1.7.1.post9
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2) # Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 1.3.2nxd001 zc.recipe.egg = 1.3.2.post4
# Use own version of h.r.download to be able to open xz-like archives # Use own version of h.r.download to be able to open .xz and .lz archives
hexagonit.recipe.download = 1.7nxd002 hexagonit.recipe.download = 1.7.post4
Jinja2 = 2.7.3 Jinja2 = 2.7.3
MarkupSafe = 0.23 MarkupSafe = 0.23
Werkzeug = 0.9.6 Werkzeug = 0.10.4
buildout-versions = 1.7 buildout-versions = 1.7
cffi = 0.8.6 cffi = 1.1.2
cmd2 = 0.6.7 cmd2 = 0.6.8
cryptography = 0.6 collective.recipe.template = 1.11
cryptography = 0.9.1
inotifyx = 0.2.2
itsdangerous = 0.24 itsdangerous = 0.24
lxml = 3.4.0 lxml = 3.4.4
meld3 = 1.0.0 meld3 = 1.0.2
mr.developer = 1.31 mr.developer = 1.33
netaddr = 0.7.14
pbr = 1.1.1
prettytable = 0.7.2 prettytable = 0.7.2
pyOpenSSL = 0.14 psutil = 2.2.1
pyOpenSSL = 0.15.1
pyparsing = 2.0.3 pyparsing = 2.0.3
setuptools = 7.0 pytz = 2015.4
six = 1.8.0 requests = 2.7.0
simplejson = 3.6.5 setuptools = 16.0
slapos.cookbook = 0.87 simplejson = 3.7.3
slapos.core = 1.3.4 six = 1.9.0
slapos.cookbook = 0.102
slapos.core = 1.3.10
slapos.extension.strip = 0.1
slapos.libnetworkcache = 0.14.2 slapos.libnetworkcache = 0.14.2
slapos.recipe.build = 0.14 slapos.recipe.build = 0.21
slapos.recipe.cmmi = 0.2 slapos.recipe.cmmi = 0.2
stevedore = 1.1.0 stevedore = 1.5.0
xml-marshaller = 0.9.7
z3c.recipe.scripts = 1.0.1
# Required by: # Required by:
# slapos.core==1.3.4 # slapos.core==1.3.10
Flask = 0.10.1 Flask = 0.10.1
# Required by: # Required by:
# cliff==1.8.0 # cliff==1.13.0
# stevedore==1.1.0 # stevedore==1.5.0
argparse = 1.2.2 argparse = 1.3.0
# Required by: # Required by:
# slapos.core==1.3.4 # slapos.core==1.3.10
cliff = 1.8.0 cliff = 1.13.0
# Required by:
# slapos.cookbook==0.87
inotifyx = 0.2.2
# Required by: # Required by:
# slapos.cookbook==0.87 # cryptography==0.9.1
lock-file = 2.0 enum34 = 1.0.4
# Required by: # Required by:
# slapos.cookbook==0.87 # jsonschema==2.5.1
netaddr = 0.7.12 functools32 = 3.2.3.post1
# Required by: # Required by:
# slapos.core==1.3.4 # cryptography==0.9.1
netifaces = 0.10.4 idna = 2.0
# Required by: # Required by:
# cliff==1.8.0 # cryptography==0.9.1
# stevedore==1.1.0 ipaddress = 1.0.7
pbr = 0.10.0
# Required by: # Required by:
# pbr==0.10.0 # slapos.cookbook==0.102
pip = 1.5.6 jsonschema = 2.5.1
# Required by: # Required by:
# slapos.core==1.3.4 # slapos.cookbook==0.102
psutil = 2.1.3 lock-file = 2.0
# Required by: # Required by:
# cffi==0.8.6 # slapos.core==1.3.10
pycparser = 2.10 netifaces = 0.10.4
# Required by: # Required by:
# slapos.cookbook==0.87 # cryptography==0.9.1
pytz = 2014.9 pyasn1 = 0.1.7
# Required by: # Required by:
# slapos.core==1.3.4 # cffi==1.1.2
requests = 2.4.3 pycparser = 2.14
# Required by: # Required by:
# slapos.core==1.3.4 # slapos.core==1.3.10
supervisor = 3.1.3 supervisor = 3.1.3
# Required by: # Required by:
# slapos.core==1.3.4 # slapos.core==1.3.10
uritemplate = 0.6 uritemplate = 0.6
# Required by: # Required by:
# slapos.cookbook==0.87 # slapos.core==1.3.10
xml-marshaller = 0.9.7 zope.interface = 4.1.2
# Required by:
# slapos.core==1.3.4
zope.interface = 4.1.1
[networkcache] [networkcache]
download-cache-url = http://www.shacache.org/shacache download-cache-url = http://www.shacache.org/shacache
......