Commit bd3c2b18, authored Dec 14, 2020 by Jérome Perrin
Remove httpd from ERP5 and use haproxy instead
See merge request nexedi/slapos!858

Parents: aeece80c 82a249b6
Showing 13 changed files with 672 additions and 202 deletions (+672 -202)
component/socat/buildout.cfg              +1   -0
software/erp5/test/test/__init__.py       +0   -4
software/erp5/test/test/test_balancer.py  +193 -25
software/erp5/test/test/test_erp5.py      +55  -37
software/slapos-master/buildout.hash.cfg  +4   -0
software/slapos-master/haproxy.cfg.in     +48  -0
software/slapos-master/software.cfg       +3   -1
stack/erp5/buildout.cfg                   +8   -0
stack/erp5/buildout.hash.cfg              +7   -3
stack/erp5/haproxy.cfg.in                 +187 -26
stack/erp5/instance-balancer.cfg.in       +142 -105
stack/erp5/instance.cfg.in                +5   -1
stack/erp5/rsyslogd.cfg.in                +19  -0
component/socat/buildout.cfg

@@ -7,6 +7,7 @@ parts =
 [socat]
 recipe = slapos.recipe.cmmi
+shared = true
 url = http://www.dest-unreach.org/socat/download/socat-${:version}.tar.gz
 version = 1.7.3.2
 md5sum = aec3154f7854580cfab0c2d81e910519
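The socat component becomes a shared dependency of the ERP5 stack because the rewritten balancer exposes the HAProxy admin socket through a bin/haproxy-socat-stats wrapper (exercised by test_balancer.py below). For illustration only, a plain-Python sketch of what talking to that admin socket looks like; the socket path is a made-up example, the real one is provided by the instance:

import socket

def haproxy_show_stat(socket_path='/example/partition/var/run/haproxy.sock'):
    # "show stat" on the HAProxy admin socket returns CSV, one row per
    # frontend/backend/server, e.g. rows starting with "family_default,FRONTEND,".
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect(socket_path)
    s.sendall(b'show stat\n')
    chunks = []
    while True:
        data = s.recv(4096)
        if not data:
            break
        chunks.append(data)
    s.close()
    return b''.join(chunks).decode()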
software/erp5/test/test/__init__.py

@@ -48,10 +48,6 @@ def setUpModule():
 class ERP5InstanceTestCase(SlapOSInstanceTestCase):
   """ERP5 base test case
   """
-  # ERP5 instanciation needs to run several times before being ready, as
-  # the root instance request more instances.
-  instance_max_retry = 7  # XXX how many times ?
-
   def getRootPartitionConnectionParameterDict(self):
     """Return the output paramters from the root partition"""
     return json.loads(
software/erp5/test/test/test_balancer.py

@@ -50,7 +50,31 @@ class EchoHTTPServer(ManagedHTTPServer):
       self.end_headers()
       self.wfile.write(response)

-    log_message = logging.getLogger(__name__ + '.HeaderEchoHandler').info
+    log_message = logging.getLogger(__name__ + '.EchoHTTPServer').info
+
+
+class EchoHTTP11Server(ManagedHTTPServer):
+  """An HTTP/1.1 Server responding with the request path and incoming headers,
+  encoded in json.
+  """
+  class RequestHandler(BaseHTTPRequestHandler):
+    protocol_version = 'HTTP/1.1'
+
+    def do_GET(self):
+      # type: () -> None
+      self.send_response(200)
+      self.send_header("Content-Type", "application/json")
+      response = json.dumps(
+          {
+              'Path': self.path,
+              'Incoming Headers': self.headers.dict
+          },
+          indent=2,
+      )
+      self.send_header("Content-Length", len(response))
+      self.end_headers()
+      self.wfile.write(response)
+
+    log_message = logging.getLogger(__name__ + '.EchoHTTP11Server').info


 class CaucaseService(ManagedResource):

@@ -105,6 +129,7 @@ class CaucaseService(ManagedResource):
     shutil.rmtree(self.directory)

 class BalancerTestCase(ERP5InstanceTestCase):
   @classmethod

@@ -147,14 +172,14 @@ class BalancerTestCase(ERP5InstanceTestCase):
 class SlowHTTPServer(ManagedHTTPServer):
-  """An HTTP Server which reply after 3 seconds.
+  """An HTTP Server which reply after 2 seconds.
   """
   class RequestHandler(BaseHTTPRequestHandler):
     def do_GET(self):
       # type: () -> None
       self.send_response(200)
       self.send_header("Content-Type", "text/plain")
-      time.sleep(3)
+      time.sleep(2)
       self.end_headers()
       self.wfile.write("OK\n")

@@ -179,12 +204,13 @@ class TestAccessLog(BalancerTestCase, CrontabMixin):
         urlparse.urljoin(self.default_balancer_url, '/url_path'),
         verify=False,
     )
+    time.sleep(.5)  # wait a bit more until access is logged
     with open(os.path.join(self.computer_partition_root_path, 'var', 'log', 'apache-access.log')) as access_log_file:
-      access_line = access_log_file.read()
+      access_line = access_log_file.read().splitlines()[-1]
     self.assertIn('/url_path', access_line)

-    # last \d is the request time in microseconds, since this SlowHTTPServer
-    # sleeps for 3 seconds, it should take between 3 and 4 seconds to process
+    # last \d is the request time in milliseconds, since this SlowHTTPServer
+    # sleeps for 2 seconds, it should take between 2 and 3 seconds to process
     # the request - but our test machines can be slow sometimes, so we tolerate
     # it can take up to 20 seconds.
     match = re.match(

@@ -194,8 +220,8 @@ class TestAccessLog(BalancerTestCase, CrontabMixin):
     self.assertTrue(match)
     assert match
     request_time = int(match.groups()[-1])
-    self.assertGreater(request_time, 3 * 1000 * 1000)
-    self.assertLess(request_time, 20 * 1000 * 1000)
+    self.assertGreater(request_time, 2 * 1000)
+    self.assertLess(request_time, 20 * 1000)

   def test_access_log_apachedex_report(self):
     # type: () -> None

@@ -334,17 +360,132 @@ class TestBalancer(BalancerTestCase):
         requests.get(self.default_balancer_url, verify=False, cookies=cookies).text,
         'backend_web_server1')

+  def test_balancer_stats_socket(self):
+    # real time statistics can be obtained by using the stats socket and there
+    # is a wrapper which makes this a bit easier.
+    socat_process = subprocess.Popen(
+        [self.computer_partition_root_path + '/bin/haproxy-socat-stats'],
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT)
+    try:
+      output, _ = socat_process.communicate("show stat\n")
+    except:
+      socat_process.kill()
+      socat_process.wait()
+      raise
+    self.assertEqual(socat_process.poll(), 0)
+    # output is a csv
+    self.assertIn('family_default,FRONTEND,', output)
+
+
+class TestTestRunnerEntryPoints(BalancerTestCase):
+  """Check balancer has some entries for test runner.
+  """
+  __partition_reference__ = 't'
+
+  @classmethod
+  def _getInstanceParameterDict(cls):
+    # type: () -> Dict
+    parameter_dict = super(
+        TestTestRunnerEntryPoints,
+        cls,
+    )._getInstanceParameterDict()
+
+    parameter_dict['dummy_http_server-test-runner-address-list'] = [
+        [
+            cls.getManagedResource("backend_0", EchoHTTPServer).hostname,
+            cls.getManagedResource("backend_0", EchoHTTPServer).port,
+        ],
+        [
+            cls.getManagedResource("backend_1", EchoHTTPServer).hostname,
+            cls.getManagedResource("backend_1", EchoHTTPServer).port,
+        ],
+        [
+            cls.getManagedResource("backend_2", EchoHTTPServer).hostname,
+            cls.getManagedResource("backend_2", EchoHTTPServer).port,
+        ],
+    ]
+    return parameter_dict

+  def test_use_proper_backend(self):
+    # requests are directed to proper backend based on URL path
+    test_runner_url_list = self.getRootPartitionConnectionParameterDict(
+    )['default-test-runner-url-list']
+    url_0, url_1, url_2 = test_runner_url_list
+    self.assertEqual(
+        urlparse.urlparse(url_0).netloc,
+        urlparse.urlparse(url_1).netloc)
+    self.assertEqual(
+        urlparse.urlparse(url_0).netloc,
+        urlparse.urlparse(url_2).netloc)
+
+    path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
+        netloc=urlparse.urlparse(url_0).netloc)
+    path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
+        netloc=urlparse.urlparse(url_0).netloc)
+    path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
+        netloc=urlparse.urlparse(url_0).netloc)
+
+    self.assertEqual(
+        {
+            requests.get(url_0 + 'something', verify=False).json()['Path']
+            for _ in range(10)
+        }, {path_0})
+    self.assertEqual(
+        {
+            requests.get(url_1 + 'something', verify=False).json()['Path']
+            for _ in range(10)
+        }, {path_1})
+    self.assertEqual(
+        {
+            requests.get(url_2 + 'something', verify=False).json()['Path']
+            for _ in range(10)
+        }, {path_2})
+
+    # If a test runner backend is down, others can be accessed.
+    self.getManagedResource("backend_0", EchoHTTPServer).close()
+    self.assertEqual(
+        {
+            requests.get(url_0 + 'something', verify=False).status_code
+            for _ in range(5)
+        }, {503})
+    self.assertEqual(
+        {
+            requests.get(url_1 + 'something', verify=False).json()['Path']
+            for _ in range(10)
+        }, {path_1})


 class TestHTTP(BalancerTestCase):
-  """Check HTTP protocol
+  """Check HTTP protocol with a HTTP/1.1 backend
   """
+  @classmethod
+  def _getInstanceParameterDict(cls):
+    # type: () -> Dict
+    parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict()
+    # use a HTTP/1.1 server instead
+    parameter_dict['dummy_http_server'] = [
+        [cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
+    return parameter_dict
+
   __partition_reference__ = 'h'

   def test_http_version(self):
     # type: () -> None
-    # https://stackoverflow.com/questions/37012486/python-3-x-how-to-get-http-version-using-requests-library/37012810
     self.assertEqual(
-        requests.get(self.default_balancer_url, verify=False).raw.version, 11)
+        subprocess.check_output([
+            'curl',
+            '--silent',
+            '--show-error',
+            '--output', '/dev/null',
+            '--insecure',
+            '--write-out', '%{http_version}',
+            self.default_balancer_url,
+        ]),
+        '2',
+    )

   def test_keep_alive(self):
     # type: () -> None

@@ -372,24 +513,27 @@ class TestHTTP(BalancerTestCase):
 class ContentTypeHTTPServer(ManagedHTTPServer):
-  """An HTTP Server which reply with content type from path.
+  """An HTTP/1.1 Server which reply with content type from path.

   For example when requested http://host/text/plain it will reply
   with Content-Type: text/plain header.
   The body is always "OK"
   """
   class RequestHandler(BaseHTTPRequestHandler):
+    protocol_version = 'HTTP/1.1'
     def do_GET(self):
       # type: () -> None
       self.send_response(200)
       if self.path == '/':
+        self.send_header("Content-Length", 0)
         return self.end_headers()
       content_type = self.path[1:]
+      body = "OK"
       self.send_header("Content-Type", content_type)
+      self.send_header("Content-Length", len(body))
       self.end_headers()
-      self.wfile.write("OK")
+      self.wfile.write(body)

     log_message = logging.getLogger(__name__ + '.ContentTypeHTTPServer').info

@@ -431,9 +575,9 @@ class TestContentEncoding(BalancerTestCase):
       resp = requests.get(urlparse.urljoin(self.default_balancer_url, content_type), verify=False)
       self.assertEqual(resp.headers['Content-Type'], content_type)
       self.assertEqual(
-          resp.headers['Content-Encoding'],
+          resp.headers.get('Content-Encoding'),
           'gzip',
-          '%s uses wrong encoding: %s' % (content_type, resp.headers['Content-Encoding']))
+          '%s uses wrong encoding: %s' % (content_type, resp.headers.get('Content-Encoding')))
       self.assertEqual(resp.text, 'OK')

   def test_no_gzip_encoding(self):

@@ -443,8 +587,8 @@ class TestContentEncoding(BalancerTestCase):
     self.assertEqual(resp.text, 'OK')

-class CaucaseClientCertificate(ManagedResource):
-  """A client certificate issued by a caucase services.
+class CaucaseCertificate(ManagedResource):
+  """A certificate signed by a caucase service.
   """
   ca_crt_file = None  # type: str

@@ -560,7 +704,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
   def _getInstanceParameterDict(cls):
     # type: () -> Dict
     frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService)
-    certificate = cls.getManagedResource('client_certificate', CaucaseClientCertificate)
+    certificate = cls.getManagedResource('client_certificate', CaucaseCertificate)
     certificate.request(u'shared frontend', frontend_caucase)
     parameter_dict = super(TestFrontendXForwardedFor, cls)._getInstanceParameterDict()

@@ -576,7 +720,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
   def test_x_forwarded_for_added_when_verified_connection(self):
     # type: () -> None
-    client_certificate = self.getManagedResource('client_certificate', CaucaseClientCertificate)
+    client_certificate = self.getManagedResource('client_certificate', CaucaseCertificate)

     for backend in ('default', 'default-auth'):
       balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])[backend]

@@ -586,7 +730,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
           cert=(client_certificate.cert_file, client_certificate.key_file),
           verify=False,
       ).json()
-      self.assertEqual(result['Incoming Headers'].get('x-forwarded-for').split(', ')[0], '1.2.3.4')
+      self.assertEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4')

   def test_x_forwarded_for_stripped_when_not_verified_connection(self):
     # type: () -> None

@@ -596,7 +740,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
         headers={'X-Forwarded-For': '1.2.3.4'},
         verify=False,
     ).json()
-    self.assertNotEqual(result['Incoming Headers'].get('x-forwarded-for').split(', ')[0], '1.2.3.4')
+    self.assertNotEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4')
     balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default-auth']
     with self.assertRaises(OpenSSL.SSL.Error):
       requests.get(

@@ -606,6 +750,30 @@ class TestFrontendXForwardedFor(BalancerTestCase):
       )

+class TestServerTLSProvidedCertificate(BalancerTestCase):
+  """Check that certificate and key can be provided as instance parameters.
+  """
+  __partition_reference__ = 's'
+
+  @classmethod
+  def _getInstanceParameterDict(cls):
+    # type: () -> Dict
+    server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
+    server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
+    server_certificate.request(cls._ipv4_address.decode(), server_caucase)
+    parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict()
+    with open(server_certificate.cert_file) as f:
+      parameter_dict['ssl']['cert'] = f.read()
+    with open(server_certificate.key_file) as f:
+      parameter_dict['ssl']['key'] = f.read()
+    return parameter_dict
+
+  def test_certificate_validates_with_provided_ca(self):
+    # type: () -> None
+    server_certificate = self.getManagedResource("server_certificate", CaucaseCertificate)
+    requests.get(self.default_balancer_url, verify=server_certificate.ca_crt_file)
+
+
 class TestClientTLS(BalancerTestCase):
   __partition_reference__ = 'c'

@@ -613,11 +781,11 @@ class TestClientTLS(BalancerTestCase):
   def _getInstanceParameterDict(cls):
     # type: () -> Dict
     frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService)
-    certificate1 = cls.getManagedResource('client_certificate1', CaucaseClientCertificate)
+    certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate)
     certificate1.request(u'client_certificate1', frontend_caucase1)
     frontend_caucase2 = cls.getManagedResource('frontend_caucase2', CaucaseService)
-    certificate2 = cls.getManagedResource('client_certificate2', CaucaseClientCertificate)
+    certificate2 = cls.getManagedResource('client_certificate2', CaucaseCertificate)
     certificate2.request(u'client_certificate2', frontend_caucase2)
     parameter_dict = super(TestClientTLS, cls)._getInstanceParameterDict()

@@ -646,7 +814,7 @@ class TestClientTLS(BalancerTestCase):
         ('client_certificate2', 'frontend_caucase2'),
     ):
       client_certificate = self.getManagedResource(client_certificate_name,
-                                                   CaucaseClientCertificate)
+                                                   CaucaseCertificate)
       # when client certificate can be authenticated, backend receive the CN of
      # the client certificate in "remote-user" header
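The VirtualHostBase/VirtualHostRoot paths asserted in TestTestRunnerEntryPoints above are produced by the path rewrite that the new balancer template (stack/erp5/haproxy.cfg.in, further down) applies to test-runner requests with http-request replace-path. A small illustration of that rewrite using Python's re module, with example values taken from the template's documentation block ([::1]:8004 and unit_test_0):

import re

name, ip, port = 'unit_test_0', '[::1]', 8004
pattern = '^/%s(.*)' % name
replacement = '/VirtualHostBase/https/%s:%s/VirtualHostRoot/_vh_%s\\1' % (ip, port, name)

# A request for /unit_test_0/something is forwarded to Zope as:
print(re.sub(pattern, replacement, '/unit_test_0/something'))
# /VirtualHostBase/https/[::1]:8004/VirtualHostRoot/_vh_unit_test_0/something
# so Zope's VirtualHostMonster generates URLs with https://[::1]:8004/unit_test_0/ as base.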
software/erp5/test/test/test_erp5.py

@@ -43,23 +43,44 @@ setUpModule  # pyflakes
 class TestPublishedURLIsReachableMixin(object):
   """Mixin that checks that default page of ERP5 is reachable.
   """

-  def _checkERP5IsReachable(self, url):
+  def _checkERP5IsReachable(self, base_url, site_id, verify):
+    # We access ERP5 trough a "virtual host", which should make
+    # ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
+    # as base.
+    virtual_host_url = urlparse.urljoin(
+        base_url,
+        '/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'.format(site_id))
+
     # What happens is that instanciation just create the services, but does not
     # wait for ERP5 to be initialized. When this test run ERP5 instance is
     # instanciated, but zope is still busy creating the site and haproxy replies
     # with 503 Service Unavailable when zope is not started yet, with 404 when
     # erp5 site is not created, with 500 when mysql is not yet reachable, so we
-    # retry in a loop until we get a succesful response.
-    for i in range(1, 60):
-      r = requests.get(url, verify=False)  # XXX can we get CA from caucase already ?
-      if r.status_code != requests.codes.ok:
-        delay = i * 2
-        self.logger.warn("ERP5 was not available, sleeping for %ds and retrying", delay)
-        time.sleep(delay)
-        continue
-      r.raise_for_status()
-      break
-
+    # configure this requests session to retry.
+    # XXX we should probably add a promise instead
+    session = requests.Session()
+    session.mount(
+        base_url,
+        requests.adapters.HTTPAdapter(
+            max_retries=requests.packages.urllib3.util.retry.Retry(
+                total=60,
+                backoff_factor=.5,
+                status_forcelist=(404, 500, 503))))
+
+    r = session.get(virtual_host_url, verify=verify, allow_redirects=False)
+    self.assertEqual(r.status_code, requests.codes.found)
+    # access on / are redirected to login form, with virtual host preserved
+    self.assertEqual(r.headers.get('location'), 'https://virtual-host-name:1234/virtual_host_root/login_form')
+
+    # login page can be rendered and contain the text "ERP5"
+    r = session.get(
+        urlparse.urljoin(base_url, '{}/login_form'.format(site_id)),
+        verify=verify,
+        allow_redirects=False,
+    )
+    self.assertEqual(r.status_code, requests.codes.ok)
     self.assertIn("ERP5", r.text)

   def test_published_family_default_v6_is_reachable(self):

@@ -67,14 +88,20 @@ class TestPublishedURLIsReachableMixin(object):
     """
     param_dict = self.getRootPartitionConnectionParameterDict()
     self._checkERP5IsReachable(
-        urlparse.urljoin(param_dict['family-default-v6'], param_dict['site-id']))
+        param_dict['family-default-v6'],
+        param_dict['site-id'],
+        verify=False,
+    )

   def test_published_family_default_v4_is_reachable(self):
     """Tests the IPv4 URL published by the root partition is reachable.
     """
     param_dict = self.getRootPartitionConnectionParameterDict()
     self._checkERP5IsReachable(
-        urlparse.urljoin(param_dict['family-default'], param_dict['site-id']))
+        param_dict['family-default'],
+        param_dict['site-id'],
+        verify=False,
+    )

 class TestDefaultParameters(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):

@@ -93,7 +120,7 @@ class TestMedusa(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
     return {'_': json.dumps({'wsgi': False})}

-class TestApacheBalancerPorts(ERP5InstanceTestCase):
+class TestBalancerPorts(ERP5InstanceTestCase):
   """Instanciate with two zope families, this should create for each family:
   - a balancer entry point with corresponding haproxy
   - a balancer entry point for test runner

@@ -151,33 +178,22 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
         3 + 5,
         len([p for p in all_process_info if p['name'].startswith('zope-')]))

-  def test_apache_listen(self):
-    # We have 2 families, apache should listen to a total of 3 ports per family
+  def test_haproxy_listen(self):
+    # We have 2 families, haproxy should listen to a total of 3 ports per family
     # normal access on ipv4 and ipv6 and test runner access on ipv4 only
     with self.slap.instance_supervisor_rpc as supervisor:
       all_process_info = supervisor.getAllProcessInfo()
-    process_info, = [p for p in all_process_info if p['name'] == 'apache']
-    apache_process = psutil.Process(process_info['pid'])
+    process_info, = [p for p in all_process_info if p['name'].startswith('haproxy-')]
+    haproxy_master_process = psutil.Process(process_info['pid'])
+    haproxy_worker_process, = haproxy_master_process.children()
     self.assertEqual(
         sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2),
         sorted([
             c.family
-            for c in apache_process.connections()
+            for c in haproxy_worker_process.connections()
             if c.status == 'LISTEN'
         ]))

-  def test_haproxy_listen(self):
-    # There is one haproxy per family
-    with self.slap.instance_supervisor_rpc as supervisor:
-      all_process_info = supervisor.getAllProcessInfo()
-    process_info, = [p for p in all_process_info if p['name'].startswith('haproxy-')]
-    haproxy_process = psutil.Process(process_info['pid'])
-    self.assertEqual(
-        [socket.AF_INET, socket.AF_INET],
-        [c.family for c in haproxy_process.connections() if c.status == 'LISTEN'])

 class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
   """Test ERP5 can be instanciated without test runner.

@@ -199,20 +215,22 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
     self.assertNotIn('runUnitTest', bin_programs)
     self.assertNotIn('runTestSuite', bin_programs)

-  def test_no_apache_testrunner_port(self):
-    # Apache only listen on two ports, there is no apache ports allocated for test runner
+  def test_no_haproxy_testrunner_port(self):
+    # Haproxy only listen on two ports, there is no haproxy ports allocated for test runner
     with self.slap.instance_supervisor_rpc as supervisor:
       all_process_info = supervisor.getAllProcessInfo()
-    process_info, = [p for p in all_process_info if p['name'] == 'apache']
-    apache_process = psutil.Process(process_info['pid'])
+    process_info, = [p for p in all_process_info if p['name'].startswith('haproxy')]
+    haproxy_master_process = psutil.Process(process_info['pid'])
+    haproxy_worker_process, = haproxy_master_process.children()
     self.assertEqual(
         sorted([socket.AF_INET, socket.AF_INET6]),
         sorted(
            c.family
-           for c in apache_process.connections()
+           for c in haproxy_worker_process.connections()
            if c.status == 'LISTEN'
         ))

 class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
   """Test override zope node parameters
   """
software/slapos-master/buildout.hash.cfg

@@ -23,3 +23,7 @@ md5sum = 4998e62351f54700ee23a2ca8cd89329
 [template-apache-backend-conf]
 filename = apache-backend.conf.in
 md5sum = 9d7104ce18f79a7a84988efc11f5ed23
+
+[template-haproxy-cfg]
+filename = haproxy.cfg.in
+md5sum = fec6a312e4ef84b02837742992aaf495
software/slapos-master/haproxy.cfg.in (new file, 0 → 100644)
{% set server_check_path = parameter_dict['server-check-path'] -%}
global
maxconn 4096
stats socket {{ parameter_dict['socket-path'] }} level admin
defaults
mode http
retries 1
option redispatch
maxconn 2000
cookie SERVERID rewrite
balance roundrobin
stats uri /haproxy
stats realm Global\ statistics
# it is useless to have timeout much bigger than the one of apache.
# By default apache use 300s, so we set slightly more in order to
# make sure that apache will first stop the connection.
timeout server 305s
# Stop waiting in queue for a zope to become available.
# If no zope can be reached after one minute, consider the request will
# never succeed.
timeout queue 60s
# The connection should be immediate on LAN,
# so we should not set more than 5 seconds, and it could be already too much
timeout connect 5s
# As requested in haproxy doc, make this "at least equal to timeout server".
timeout client 305s
# Use "option httpclose" to not preserve client & server persistent connections
# while handling every incoming request individually, dispatching them one after
# another to servers, in HTTP close mode. This is really needed when haproxy
# is configured with maxconn to 1, without this option browsers are unable
# to render a page
option httpclose
{% for name, (port, backend_list) in sorted(parameter_dict['backend-dict'].iteritems()) -%}
listen {{ name }}
bind {{ parameter_dict['ip'] }}:{{ port }}
http-request set-header X-Balancer-Current-Cookie SERVERID
{% set has_webdav = [] -%}
{% for address, connection_count, webdav in backend_list -%}
{% if webdav %}{% do has_webdav.append(None) %}{% endif -%}
{% set server_name = name ~ '-' ~ loop.index0 -%}
server {{ server_name }} {{ address }} cookie {{ server_name }} check inter 3s rise 1 fall 2 maxqueue 5 maxconn {{ connection_count }}
{% endfor -%}
{%- if not has_webdav and server_check_path %}
option httpchk GET {{ server_check_path }}
{% endif -%}
{% endfor %}
software/slapos-master/software.cfg

@@ -63,7 +63,9 @@ filename = instance-balancer.cfg.in
 [template-apache-backend-conf]
 url = ${:_profile_base_location_}/${:filename}
 filename = apache-backend.conf.in

+[template-haproxy-cfg]
+url = ${:_profile_base_location_}/${:filename}
+
 [versions]
 python-memcached = 1.47
stack/erp5/buildout.cfg

@@ -11,6 +11,8 @@ extends =
   ../../component/gzip/buildout.cfg
   ../../component/xz-utils/buildout.cfg
   ../../component/haproxy/buildout.cfg
+  ../../component/socat/buildout.cfg
+  ../../component/rsyslogd/buildout.cfg
   ../../component/findutils/buildout.cfg
   ../../component/librsvg/buildout.cfg
   ../../component/imagemagick/buildout.cfg

@@ -180,6 +182,8 @@ context =
   key gzip_location gzip:location
   key xz_utils_location xz-utils:location
   key haproxy_location haproxy:location
+  key socat_location socat:location
+  key rsyslogd_location rsyslogd:location
   key instance_common_cfg instance-common:rendered
   key jsl_location jsl:location
   key jupyter_enable_default erp5-defaults:jupyter-enable-default

@@ -209,6 +213,7 @@ context =
   key template_balancer template-balancer:target
   key template_erp5 template-erp5:target
   key template_haproxy_cfg template-haproxy-cfg:target
+  key template_rsyslogd_cfg template-rsyslogd-cfg:target
   key template_jupyter_cfg instance-jupyter-notebook:rendered
   key template_kumofs template-kumofs:target
   key template_mariadb template-mariadb:target

@@ -274,6 +279,9 @@ fontconfig-includes =
 [template-haproxy-cfg]
 <= download-base

+[template-rsyslogd-cfg]
+<= download-base
+
 [erp5-bin]
 <= erp5
 repository = https://lab.nexedi.com/nexedi/erp5-bin.git
stack/erp5/buildout.hash.cfg

@@ -70,7 +70,7 @@ md5sum = cc19560b9400cecbd23064d55c501eec
 [template]
 filename = instance.cfg.in
-md5sum = 5c5250112b87a3937f939028f9594b85
+md5sum = 2ccfd6e2eb803a0d5e23e36a5e6c50ad

 [monitor-template-dummy]
 filename = dummy.cfg

@@ -90,8 +90,12 @@ md5sum = 2f3ddd328ac1c375e483ecb2ef5ffb57
 [template-balancer]
 filename = instance-balancer.cfg.in
-md5sum = 4ba93d28d93bd066d5d19f4f74fc13d7
+md5sum = 4a119083eab1eadbaf44468eb4f3381f

 [template-haproxy-cfg]
 filename = haproxy.cfg.in
-md5sum = fec6a312e4ef84b02837742992aaf495
+md5sum = 8de18a61607bd66341a44b95640d293f
+
+[template-rsyslogd-cfg]
+filename = rsyslogd.cfg.in
+md5sum = 7030e42b50e03f24e036b7785bd6159f
stack/erp5/haproxy.cfg.in
{# This file configures haproxy to redirect requests from ports to specific urls.
# It provides TLS support for server and optionnaly for client.
#
# All parameters are given through the `parameter_dict` variable, see the
# list entries :
#
# parameter_dict = {
# # Path of the PID file. HAProxy will write its own PID to this file
# # Sending USR2 signal to this pid will cause haproxy to reload
# # its configuration.
# "pidfile": "<file_path>",
#
# # AF_UNIX socket for logs. Syslog must be listening on this socket.
# "log-socket": "<file_path>",
#
# # AF_UNIX socket for statistics and control.
# # Haproxy will listen on this socket.
# "stats-socket": "<file_path>",
#
# # IPv4 to listen on
# # All backends from `backend-dict` will listen on this IP.
# "ipv4": "0.0.0.0",
#
# # IPv6 to listen on
# # All backends from `backend-dict` will listen on this IP.
# "ipv6": "::1",
#
# # Certificate and key in PEM format. All ports will serve TLS using
# # this certificate.
# "cert": "<file_path>",
#
# # CA to verify client certificates in PEM format.
# # If set, client certificates will be verified with these CAs.
# # If not set, client certificates are not verified.
# "ca-cert": "<file_path>",
#
# # An optional CRL in PEM format (the file can contain multiple CRL)
# # This is required if ca-cert is passed.
# "crl": "<file_path>",
#
# # Path to use for HTTP health check on backends from `backend-dict`.
# "server-check-path": "/",
#
# # The mapping of backends, keyed by family name
# "backend-dict": {
# "family-secure": {
# ( 8000, # port int
# 'https', # proto str
# True, # ssl_required bool
# [ # backends
# '10.0.0.10:8001', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# ),
# },
# "family-default": {
# ( 8002, # port int
# 'https', # proto str
# False, # ssl_required bool
# [ # backends
# '10.0.0.10:8003', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# ),
# },
#
# # The mapping of zope paths.
# # This is a Zope specific feature.
# # `enable_authentication` has same meaning as for `backend-list`.
# "zope-virtualhost-monster-backend-dict": {
# # {(ip, port): ( enable_authentication, {frontend_path: ( internal_url ) }, ) }
# ('[::1]', 8004): (
# True, {
# 'zope-1': 'http://10.0.0.10:8001',
# 'zope-2': 'http://10.0.0.10:8002',
# },
# ),
# },
# }
#
# This sample of `parameter_dict` will make haproxy listening to :
# From to `backend-list`:
# For "family-secure":
# - 0.0.0.0:8000 redirecting internaly to http://10.0.0.10:8001 and
# - [::1]:8000 redirecting internaly to http://10.0.0.10:8001
# only accepting requests from clients providing a verified TLS certificate
# emitted by a CA from `ca-cert` and not revoked in `crl`.
# For "family-default":
# - 0.0.0.0:8002 redirecting internaly to http://10.0.0.10:8003
# - [::1]:8002 redirecting internaly to http://10.0.0.10:8003
# accepting requests from any client.
#
# For both families, X-Forwarded-For header will be stripped unless
# client presents a certificate that can be verified with `ca-cert` and `crl`.
#
# From zope-virtualhost-monster-backend-dict`:
# - [::1]:8004 with some path based rewrite-rules redirecting to:
# * http://10.0.0.10/8001 when path matches /zope-1(.*)
# * http://10.0.0.10/8002 when path matches /zope-2(.*)
# with some VirtualHostMonster rewrite rules so zope writes URLs with
# [::1]:8004 as server name.
# For more details, refer to
# https://docs.zope.org/zope2/zope2book/VirtualHosting.html#using-virtualhostroot-and-virtualhostbase-together
-#}
 {% set server_check_path = parameter_dict['server-check-path'] -%}
 global
   maxconn 4096
-  stats socket {{ parameter_dict['socket-path'] }} level admin
+  master-worker
+  pidfile {{ parameter_dict['pidfile'] }}
+  # SSL configuration was generated with mozilla SSL Configuration Generator
+  # generated 2020-10-28, Mozilla Guideline v5.6, HAProxy 2.1, OpenSSL 1.1.1g, modern configuration
+  # https://ssl-config.mozilla.org/#server=haproxy&version=2.1&config=modern&openssl=1.1.1g&guideline=5.6
+  ssl-default-bind-ciphersuites TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256
+  ssl-default-bind-options prefer-client-ciphers no-sslv3 no-tlsv10 no-tlsv11 no-tlsv12 no-tls-tickets
+  ssl-default-server-ciphersuites TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256
+  ssl-default-server-options no-sslv3 no-tlsv10 no-tlsv11 no-tlsv12 no-tls-tickets
+  stats socket {{ parameter_dict['stats-socket'] }} level admin

 defaults
   mode http
   retries 1
   option redispatch
   maxconn 2000
-  cookie SERVERID rewrite
   balance roundrobin
   stats uri /haproxy
   stats realm Global\ statistics
-  # it is useless to have timeout much bigger than the one of apache.
-  # By default apache use 300s, so we set slightly more in order to
-  # make sure that apache will first stop the connection.
-  timeout server 305s
+  timeout connect 10s
   # Stop waiting in queue for a zope to become available.
   # If no zope can be reached after one minute, consider the request will
   # never succeed.
   timeout queue 60s
-  # The connection should be immediate on LAN,
-  # so we should not set more than 5 seconds, and it could be already too much
-  timeout connect 5s
-  # As requested in haproxy doc, make this "at least equal to timeout server".
+  timeout server 305s
   timeout client 305s
-  # Use "option httpclose" to not preserve client & server persistent connections
-  # while handling every incoming request individually, dispatching them one after
-  # another to servers, in HTTP close mode. This is really needed when haproxy
-  # is configured with maxconn to 1, without this option browsers are unable
-  # to render a page
-  option httpclose
+  option http-server-close
+  # compress some content types
+  compression algo gzip
+  compression type application/font-woff application/font-woff2 application/hal+json application/javascript application/json application/rss+xml application/wasm application/x-font-opentype application/x-font-ttf application/x-javascript application/xml image/svg+xml text/cache-manifest text/css text/html text/javascript text/plain text/xml
+  log {{ parameter_dict['log-socket'] }} local0 info

-{% for name, (port, backend_list) in sorted(parameter_dict['backend-dict'].iteritems()) -%}
-listen {{ name }}
-  bind {{ parameter_dict['ip'] }}:{{ port }}
+{% set bind_ssl_crt = 'ssl crt ' ~ parameter_dict['cert'] ~ ' alpn h2,http/1.1' %}
+{% for name, (port, _, certificate_authentication, backend_list) in sorted(parameter_dict['backend-dict'].iteritems()) -%}
+listen family_{{ name }}
+{%- if parameter_dict.get('ca-cert') -%}
+{%- set ssl_auth = ' ca-file ' ~ parameter_dict['ca-cert'] ~ ' verify' ~ ( ' required' if certificate_authentication else ' optional' ) ~ ' crl-file ' ~ parameter_dict['crl'] %}
+{%- else %}
+{%- set ssl_auth = '' %}
+{%- endif %}
+  bind {{ parameter_dict['ipv4'] }}:{{ port }} {{ bind_ssl_crt }} {{ ssl_auth }}
+  bind {{ parameter_dict['ipv6'] }}:{{ port }} {{ bind_ssl_crt }} {{ ssl_auth }}
+  cookie SERVERID rewrite
   http-request set-header X-Balancer-Current-Cookie SERVERID
+  # remove X-Forwarded-For unless client presented a verified certificate
+  acl client_cert_verified ssl_c_used ssl_c_verify 0
+  http-request del-header X-Forwarded-For unless client_cert_verified
+  # set Remote-User if client presented a verified certificate
+  http-request del-header Remote-User
+  http-request set-header Remote-User %{+Q}[ssl_c_s_dn(cn)] if client_cert_verified
+  # logs
+  capture request header Referer len 512
+  capture request header User-Agent len 512
+  log-format "%{+Q}o %{-Q}ci - - [%trg] %r %ST %B %{+Q}[capture.req.hdr(0)] %{+Q}[capture.req.hdr(1)] %Tt"
 {% set has_webdav = [] -%}
 {% for address, connection_count, webdav in backend_list -%}
 {% if webdav %}{% do has_webdav.append(None) %}{% endif -%}
-{% set server_name = name ~ '-' ~ loop.index0 -%}
+{% set server_name = name ~ '-' ~ loop.index0 %}
   server {{ server_name }} {{ address }} cookie {{ server_name }} check inter 3s rise 1 fall 2 maxqueue 5 maxconn {{ connection_count }}
-{% endfor -%}
+{%- endfor -%}
 {%- if not has_webdav and server_check_path %}
   option httpchk GET {{ server_check_path }}
-{% endif -%}
+{%- endif %}
 {% endfor %}
+
+{% for (ip, port), (_, backend_dict) in sorted(parameter_dict['zope-virtualhost-monster-backend-dict'].iteritems()) -%}
+{% set group_name = 'testrunner_' ~ loop.index0 %}
+
+frontend frontend_{{ group_name }}
+  bind {{ ip }}:{{ port }} {{ bind_ssl_crt }}
+  timeout client 8h
+  # logs
+  capture request header Referer len 512
+  capture request header User-Agent len 512
+  log-format "%{+Q}o %{-Q}ci - - [%trg] %r %ST %B %{+Q}[capture.req.hdr(0)] %{+Q}[capture.req.hdr(1)] %Tt"
+{% for name in sorted(backend_dict.keys()) %}
+  use_backend backend_{{ group_name }}_{{ name }} if { path -m beg /{{ name }} }
+{%- endfor %}
+
+{% for name, url in sorted(backend_dict.items()) %}
+backend backend_{{ group_name }}_{{ name }}
+  http-request replace-path ^/{{ name }}(.*) /VirtualHostBase/https/{{ ip }}:{{ port }}/VirtualHostRoot/_vh_{{ name }}\1
+  timeout server 8h
+  server {{ name }} {{ urlparse.urlparse(url).netloc }}
+{%- endfor %}
+{% endfor %}
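This template is consumed by the [haproxy-cfg] section of instance-balancer.cfg.in (next file), which passes parameter_dict, an urlparse import and the jinja2.ext.do extension as rendering context. A rough, hypothetical sketch of rendering it standalone under Python 2 with a minimal parameter_dict shaped like the documentation block above; all paths and addresses are invented for the example, the real values come from the buildout sections:

import urlparse  # Python 2 module name, as expected by the template
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('.'), extensions=['jinja2.ext.do'])
template = env.get_template('haproxy.cfg.in')
print(template.render(
    urlparse=urlparse,
    parameter_dict={
        'pidfile': 'var/run/haproxy.pid',
        'log-socket': 'var/run/log.sock',
        'stats-socket': 'var/run/haproxy.sock',
        'ipv4': '10.0.0.1',
        'ipv6': '[::1]',
        'cert': 'etc/certificate.pem',
        'server-check-path': '/',
        # family name -> (port, scheme, require client certificate, backend list)
        'backend-dict': {
            'default': (8002, 'https', False, [('10.0.0.10:8003', 1, False)]),
        },
        # (ip, port) -> (require client certificate, {path prefix: internal url})
        'zope-virtualhost-monster-backend-dict': {
            ('[::1]', 8004): (True, {'unit_test_0': 'http://10.0.0.10:8005'}),
        },
    },
))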
stack/erp5/instance-balancer.cfg.in
View file @
bd3c2b18
...
@@ -8,10 +8,8 @@ XXX: This template only supports exactly one IPv4 and (if ipv6 is used) one IPv6
...
@@ -8,10 +8,8 @@ XXX: This template only supports exactly one IPv4 and (if ipv6 is used) one IPv6
per partition. No more (undefined result), no less (IndexError).
per partition. No more (undefined result), no less (IndexError).
-#}
-#}
{% set ipv4 = (ipv4_set | list)[0] -%}
{% set ipv4 = (ipv4_set | list)[0] -%}
{% set apache_ip_list = [ipv4] -%}
{% if ipv6_set -%}
{% if ipv6_set -%}
{% set ipv6 = (ipv6_set | list)[0] -%}
{% set ipv6 = (ipv6_set | list)[0] -%}
{% do apache_ip_list.append('[' ~ ipv6 ~ ']') -%}
{% endif -%}
{% endif -%}
[jinja2-template-base]
[jinja2-template-base]
...
@@ -28,7 +26,7 @@ mode = 644
...
@@ -28,7 +26,7 @@ mode = 644
ca_path='${directory:srv}/caucase-updater/ca.crt',
ca_path='${directory:srv}/caucase-updater/ca.crt',
crl_path='${directory:srv}/caucase-updater/crl.pem',
crl_path='${directory:srv}/caucase-updater/crl.pem',
key_path='${apache-conf-ssl:caucase-key}',
key_path='${apache-conf-ssl:caucase-key}',
on_renew='${
apache-graceful
:output}',
on_renew='${
haproxy-reload
:output}',
max_sleep=ssl_parameter_dict.get('max-crl-update-delay', 1.0),
max_sleep=ssl_parameter_dict.get('max-crl-update-delay', 1.0),
template_csr_pem=ssl_parameter_dict.get('csr'),
template_csr_pem=ssl_parameter_dict.get('csr'),
openssl=parameter_dict['openssl'] ~ '/bin/openssl',
openssl=parameter_dict['openssl'] ~ '/bin/openssl',
...
@@ -42,7 +40,7 @@ mode = 644
...
@@ -42,7 +40,7 @@ mode = 644
{% for frontend_caucase_url in frontend_caucase_url_list -%}
{% for frontend_caucase_url in frontend_caucase_url_list -%}
{% set hash = hashlib.md5(frontend_caucase_url).hexdigest() -%}
{% set hash = hashlib.md5(frontend_caucase_url).hexdigest() -%}
{% do frontend_caucase_url_hash_list.append(hash) -%}
{% do frontend_caucase_url_hash_list.append(hash) -%}
{% set data_dir = '${directory:
srv}/client-cert-ca
/%s' % hash -%}
{% set data_dir = '${directory:
client-cert-ca}
/%s' % hash -%}
{{ caucase.updater(
{{ caucase.updater(
prefix='caucase-updater-%s' % hash,
prefix='caucase-updater-%s' % hash,
buildout_bin_directory=parameter_dict['bin-directory'],
buildout_bin_directory=parameter_dict['bin-directory'],
...
@@ -51,7 +49,7 @@ mode = 644
...
@@ -51,7 +49,7 @@ mode = 644
data_dir=data_dir,
data_dir=data_dir,
ca_path='%s/ca.crt' % data_dir,
ca_path='%s/ca.crt' % data_dir,
crl_path='%s/crl.pem' % data_dir,
crl_path='%s/crl.pem' % data_dir,
on_renew='${caucase-updater-housekeeper:output}
; ${apache-graceful:output}
',
on_renew='${caucase-updater-housekeeper:output}',
max_sleep=ssl_parameter_dict.get('max-crl-update-delay', 1.0),
max_sleep=ssl_parameter_dict.get('max-crl-update-delay', 1.0),
openssl=parameter_dict['openssl'] ~ '/bin/openssl',
openssl=parameter_dict['openssl'] ~ '/bin/openssl',
)}}
)}}
...
@@ -71,24 +69,47 @@ input =
...
@@ -71,24 +69,47 @@ input =
import subprocess
import subprocess
hash_list = {{ repr(frontend_caucase_url_hash_list) }}
hash_list = {{ repr(frontend_caucase_url_hash_list) }}
crt_list = ['%s.crt' % e for e in hash_list]
crt_list = ['%s.crt' % e for e in hash_list]
crl_list = ['%s.crl' % e for e in hash_list]
for path in glob.glob('${haproxy-conf-ssl:ca-cert-dir}/*.crt'):
for path in glob.glob('${apache-conf-ssl:ca-cert-dir}/*.crt'):
if os.path.basename(path) not in crt_list:
if os.path.basename(path) not in crt_list:
os.unlink(path)
os.unlink(path)
for path in glob.glob('${apache-conf-ssl:crl-dir}/*.crl'):
crl_list = ['%s.crl' % e for e in hash_list]
for path in glob.glob('${haproxy-conf-ssl:crl-dir}/*.crl'):
if os.path.basename(path) not in crl_list:
if os.path.basename(path) not in crl_list:
os.unlink(path)
os.unlink(path)
for hash in hash_list:
for hash in hash_list:
crt = '${directory:
srv}/client-cert-ca
/%s/ca.crt' % hash
crt = '${directory:
client-cert-ca}
/%s/ca.crt' % hash
crt_link = '${
apache
-conf-ssl:ca-cert-dir}/%s.crt' % hash
crt_link = '${
haproxy
-conf-ssl:ca-cert-dir}/%s.crt' % hash
crl = '${directory:
srv}/client-cert-ca
/%s/crl.pem' % hash
crl = '${directory:
client-cert-ca}
/%s/crl.pem' % hash
crl_link = '${
apache
-conf-ssl:crl-dir}/%s.crl' % hash
crl_link = '${
haproxy
-conf-ssl:crl-dir}/%s.crl' % hash
if os.path.isfile(crt) and not os.path.islink(crt_link):
if os.path.isfile(crt) and not os.path.islink(crt_link):
os.symlink(crt, crt_link)
os.symlink(crt, crt_link)
if os.path.isfile(crl) and not os.path.islink(crl_link):
if os.path.isfile(crl) and not os.path.islink(crl_link):
os.symlink(crl, crl_link)
os.symlink(crl, crl_link)
subprocess.check_call(['{{ parameter_dict["openssl"] }}/bin/c_rehash', '${apache-conf-ssl:ca-cert-dir}'])
subprocess.check_call(['{{ parameter_dict["openssl"] }}/bin/c_rehash', '${haproxy-conf-ssl:ca-cert-dir}'])
subprocess.check_call(['{{ parameter_dict["openssl"] }}/bin/c_rehash', '${apache-conf-ssl:crl-dir}'])
subprocess.check_call(['{{ parameter_dict["openssl"] }}/bin/c_rehash', '${haproxy-conf-ssl:crl-dir}'])
# assemble all CA and all CRLs in one file for haproxy
with open('${haproxy-conf-ssl:ca-cert}.tmp', 'w') as f:
for path in glob.glob('${haproxy-conf-ssl:ca-cert-dir}/*.crt'):
with open(path) as in_f:
f.write('#{}\n'.format(path))
f.write(in_f.read() + '\n')
with open('${haproxy-conf-ssl:crl}.tmp', 'w') as f:
for path in glob.glob('${haproxy-conf-ssl:crl-dir}/*.crl'):
with open(path) as in_f:
f.write('#{}\n'.format(path))
f.write(in_f.read() + '\n')
if os.path.exists('${haproxy-conf-ssl:ca-cert}'):
os.unlink('${haproxy-conf-ssl:ca-cert}')
if os.path.exists('${haproxy-conf-ssl:crl}'):
os.unlink('${haproxy-conf-ssl:crl}')
os.rename('${haproxy-conf-ssl:ca-cert}.tmp', '${haproxy-conf-ssl:ca-cert}')
os.rename('${haproxy-conf-ssl:crl}.tmp', '${haproxy-conf-ssl:crl}')
subprocess.check_call(['${haproxy-reload:output}'])
[caucase-updater-housekeeper-run]
[caucase-updater-housekeeper-run]
recipe = plone.recipe.command
recipe = plone.recipe.command
...
@@ -97,9 +118,8 @@ update-command = ${:command}
...
@@ -97,9 +118,8 @@ update-command = ${:command}
{% endif -%}
{% endif -%}
{% set haproxy_dict = {} -%}
{% set haproxy_dict = {} -%}
{% set apache_dict = {} -%}
{% set zope_virtualhost_monster_backend_dict = {} %}
{% set zope_virtualhost_monster_backend_dict = {} %}
{% set test_runner_url_dict = {} %} {# family_name => list of
apache
URLs #}
{% set test_runner_url_dict = {} %} {# family_name => list of URLs #}
{% set next_port = itertools.count(slapparameter_dict['tcpv4-port']).next -%}
{% set next_port = itertools.count(slapparameter_dict['tcpv4-port']).next -%}
{% for family_name, parameter_id_list in sorted(
{% for family_name, parameter_id_list in sorted(
slapparameter_dict['zope-family-dict'].iteritems()) -%}
slapparameter_dict['zope-family-dict'].iteritems()) -%}
...
@@ -120,19 +140,19 @@ update-command = ${:command}
...
@@ -120,19 +140,19 @@ update-command = ${:command}
{% set test_runner_address_list = slapparameter_dict.get(parameter_id ~ '-test-runner-address-list', []) %}
{% set test_runner_address_list = slapparameter_dict.get(parameter_id ~ '-test-runner-address-list', []) %}
{% if test_runner_address_list -%}
{% if test_runner_address_list -%}
{% set test_runner_backend_mapping = {} %}
{% set test_runner_backend_mapping = {} %}
{% set test_runner_
apache
_url_list = [] %}
{% set test_runner_
balancer
_url_list = [] %}
{% set test_runner_external_port = next_port() %}
{% set test_runner_external_port = next_port() %}
{% for i, (test_runner_internal_ip, test_runner_internal_port) in enumerate(test_runner_address_list) %}
{% for i, (test_runner_internal_ip, test_runner_internal_port) in enumerate(test_runner_address_list) %}
{% do test_runner_backend_mapping.__setitem__(
{% do test_runner_backend_mapping.__setitem__(
'unit_test_' ~ i,
'unit_test_' ~ i,
'http://' ~ test_runner_internal_ip ~ ':' ~ test_runner_internal_port ) %}
'http://' ~ test_runner_internal_ip ~ ':' ~ test_runner_internal_port ) %}
{% do test_runner_
apache
_url_list.append(
{% do test_runner_
balancer
_url_list.append(
'https://' ~ ipv4 ~ ':' ~ test_runner_external_port ~ '/unit_test_' ~ i ~ '/' ) %}
'https://' ~ ipv4 ~ ':' ~ test_runner_external_port ~ '/unit_test_' ~ i ~ '/' ) %}
{% endfor %}
{% endfor %}
{% do zope_virtualhost_monster_backend_dict.__setitem__(
{% do zope_virtualhost_monster_backend_dict.__setitem__(
(ipv4, test_runner_external_port),
(ipv4, test_runner_external_port),
( ssl_authentication, test_runner_backend_mapping ) ) -%}
( ssl_authentication, test_runner_backend_mapping ) ) -%}
{% do test_runner_url_dict.__setitem__(family_name, test_runner_
apache
_url_list) -%}
{% do test_runner_url_dict.__setitem__(family_name, test_runner_
balancer
_url_list) -%}
{% endif -%}
{% endif -%}
{% endfor -%}
{% endfor -%}
...
@@ -143,50 +163,69 @@ update-command = ${:command}
...
@@ -143,50 +163,69 @@ update-command = ${:command}
# do a no-op getitem.
-#}
{% do zope_family_address_list[0][0] -%}
{#
# We used to have haproxy then apache; now haproxy plays apache's role.
# To keep ports stable, we consume one port so that haproxy uses the same
# port that apache was using before.
-#}
{% set _ = next_port() -%}
{% set haproxy_port = next_port() -%}
{% set backend_path = slapparameter_dict['backend-path-dict'][family_name] -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, zope_family_address_list)) -%}
{% if has_webdav -%}
{% set internal_scheme = 'http' -%}{# mod_rewrite does not recognise webdav scheme -#}
{% set external_scheme = 'webdavs' -%}
{% else %}
{% set internal_scheme = 'http' -%}
{% set external_scheme = 'https' -%}
{% endif -%}
{% do apache_dict.__setitem__(family_name, (next_port(), external_scheme, internal_scheme ~ '://' ~ ipv4 ~ ':' ~ haproxy_port ~ backend_path, slapparameter_dict['ssl-authentication-dict'].get(family_name, False))) -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, external_scheme, slapparameter_dict['ssl-authentication-dict'].get(family_name, False), zope_family_address_list)) -%}
{% endfor -%}
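The port bookkeeping above is easier to see outside Jinja2. A minimal Python sketch of the same idea (the base port 2150 and the family names are made up for illustration): each family draws consecutive numbers from one counter, and throwing the first draw away keeps the published haproxy port on the number that apache used to get.

    import itertools

    # Stand-in for slapparameter_dict['tcpv4-port']; the template uses .next (Python 2).
    next_port = itertools.count(2150).__next__

    for family_name in ('default', 'activities'):
        _ = next_port()             # slot that haproxy's internal port used to occupy
        haproxy_port = next_port()  # same number the apache frontend was given before
        print(family_name, haproxy_port)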
[haproxy-cfg-parameter-dict]
socket-path = ${directory:run}/haproxy.sock
ipv4 = {{ ipv4 }}
ipv6 = {{ ipv6 }}
cert = ${haproxy-conf-ssl:certificate}
{% if frontend_caucase_url_list -%}
ca-cert = ${haproxy-conf-ssl:ca-cert}
crl = ${haproxy-conf-ssl:crl}
{% endif %}
stats-socket = ${directory:run}/haproxy.sock
pidfile = ${directory:run}/haproxy.pid
log-socket = ${rsyslogd-cfg-parameter-dict:log-socket}
server-check-path = {{ dumps(slapparameter_dict['haproxy-server-check-path']) }}
backend-dict = {{ dumps(haproxy_dict) }}
ip = {{ ipv4 }}
zope-virtualhost-monster-backend-dict = {{ dumps(zope_virtualhost_monster_backend_dict) }}
[haproxy-cfg]
< = jinja2-template-base
template = {{ parameter_dict['template-haproxy-cfg'] }}
rendered = ${directory:etc}/haproxy.cfg
context = section parameter_dict haproxy-cfg-parameter-dict
context =
  section parameter_dict haproxy-cfg-parameter-dict
  import urlparse urlparse
extensions = jinja2.ext.do
[haproxy-reload]
recipe = collective.recipe.template
output = ${directory:bin}/${:_buildout_section_name_}
mode = 700
input =
  inline:
  #!/bin/sh
  kill -USR2 $(cat "${haproxy-cfg-parameter-dict:pidfile}")
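For reference, this reload wrapper only sends SIGUSR2 to the process recorded in the pidfile; assuming haproxy runs in master-worker mode (where USR2 makes the master re-read its configuration), the same action in Python would be roughly:

    import os
    import signal

    # Placeholder path; the wrapper reads ${haproxy-cfg-parameter-dict:pidfile}.
    PIDFILE = '/tmp/haproxy.pid'

    with open(PIDFILE) as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGUSR2)  # ask the haproxy master to reload its configuration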
[{{ section('haproxy') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/haproxy
wrapper-path = ${directory:services-on-watch}/haproxy
command-line = "{{ parameter_dict['haproxy'] }}/sbin/haproxy" -f "${haproxy-cfg:rendered}"
hash-files = ${haproxy-cfg:rendered}
[apache-conf-ssl]
cert = ${directory:apache-conf}/apache.crt
key = ${directory:apache-conf}/apache.pem
# XXX caucase is/was buggy and this certificate does not match key for instances
# that were updated, so don't use it yet.
caucase-cert = ${directory:apache-conf}/apache-caucase.crt
caucase-key = ${directory:apache-conf}/apache-caucase.pem
{% if frontend_caucase_url_list -%}
depends = ${caucase-updater-housekeeper-run:recipe}
ca-cert-dir = ${directory:apache-ca-cert-dir}
crl-dir = ${directory:apache-crl-dir}
{%- endif %}
[simplefile]
< = jinja2-template-base
...
@@ -204,95 +243,89 @@ context = key content {{content_section_name}}:content
mode = {{ mode }}
{%- endmacro %}
[apache-ssl]
{% if ssl_parameter_dict.get('key') -%}
key = ${apache-ssl-key:rendered}
cert = ${apache-ssl-cert:rendered}
{{ simplefile('apache-ssl-key', '${apache-conf-ssl:key}', ssl_parameter_dict['key']) }}
{{ simplefile('apache-ssl-cert', '${apache-conf-ssl:cert}', ssl_parameter_dict['cert']) }}
{% else %}
recipe = plone.recipe.command
command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:key}" -out "${:cert}"
key = ${apache-conf-ssl:key}
cert = ${apache-conf-ssl:cert}
{%- endif %}
[apache-conf-parameter-dict]
backend-list = {{ dumps(apache_dict.values()) }}
zope-virtualhost-monster-backend-dict = {{ dumps(zope_virtualhost_monster_backend_dict) }}
ip-list = {{ dumps(apache_ip_list) }}
pid-file = ${directory:run}/apache.pid
log-dir = ${directory:log}
error-log = ${directory:log}/apache-error.log
access-log = ${directory:log}/apache-access.log
# Apache 2.4's default value (60 seconds) can be a bit too short
timeout = 300
# Basic SSL server configuration
cert = ${apache-ssl:cert}
key = ${apache-ssl:key}
cipher =
ssl-session-cache = ${directory:log}/apache-ssl-session-cache
{% if frontend_caucase_url_list -%}
# Client x509 auth
ca-cert-dir = ${apache-conf-ssl:ca-cert-dir}
crl-dir = ${apache-conf-ssl:crl-dir}
{%- endif %}
[apache-conf]
< = jinja2-template-base
template = {{ parameter_dict['template-apache-conf'] }}
rendered = ${directory:apache-conf}/apache.conf
context = section parameter_dict apache-conf-parameter-dict
[{{ section('apache') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/apache
command-line = "{{ parameter_dict['apache'] }}/bin/httpd" -f "${apache-conf:rendered}" -DFOREGROUND
wait-for-files =
  ${apache-conf-ssl:cert}
  ${apache-conf-ssl:key}
[apache-graceful]
recipe = collective.recipe.template
output = ${directory:bin}/apache-httpd-graceful
mode = 700
input = inline:
  #!/bin/sh
  kill -USR1 "$(cat '${apache-conf-parameter-dict:pid-file}')"

[{{ section('haproxy-socat-stats')}}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/${:_buildout_section_name_}
command-line = "{{ parameter_dict['socat'] }}/bin/socat" unix-connect:${haproxy-cfg-parameter-dict:stats-socket} stdio
[rsyslogd-cfg-parameter-dict]
log-socket = ${directory:run}/log.sock
access-log-file = ${directory:log}/apache-access.log
error-log-file = ${directory:log}/apache-error.log
pid-file = ${directory:run}/rsyslogd.pid
spool-directory = ${directory:rsyslogd-spool}
[rsyslogd-cfg]
<= jinja2-template-base
template = {{ parameter_dict['template-rsyslogd-cfg'] }}
rendered = ${directory:etc}/rsyslogd.conf
context = section parameter_dict rsyslogd-cfg-parameter-dict
[{{ section ('rsyslogd') }}]
recipe = slapos.cookbook:wrapper
command-line = {{ parameter_dict['rsyslogd'] }}/sbin/rsyslogd -i ${rsyslogd-cfg-parameter-dict:pid-file} -n -f ${rsyslogd-cfg:rendered}
wrapper-path = ${directory:services-on-watch}/rsyslogd
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
hash-files = ${rsyslogd-cfg:rendered}
[{{ section ('rsyslogd-listen-promise') }}]
<= monitor-promise-base
module = check_command_execute
name = rsyslogd_listen_promise.py
config-command = test -S ${rsyslogd-cfg-parameter-dict:log-socket}
[haproxy-conf-ssl]
certificate = ${build-certificate-and-key:certificate-and-key}
{% if frontend_caucase_url_list -%}
ca-cert = ${directory:etc}/frontend-ca.pem
ca-cert-dir = ${directory:ca-cert}
crl = ${directory:etc}/frontend-crl.pem
crl-dir = ${directory:crl}
depends = ${caucase-updater-housekeeper-run:recipe}
{%- endif %}
[build-certificate-and-key]
{% if ssl_parameter_dict.get('key') -%}
certificate-and-key = ${tls-certificate-and-key-from-parameters:rendered}
{{ simplefile(
    'tls-certificate-and-key-from-parameters',
    '${directory:etc}/certificate-and-key-from-parameters.pem',
    ssl_parameter_dict['cert'] ~ "\n" ~ ssl_parameter_dict['key']) }}
{% else %}
recipe = plone.recipe.command
command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:certificate-and-key}" -out "${:certificate-and-key}"
certificate-and-key = ${directory:etc}/certificate-and-key-generated.pem
{%- endif %}
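The haproxy-socat-stats wrapper above is just an interactive bridge to HAProxy's admin socket. As a sketch of what that socket can be used for programmatically (the socket path below is a placeholder; in an instance it is the stats-socket rendered above), the runtime API answers plain-text commands such as "show stat":

    import socket

    # Placeholder; in a partition this is ${directory:run}/haproxy.sock.
    STATS_SOCKET = '/tmp/haproxy.sock'

    def haproxy_command(command):
        """Send one runtime API command and return HAProxy's reply."""
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(STATS_SOCKET)
        sock.sendall(command.encode() + b'\n')
        chunks = []
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                break
            chunks.append(chunk)
        sock.close()
        return b''.join(chunks).decode()

    print(haproxy_command('show stat'))  # CSV dump of frontends, backends and servers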
[{{ section('apache-promise') }}]
[{{ section('haproxy-promise') }}]
<= monitor-promise-base
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
# Check any haproxy port in ipv4, expect other ports and ipv6 to behave consistently
module = check_port_listening
name = apache.py
name = haproxy.py
config-hostname = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
config-port = {{ haproxy_dict.values()[0][0] }}
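This promise amounts to checking that something accepts TCP connections on the published address. A stripped-down sketch of that check, with placeholder values where the promise takes config-hostname and config-port:

    import socket

    HOST, PORT = '10.0.0.2', 2152  # placeholders for config-hostname / config-port

    def port_is_listening(host, port, timeout=5):
        """Return True if a TCP connection to (host, port) succeeds."""
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False

    print(port_is_listening(HOST, PORT))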
[{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised
{% for family_name, (apache_port, scheme, _, _) in apache_dict.items() -%}
{% for family_name, (port, scheme, _, _) in haproxy_dict.items() -%}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ apache_port }}{% endif %}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ port }}{% endif %}
{{ family_name }} = {{ scheme ~ '://' ~ ipv4 ~ ':' ~ apache_port }}
{{ family_name }} = {{ scheme ~ '://' ~ ipv4 ~ ':' ~ port }}
{% endfor -%}
{% for family_name, test_runner_url_list in test_runner_url_dict.items() -%}
{{ family_name ~ '-test-runner-url-list' }} = {{ dumps(test_runner_url_list) }}
{% endfor -%}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
[{{ section('logrotate-apache') }}]
[{{ section('logrotate-rsyslogd') }}]
< = logrotate-entry-base
name = apache
name = rsyslogd
log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log}
log = ${rsyslogd-cfg-parameter-dict:access-log-file} ${rsyslogd-cfg-parameter-dict:error-log-file}
post = test ! -s ${apache-conf-parameter-dict:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1
post = test ! -s ${rsyslogd-cfg-parameter-dict:pid-file} || kill -HUP $(cat ${rsyslogd-cfg-parameter-dict:pid-file})
[directory]
recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache
{% if frontend_caucase_url_list -%}
apache-ca-cert-dir = ${:apache-conf}/ssl.crt
apache-crl-dir = ${:apache-conf}/ssl.crl
{% endif -%}
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
services = ${:etc}/run
...
@@ -302,6 +335,12 @@ run = ${:var}/run
log = ${:var}/log
srv = ${buildout:directory}/srv
apachedex = ${monitor-directory:private}/apachedex
rsyslogd-spool = ${:run}/rsyslogd-spool
{% if frontend_caucase_url_list -%}
ca-cert = ${:etc}/ssl.crt
crl = ${:etc}/ssl.crl
client-cert-ca = ${:srv}/client-cert-ca
{% endif -%}
[{{ section('resiliency-exclude-file') }}]
# Generate rdiff exclude file in case of resiliency
...
@@ -325,9 +364,7 @@ command-line = "{{ parameter_dict['run-apachedex-location'] }}" "{{ parameter_di
command = generate-apachedex-report
[apachedex-parameters]
# XXX - Sample log file with current date: apache_access.log-%(date)s.gz
# which will be equivalent to apache_access.log-20150112.gz if the date is 2015-01-12
apache-log-list = ${apache-conf-parameter-dict:access-log}
apache-log-list = ${rsyslogd-cfg-parameter-dict:access-log-file}
configuration = {{ slapparameter_dict['apachedex-configuration'] }}
promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
...
stack/erp5/instance.cfg.in
View file @
bd3c2b18
...
@@ -56,13 +56,17 @@ openssl-location = {{ openssl_location }}
[dynamic-template-balancer-parameters]
<= default-dynamic-template-parameters
apache = {{ apache_location }}
openssl = {{ openssl_location }}
haproxy = {{ haproxy_location }}
rsyslogd = {{ rsyslogd_location }}
socat = {{ socat_location }}
apachedex-location = {{ bin_directory }}/apachedex
run-apachedex-location = {{ bin_directory }}/runApacheDex
promise-check-apachedex-result = {{ bin_directory }}/check-apachedex-result
template-haproxy-cfg = {{ template_haproxy_cfg }}
template-rsyslogd-cfg = {{ template_rsyslogd_cfg }}
# XXX: only used in software/slapos-master:
apache = {{ apache_location }}
template-apache-conf = {{ template_apache_conf }}
[dynamic-template-balancer]
...
stack/erp5/rsyslogd.cfg.in
0 → 100644
View file @
bd3c2b18
module(
  load="imuxsock"
  SysSock.Name="{{ parameter_dict['log-socket'] }}")
# Simply output the raw line without any additional information, since haproxy
# already emits enough information by itself.
# Also cut the leading space out of msg (an rsyslogd internal artifact) and end
# at 8k, which is the default of $MaxMessageSize.
template(name="rawoutput" type="string" string="%msg:2:8192%\n")
$ActionFileDefaultTemplate rawoutput
$FileCreateMode 0600
$DirCreateMode 0700
$Umask 0022
$WorkDirectory {{ parameter_dict['spool-directory'] }}
local0.=info {{ parameter_dict['access-log-file'] }}
local0.warning {{ parameter_dict['error-log-file'] }}
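To see how those files get filled, note that haproxy writes syslog datagrams to the imuxsock socket declared at the top of this file. A throwaway sketch (socket path and message are placeholders) that injects one record with facility local0 and severity info, which should therefore be routed to the access log file:

    import socket

    # Placeholder; rendered as parameter_dict['log-socket'] in the instance.
    LOG_SOCKET = '/tmp/log.sock'

    # Syslog priority = facility * 8 + severity; local0 = 16, info = 6.
    message = '<%d>haproxy[1234]: sample access log line' % (16 * 8 + 6)

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    sock.connect(LOG_SOCKET)
    sock.send(message.encode())
    sock.close()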