Commit 497f3ac1 authored by Łukasz Nowak

Update Release Candidate

parents 1a8f6081 3501098b
Pipeline #16080 failed in 0 seconds
...@@ -23,16 +23,15 @@ recipe = collective.recipe.grp ...@@ -23,16 +23,15 @@ recipe = collective.recipe.grp
[proftpd] [proftpd]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
md5sum = 4040f6a6b86173e2a03f4ccdb9b9af6e md5sum = 4a9b8877b2e9b08d70e71ad56c19e2c9
url = ftp://ftp.proftpd.org/distrib/source/proftpd-1.3.6b.tar.gz url = ftp://ftp.proftpd.org/distrib/source/proftpd-1.3.7a.tar.gz
configure-options = configure-options =
--enable-openssl --enable-openssl
--enable-nls --enable-nls
--enable-ctrls --enable-ctrls
--enable-dso --enable-dso
--disable-cap --disable-cap
--with-modules=mod_sftp:mod_ban --with-modules=mod_sftp:mod_ban:mod_rewrite
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
environment = environment =
CFLAGS=-DPR_RUN_DIR=\"/proc/self/cwd/var\" CFLAGS=-DPR_RUN_DIR=\"/proc/self/cwd/var\"
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include
...@@ -47,11 +46,8 @@ patches = ...@@ -47,11 +46,8 @@ patches =
# mod_auth_web: a proftpd module to authenticate users against an HTTP service # mod_auth_web: a proftpd module to authenticate users against an HTTP service
[proftpd-mod_auth_web-repository] [proftpd-mod_auth_web-repository]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
#repository = https://github.com/proftpd/mod_auth_web repository = https://github.com/proftpd/mod_auth_web
# XXX until https://github.com/proftpd/mod_auth_web/pull/1 gets merged, we use revision = e36105808b7d07d843b11f428a666a8f3cec35e4
# a copy of this repository on nexedi gitlab
repository = https://lab.nexedi.com/jerome/mod_auth_web
revision = dec090bd0e287544a34be156ee17f715bd4286f9
git-executable = ${git:location}/bin/git git-executable = ${git:location}/bin/git
[proftpd-mod_auth_web] [proftpd-mod_auth_web]
......
...@@ -23,12 +23,12 @@ min_version = 8 ...@@ -23,12 +23,12 @@ min_version = 8
[trafficserver] [trafficserver]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = http://apache.claz.org/trafficserver/trafficserver-8.1.1.tar.bz2 url = http://apache.claz.org/trafficserver/trafficserver-9.0.1.tar.bz2
md5sum = 4f4d1e7de19c77157be0c2a825b31026 md5sum = 98bb2de25f332715339ade87530a9f5a
shared = true shared = true
patch-options = -p1 patch-options = -p1
patches = patches =
https://github.com/apache/trafficserver/commit/254e9e22181ca369673407bd3fcd93e7287275ac.patch#1c324f76464d33d334a15a69490ccd55 https://patch-diff.githubusercontent.com/raw/apache/trafficserver/pull/7577.patch
configure-options = configure-options =
--with-openssl=${openssl:location} --with-openssl=${openssl:location}
--with-pcre=${pcre:location} --with-pcre=${pcre:location}
......
...@@ -22,7 +22,7 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68 ...@@ -22,7 +22,7 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend] [profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in filename = instance-apache-frontend.cfg.in
md5sum = dd6542a91746e6e3720eaa1590792dbe md5sum = 28220d18308313d49a38d39c61a7e769
[profile-caddy-replicate] [profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in filename = instance-apache-replicate.cfg.in
...@@ -50,7 +50,7 @@ md5sum = 13cd08d630cc51666a9f7e469fb6ea52 ...@@ -50,7 +50,7 @@ md5sum = 13cd08d630cc51666a9f7e469fb6ea52
[template-backend-haproxy-configuration] [template-backend-haproxy-configuration]
_update_hash_filename_ = templates/backend-haproxy.cfg.in _update_hash_filename_ = templates/backend-haproxy.cfg.in
md5sum = 17f9582671327d8e4321a7fd1cdcb0fe md5sum = 5e126be0f74d8ae390a5594e1e912a59
[template-empty] [template-empty]
_update_hash_filename_ = templates/empty.in _update_hash_filename_ = templates/empty.in
...@@ -62,7 +62,7 @@ md5sum = 975177dedf677d24e14cede5d13187ce ...@@ -62,7 +62,7 @@ md5sum = 975177dedf677d24e14cede5d13187ce
[template-trafficserver-records-config] [template-trafficserver-records-config]
_update_hash_filename_ = templates/trafficserver/records.config.jinja2 _update_hash_filename_ = templates/trafficserver/records.config.jinja2
md5sum = ab0c9ea9bfe7a63a267331d9e010d0c6 md5sum = 88a2db868720009f6092843784b06611
[template-trafficserver-storage-config] [template-trafficserver-storage-config]
_update_hash_filename_ = templates/trafficserver/storage.config.jinja2 _update_hash_filename_ = templates/trafficserver/storage.config.jinja2
...@@ -70,7 +70,7 @@ md5sum = d022455a8610bac2dd51101edb035987 ...@@ -70,7 +70,7 @@ md5sum = d022455a8610bac2dd51101edb035987
[template-trafficserver-logging-yaml] [template-trafficserver-logging-yaml]
_update_hash_filename_ = templates/trafficserver/logging.yaml.jinja2 _update_hash_filename_ = templates/trafficserver/logging.yaml.jinja2
md5sum = 45f379e887de07d2b86de2f43937f856 md5sum = 368b271215a92594ca9e2fa3102d484f
[template-nginx-eventsource-slave-virtualhost] [template-nginx-eventsource-slave-virtualhost]
_update_hash_filename_ = templates/nginx-eventsource-slave.conf.in _update_hash_filename_ = templates/nginx-eventsource-slave.conf.in
......
...@@ -466,7 +466,9 @@ target = ${trafficserver-directory:configuration} ...@@ -466,7 +466,9 @@ target = ${trafficserver-directory:configuration}
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = {{ software_parameter_dict['trafficserver'] }}/bin/traffic_manager command-line = {{ software_parameter_dict['trafficserver'] }}/bin/traffic_manager
wrapper-path = ${trafficserver-variable:wrapper-path} wrapper-path = ${trafficserver-variable:wrapper-path}
environment = TS_ROOT=${buildout:directory} environment =
TS_ROOT=${buildout:directory}
PROXY_CONFIG_CONFIG_DIR=${trafficserver-directory:configuration}
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[trafficserver-reload] [trafficserver-reload]
......
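Note on the wrapper change above: the environment now also sets PROXY_CONFIG_CONFIG_DIR because the proxy.config.config_dir setting is dropped from records.config further down in this diff, so the configuration directory is passed through the environment instead. A hedged sketch (not part of the software release) of the assumed mapping between a records.config variable and its environment-override name:

    # Illustration only: a records.config variable is assumed to map to an
    # environment override by upper-casing it and replacing dots with underscores.
    def records_variable_to_env(name):
        return name.upper().replace('.', '_')

    assert records_variable_to_env('proxy.config.config_dir') == 'PROXY_CONFIG_CONFIG_DIR'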
...@@ -120,7 +120,7 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }} ...@@ -120,7 +120,7 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}
{%- endif %} {%- endif %}
{%- do active_check_option_list.append('timeout check %ss' % (slave_instance['health-check-timeout'])) %} {%- do active_check_option_list.append('timeout check %ss' % (slave_instance['health-check-timeout'])) %}
{%- endif %} {%- endif %}
server {{ slave_instance['slave_reference'] }}-backend {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }} {{ ' ' + ' '.join(active_check_list)}} server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }} {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }} {{ ' ' + ' '.join(active_check_list)}}
{%- for active_check_option in active_check_option_list %} {%- for active_check_option in active_check_option_list %}
{{ active_check_option }} {{ active_check_option }}
{%- endfor %} {%- endfor %}
...@@ -162,7 +162,7 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}-failover ...@@ -162,7 +162,7 @@ backend {{ slave_instance['slave_reference'] }}-{{ scheme }}-failover
timeout server {{ slave_instance['request-timeout'] }}s timeout server {{ slave_instance['request-timeout'] }}s
timeout connect {{ slave_instance['backend-connect-timeout'] }}s timeout connect {{ slave_instance['backend-connect-timeout'] }}s
retries {{ slave_instance['backend-connect-retries'] }} retries {{ slave_instance['backend-connect-retries'] }}
server {{ slave_instance['slave_reference'] }}-backend {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }} server {{ slave_instance['slave_reference'] }}-backend-{{ scheme }} {{ hostname }}:{{ port }} {{ ' '.join(ssl_list) }}
{%- if path %} {%- if path %}
http-request set-path {{ path }}%[path] http-request set-path {{ path }}%[path]
{%- endif %} {%- endif %}
......
formats: logging:
formats:
- name: squid - name: squid
format: '%<cqtq> %<ttms> %<chi> %<crc>/%<pssc> %<psql> %<cqhm> %<cquc> %<cluc> %<caun> %<phr>/%<shn> %<psct>' format: '%<cqtq> %<ttms> %<chi> %<crc>/%<pssc> %<psql> %<cqhm> %<cquc> %<cluc> %<caun> %<phr>/%<shn> %<psct>'
logs: logs:
- filename: squid - filename: squid
format: squid format: squid
mode: ascii mode: ascii
......
...@@ -9,14 +9,11 @@ ...@@ -9,14 +9,11 @@
############################################################################## ##############################################################################
CONFIG proxy.config.proxy_name STRING {{ ats_configuration['hostname'] }} CONFIG proxy.config.proxy_name STRING {{ ats_configuration['hostname'] }}
CONFIG proxy.config.local_state_dir STRING {{ ats_directory['local-state'] }} CONFIG proxy.config.local_state_dir STRING {{ ats_directory['local-state'] }}
CONFIG proxy.config.config_dir STRING {{ ats_directory['configuration'] }}
CONFIG proxy.config.bin_path STRING {{ ats_directory['bin_path'] }} CONFIG proxy.config.bin_path STRING {{ ats_directory['bin_path'] }}
CONFIG proxy.config.env_prep STRING example_prep.sh CONFIG proxy.config.env_prep STRING example_prep.sh
CONFIG proxy.config.alarm_email STRING nobody
CONFIG proxy.config.syslog_facility STRING LOG_DAEMON CONFIG proxy.config.syslog_facility STRING LOG_DAEMON
CONFIG proxy.config.output.logfile STRING traffic.out CONFIG proxy.config.output.logfile STRING traffic.out
CONFIG proxy.config.admin.user_id STRING {{ '#%s' % os_module.geteuid() }} CONFIG proxy.config.admin.user_id STRING {{ '#%s' % os_module.geteuid() }}
CONFIG proxy.config.admin.number_config_bak INT 0
LOCAL proxy.local.incoming_ip_to_bind STRING {{ ats_configuration['local-ip'] }} LOCAL proxy.local.incoming_ip_to_bind STRING {{ ats_configuration['local-ip'] }}
CONFIG proxy.config.log.logfile_dir STRING {{ ats_directory['log'] }} CONFIG proxy.config.log.logfile_dir STRING {{ ats_directory['log'] }}
# Implement RFC 5861 with core # Implement RFC 5861 with core
...@@ -66,7 +63,6 @@ CONFIG proxy.config.http.insert_response_via_str INT 0 ...@@ -66,7 +63,6 @@ CONFIG proxy.config.http.insert_response_via_str INT 0
# https://docs.trafficserver.apache.org/records.config#parent-proxy-configuration # https://docs.trafficserver.apache.org/records.config#parent-proxy-configuration
# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/parent.config.en.html # https://docs.trafficserver.apache.org/en/latest/admin-guide/files/parent.config.en.html
############################################################################## ##############################################################################
CONFIG proxy.config.http.parent_proxy_routing_enable INT 0
CONFIG proxy.config.http.parent_proxy.retry_time INT 300 CONFIG proxy.config.http.parent_proxy.retry_time INT 300
CONFIG proxy.config.http.parent_proxy.connect_attempts_timeout INT 30 CONFIG proxy.config.http.parent_proxy.connect_attempts_timeout INT 30
CONFIG proxy.config.http.forward.proxy_auth_to_parent INT 0 CONFIG proxy.config.http.forward.proxy_auth_to_parent INT 0
...@@ -152,7 +148,7 @@ CONFIG proxy.config.http.cache.heuristic_lm_factor FLOAT 0.10 ...@@ -152,7 +148,7 @@ CONFIG proxy.config.http.cache.heuristic_lm_factor FLOAT 0.10
############################################################################## ##############################################################################
CONFIG proxy.config.net.connections_throttle INT 30000 CONFIG proxy.config.net.connections_throttle INT 30000
CONFIG proxy.config.net.max_connections_in INT 30000 CONFIG proxy.config.net.max_connections_in INT 30000
CONFIG proxy.config.net.max_connections_active_in INT 10000 CONFIG proxy.config.net.max_requests_in INT 10000
############################################################################## ##############################################################################
# RAM and disk cache configurations. Docs: # RAM and disk cache configurations. Docs:
...@@ -197,7 +193,7 @@ CONFIG proxy.config.reverse_proxy.enabled INT 1 ...@@ -197,7 +193,7 @@ CONFIG proxy.config.reverse_proxy.enabled INT 1
# https://docs.trafficserver.apache.org/records.config#client-related-configuration # https://docs.trafficserver.apache.org/records.config#client-related-configuration
# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/ssl_multicert.config.en.html # https://docs.trafficserver.apache.org/en/latest/admin-guide/files/ssl_multicert.config.en.html
############################################################################## ##############################################################################
CONFIG proxy.config.ssl.client.verify.server INT 0 CONFIG proxy.config.ssl.client.verify.server.properties STRING NONE
CONFIG proxy.config.ssl.client.CA.cert.filename STRING NULL CONFIG proxy.config.ssl.client.CA.cert.filename STRING NULL
CONFIG proxy.config.ssl.server.cipher_suite STRING ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-DSS-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-DSS-AES256-SHA:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA CONFIG proxy.config.ssl.server.cipher_suite STRING ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-DSS-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-DSS-AES256-SHA:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA
......
...@@ -72,7 +72,10 @@ from cryptography.x509.oid import NameOID ...@@ -72,7 +72,10 @@ from cryptography.x509.oid import NameOID
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort from slapos.testing.utils import findFreeTCPPort
from slapos.testing.utils import getPromisePluginParameterDict from slapos.testing.utils import getPromisePluginParameterDict
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( if int(os.environ.get('SLAPOS_HACK_STANDALONE', '0')) == 1:
SlapOSInstanceTestCase = object
else:
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))) os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
...@@ -506,6 +509,7 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -506,6 +509,7 @@ class TestHandler(BaseHTTPRequestHandler):
if 'x-reply-body' in self.headers.dict: if 'x-reply-body' in self.headers.dict:
config['Body'] = base64.b64decode(self.headers.dict['x-reply-body']) config['Body'] = base64.b64decode(self.headers.dict['x-reply-body'])
config['X-Drop-Header'] = self.headers.dict.get('x-drop-header')
self.configuration[self.path] = config self.configuration[self.path] = config
self.send_response(201) self.send_response(201)
...@@ -524,8 +528,14 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -524,8 +528,14 @@ class TestHandler(BaseHTTPRequestHandler):
status_code = int(config.pop('status_code')) status_code = int(config.pop('status_code'))
timeout = int(config.pop('Timeout', '0')) timeout = int(config.pop('Timeout', '0'))
compress = int(config.pop('Compress', '0')) compress = int(config.pop('Compress', '0'))
drop_header_list = []
for header in config.pop('X-Drop-Header', '').split():
drop_header_list.append(header)
header_dict = config header_dict = config
else: else:
drop_header_list = []
for header in self.headers.dict.get('x-drop-header', '').split():
drop_header_list.append(header)
response = None response = None
status_code = 200 status_code = 200
timeout = int(self.headers.dict.get('timeout', '0')) timeout = int(self.headers.dict.get('timeout', '0'))
...@@ -565,9 +575,6 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -565,9 +575,6 @@ class TestHandler(BaseHTTPRequestHandler):
if self.identification is not None: if self.identification is not None:
self.send_header('X-Backend-Identification', self.identification) self.send_header('X-Backend-Identification', self.identification)
drop_header_list = []
for header in self.headers.dict.get('x-drop-header', '').split():
drop_header_list.append(header)
if 'Content-Type' not in drop_header_list: if 'Content-Type' not in drop_header_list:
self.send_header("Content-Type", "application/json") self.send_header("Content-Type", "application/json")
if 'Set-Cookie' not in drop_header_list: if 'Set-Cookie' not in drop_header_list:
...@@ -720,6 +727,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -720,6 +727,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
data=cls.key_pem + cls.certificate_pem, data=cls.key_pem + cls.certificate_pem,
verify=cls.ca_certificate_file) verify=cls.ca_certificate_file)
assert upload.status_code == httplib.CREATED assert upload.status_code == httplib.CREATED
cls.runKedifaUpdater()
@classmethod @classmethod
def runKedifaUpdater(cls): def runKedifaUpdater(cls):
...@@ -1822,7 +1830,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -1822,7 +1830,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
proto='https', ignore_header_list=None): proto='https', ignore_header_list=None):
if ignore_header_list is None: if ignore_header_list is None:
ignore_header_list = [] ignore_header_list = []
self.assertFalse('remote_user' in backend_header_dict.keys())
if 'Host' not in ignore_header_list: if 'Host' not in ignore_header_list:
self.assertEqual( self.assertEqual(
backend_header_dict['host'], backend_header_dict['host'],
...@@ -1913,7 +1920,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -1913,7 +1920,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'_Url_backend_log', '_Url_backend_log',
r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ ' r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ '
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] ' r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] '
r'http-backend _Url-http\/_Url-backend ' r'http-backend _Url-http\/_Url-backend-http '
r'\d+/\d+\/\d+\/\d+\/\d+ ' r'\d+/\d+\/\d+\/\d+\/\d+ '
r'200 \d+ - - ---- ' r'200 \d+ - - ---- '
r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ ' r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ '
...@@ -3620,7 +3627,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3620,7 +3627,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
def test_enable_cache_server_alias(self): def test_enable_cache_server_alias(self):
...@@ -3662,7 +3669,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3662,7 +3669,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
result = fakeHTTPResult( result = fakeHTTPResult(
...@@ -3779,7 +3786,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3779,7 +3786,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
# BEGIN: Check that squid.log is correctly filled in # BEGIN: Check that squid.log is correctly filled in
...@@ -3787,13 +3794,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3787,13 +3794,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
os.path.join( os.path.join(
self.instance_path, '*', 'var', 'log', 'trafficserver', 'squid.log' self.instance_path, '*', 'var', 'log', 'trafficserver', 'squid.log'
)) ))
if len(ats_log_file_list) == 1: self.assertEqual(1, len(ats_log_file_list))
ats_log_file = ats_log_file_list[0] ats_log_file = ats_log_file_list[0]
direct_pattern = re.compile( direct_pattern = re.compile(
r'.*TCP_MISS/200 .*test-path/deeper.*enablecache.example.com' r'.*TCP_MISS/200 .*test-path/deeper.*enablecache.example.com'
'.* - DIRECT*') '.* - DIRECT*')
# ATS needs some time to flush logs # ATS needs some time to flush logs
timeout = 5 timeout = 10
b = time.time() b = time.time()
while True: while True:
direct_pattern_match = 0 direct_pattern_match = 0
...@@ -3871,7 +3878,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3871,7 +3878,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
max_stale_age = 30 max_stale_age = 30
max_age = int(max_stale_age / 2.) max_age = int(max_stale_age / 2.)
body_200 = b'Body 200' # body_200 is big enough to trigger
# https://github.com/apache/trafficserver/issues/7880
body_200 = b'Body 200' * 500
body_502 = b'Body 502' body_502 = b'Body 502'
body_502_new = b'Body 502 new' body_502_new = b'Body 502 new'
body_200_new = b'Body 200 new' body_200_new = b'Body 200 new'
...@@ -3885,6 +3894,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3885,6 +3894,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'X-Reply-Header-Cache-Control': 'max-age=%s, public' % (max_age,), 'X-Reply-Header-Cache-Control': 'max-age=%s, public' % (max_age,),
'X-Reply-Status-Code': status_code, 'X-Reply-Status-Code': status_code,
'X-Reply-Body': base64.b64encode(body), 'X-Reply-Body': base64.b64encode(body),
# drop Content-Length header to trigger
# https://github.com/apache/trafficserver/issues/7880
'X-Drop-Header': 'Content-Length',
}) })
self.assertEqual(result.status_code, httplib.CREATED) self.assertEqual(result.status_code, httplib.CREATED)
...@@ -3895,7 +3907,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3895,7 +3907,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
) )
self.assertEqual(result.status_code, status_code) self.assertEqual(result.status_code, status_code)
self.assertEqual(result.text, body) self.assertEqual(result.text, body)
self.assertNotIn('Expires', result.headers)
# backend returns something correctly # backend returns something correctly
configureResult('200', body_200) configureResult('200', body_200)
...@@ -3906,9 +3917,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3906,9 +3917,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
# even if backend returns 502, ATS gives cached result # even if backend returns 502, ATS gives cached result
checkResult(httplib.OK, body_200) checkResult(httplib.OK, body_200)
time.sleep(max_stale_age + 2) # interesting moment, time is between max_age and max_stale_age, triggers
# https://github.com/apache/trafficserver/issues/7880
time.sleep(max_age + 1)
checkResult(httplib.OK, body_200)
# max_stale_age passed, time to return 502 from the backend # max_stale_age passed, time to return 502 from the backend
time.sleep(max_stale_age + 2)
checkResult(httplib.BAD_GATEWAY, body_502) checkResult(httplib.BAD_GATEWAY, body_502)
configureResult('502', body_502_new) configureResult('502', body_502_new)
...@@ -3973,7 +3988,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3973,7 +3988,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
# check stale-if-error support is really respected if not present in the # check stale-if-error support is really respected if not present in the
...@@ -4049,7 +4064,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4049,7 +4064,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
pattern = re.compile( pattern = re.compile(
r'.*ERR_READ_TIMEOUT/504 .*test_enable_cache_ats_timeout' r'.*ERR_READ_TIMEOUT/504 .*test_enable_cache_ats_timeout'
'.*TIMEOUT_DIRECT*') '.*TIMEOUT_DIRECT*')
timeout = 5 timeout = 10
b = time.time() b = time.time()
# ATS needs some time to flush logs # ATS needs some time to flush logs
while True: while True:
...@@ -4116,7 +4131,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4116,7 +4131,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
try: try:
...@@ -4163,7 +4178,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4163,7 +4178,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotEqual(via, None) self.assertNotEqual(via, None)
self.assertRegexpMatches( self.assertRegexpMatches(
via, via,
r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/8.1.1\)$' r'^http\/1.1 caddy-frontend-1\[.*\] \(ApacheTrafficServer\/9.0.1\)$'
) )
def test_enable_http2_false(self): def test_enable_http2_false(self):
...@@ -7236,13 +7251,13 @@ backend _health-check-disabled-http ...@@ -7236,13 +7251,13 @@ backend _health-check-disabled-http
timeout server 12s timeout server 12s
timeout connect 5s timeout connect 5s
retries 3 retries 3
server _health-check-disabled-backend %s""" % (backend,), server _health-check-disabled-backend-http %s""" % (backend,),
'health-check-connect': """\ 'health-check-connect': """\
backend _health-check-connect-http backend _health-check-connect-http
timeout server 12s timeout server 12s
timeout connect 5s timeout connect 5s
retries 3 retries 3
server _health-check-connect-backend %s check inter 5s""" server _health-check-connect-backend-http %s check inter 5s"""
""" rise 1 fall 2 """ rise 1 fall 2
timeout check 2s""" % (backend,), timeout check 2s""" % (backend,),
'health-check-custom': """\ 'health-check-custom': """\
...@@ -7250,7 +7265,7 @@ backend _health-check-custom-http ...@@ -7250,7 +7265,7 @@ backend _health-check-custom-http
timeout server 12s timeout server 12s
timeout connect 5s timeout connect 5s
retries 3 retries 3
server _health-check-custom-backend %s check inter 15s""" server _health-check-custom-backend-http %s check inter 15s"""
""" rise 3 fall 7 """ rise 3 fall 7
option httpchk POST /POST-path%%20to%%20be%%20encoded HTTP/1.0 option httpchk POST /POST-path%%20to%%20be%%20encoded HTTP/1.0
timeout check 7s""" % (backend,), timeout check 7s""" % (backend,),
...@@ -7259,7 +7274,7 @@ backend _health-check-default-http ...@@ -7259,7 +7274,7 @@ backend _health-check-default-http
timeout server 12s timeout server 12s
timeout connect 5s timeout connect 5s
retries 3 retries 3
server _health-check-default-backend %s check inter 5s""" server _health-check-default-backend-http %s check inter 5s"""
""" rise 1 fall 2 """ rise 1 fall 2
option httpchk GET / HTTP/1.1 option httpchk GET / HTTP/1.1
timeout check 2s""" % (backend, ) timeout check 2s""" % (backend, )
...@@ -7334,7 +7349,7 @@ backend _health-check-default-http ...@@ -7334,7 +7349,7 @@ backend _health-check-default-http
r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ ' r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ '
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] ' r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] '
r'https-backend _health-check-failover-url-https-failover' r'https-backend _health-check-failover-url-https-failover'
r'\/_health-check-failover-url-backend ' r'\/_health-check-failover-url-backend-https '
r'\d+/\d+\/\d+\/\d+\/\d+ ' r'\d+/\d+\/\d+\/\d+\/\d+ '
r'200 \d+ - - ---- ' r'200 \d+ - - ---- '
r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ ' r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ '
...@@ -7349,7 +7364,7 @@ backend _health-check-default-http ...@@ -7349,7 +7364,7 @@ backend _health-check-default-http
r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ ' r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ '
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] ' r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] '
r'http-backend _health-check-failover-url-http-failover' r'http-backend _health-check-failover-url-http-failover'
r'\/_health-check-failover-url-backend ' r'\/_health-check-failover-url-backend-http '
r'\d+/\d+\/\d+\/\d+\/\d+ ' r'\d+/\d+\/\d+\/\d+\/\d+ '
r'200 \d+ - - ---- ' r'200 \d+ - - ---- '
r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ ' r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ '
......
...@@ -40,8 +40,8 @@ revision = v7.5.2-0-gca413c612f ...@@ -40,8 +40,8 @@ revision = v7.5.2-0-gca413c612f
[go_github.com_grafana_loki] [go_github.com_grafana_loki]
<= go-git-package <= go-git-package
go.importpath = github.com/grafana/loki go.importpath = github.com/grafana/loki
repository = https://github.com/grafana/loki repository = https://github.com/perrinjerome/loki
revision = v2.2.1-0-gbabea82e revision = v2.2.1-1-gda6d45f2
[go_github.com_influxdata_influxdb] [go_github.com_influxdata_influxdb]
<= go-git-package <= go-git-package
......
...@@ -105,15 +105,17 @@ template = ...@@ -105,15 +105,17 @@ template =
inline:{% raw %}#!/bin/sh -e inline:{% raw %}#!/bin/sh -e
basedir='${mariadb:location}' basedir='${mariadb:location}'
datadir='{{datadir}}' datadir='{{datadir}}'
[ -e "$datadir" ] || { marker=$datadir/.slapos_initializing
rm -vrf "$datadir.new" [ -d "$datadir/mysql" ] && [ ! -f "$marker" ] || {
find "$datadir/" -mindepth 1 ! -path $marker -delete || mkdir "$datadir"
touch "$marker"
"$basedir/scripts/mysql_install_db" \ "$basedir/scripts/mysql_install_db" \
--defaults-file='{{defaults_file}}' \ --defaults-file='{{defaults_file}}' \
--skip-name-resolve \ --skip-name-resolve \
--auth-root-authentication-method=normal \ --auth-root-authentication-method=normal \
--basedir="$basedir" --plugin_dir="$basedir/lib/plugin" \ --basedir="$basedir" --plugin_dir="$basedir/lib/plugin" \
--datadir="$datadir.new" --datadir="$datadir"
mv -v "$datadir.new" "$datadir" rm "$marker"
} }
{%- if environ is defined %} {%- if environ is defined %}
{%- for variable in environ.splitlines() %} {%- for variable in environ.splitlines() %}
......
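The template above initialises the datadir in place and uses a .slapos_initializing marker so that a run interrupted during mysql_install_db is detected and redone on the next start, replacing the previous create-into-"$datadir.new"-then-rename approach. A rough Python transcription of the same crash-safe pattern (paths and the mysql_install_db invocation are placeholders; the authoritative logic is the shell template above):

    import os
    import shutil
    import subprocess

    datadir = '/srv/mariadb/datadir'                       # placeholder: instance data dir
    marker = os.path.join(datadir, '.slapos_initializing')

    already_initialised = os.path.isdir(os.path.join(datadir, 'mysql'))
    if not already_initialised or os.path.exists(marker):
        # Fresh partition, or a previous initialisation was interrupted:
        # wipe everything except the marker, keep the marker while working,
        # and only remove it once mysql_install_db finished successfully.
        os.makedirs(datadir, exist_ok=True)
        for entry in os.listdir(datadir):
            path = os.path.join(datadir, entry)
            if path == marker:
                continue
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        open(marker, 'w').close()
        subprocess.check_call(
            ['mysql_install_db', '--datadir=' + datadir])  # placeholder invocation
        os.remove(marker)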
...@@ -4,15 +4,14 @@ http://www.proftpd.org/docs/ ...@@ -4,15 +4,14 @@ http://www.proftpd.org/docs/
# Features # Features
* sftp only is enabled * sftp only is enabled, with authentication by key or password
* partially uploaded files are not visible thanks to [`HiddenStores`](http://proftpd.org/docs/directives/linked/config_ref_HiddenStores.html) (in fact they are, but the name starts with `.`) * partially uploaded files are not visible thanks to [`HiddenStores`](http://proftpd.org/docs/directives/linked/config_ref_HiddenStores.html) (in fact they are, but the name starts with `.`)
* 5 failed login attempts will cause the host to be temporarily banned * 5 failed login attempts will cause the host to be temporarily banned
* support authentication against an external web service
# TODO # TODO
* only password login is enabled. enabling [`SFTPAuthorizedUserKeys`](http://www.proftpd.org/docs/contrib/mod_sftp.html#SFTPAuthorizedUserKeys) seems to break password only login
* log rotation * log rotation
* make sure SFTPLog is useful (seems very verbose and does not contain more than stdout) * make sure SFTPLog is useful (seems very verbose and does not contain more than stdout)
* make it easier to manage users ( using `mod_auth_web` against an ERP5 endpoint or accepting a list of user/password as instance parameter )
* allow configuring webhooks when new file is uploaded * allow configuring webhooks when new file is uploaded
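For reference, a minimal client-side sketch (not part of the software release) of using such an instance with password authentication; the URL, username and password below are placeholders for the published connection parameters:

    import pysftp
    from urllib.parse import urlparse

    # Placeholders for the published connection parameters of an instance.
    url = urlparse('sftp://[2001:db8::1]:8022')
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None  # the host key is generated per instance, so not checked here

    with pysftp.Connection(url.hostname, port=url.port,
                           username='proftpd', password='secret',
                           cnopts=cnopts) as sftp:
        # partially uploaded files are hidden by HiddenStores until complete
        sftp.put('local-file')
        print(sftp.listdir('.'))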
...@@ -19,8 +19,8 @@ md5sum = efb4238229681447aa7fe73898dffad4 ...@@ -19,8 +19,8 @@ md5sum = efb4238229681447aa7fe73898dffad4
[instance-default] [instance-default]
filename = instance-default.cfg.in filename = instance-default.cfg.in
md5sum = 2a2c066d7d40dd8545f3008f434ee842 md5sum = dae19ec06f8da9fa2980a6d2bdf3da54
[proftpd-config-file] [proftpd-config-file]
filename = proftpd-config-file.cfg.in filename = proftpd-config-file.cfg.in
md5sum = a7c0f4607c378b640379cc258a8aadfa md5sum = 82cc600f4fce9852370f9d1f7c4cd3a6
...@@ -66,16 +66,24 @@ ban-log=${directory:log}/proftpd-ban.log ...@@ -66,16 +66,24 @@ ban-log=${directory:log}/proftpd-ban.log
ssh-host-rsa-key=${ssh-host-rsa-key:output} ssh-host-rsa-key=${ssh-host-rsa-key:output}
ssh-host-dsa-key=${ssh-host-dsa-key:output} ssh-host-dsa-key=${ssh-host-dsa-key:output}
ssh-host-ecdsa-key=${ssh-host-ecdsa-key:output} ssh-host-ecdsa-key=${ssh-host-ecdsa-key:output}
ssh-authorized-keys-dir = ${directory:ssh-authorized-keys-dir} ssh-authorized-key = ${ssh-authorized-keys:rendered}
ban-table=${directory:srv}/proftpd-ban-table ban-table=${directory:srv}/proftpd-ban-table
control-socket=${directory:var}/proftpd.sock control-socket=${directory:var}/proftpd.sock
auth-user-file=${auth-user-file:output} auth-user-file=${auth-user-file:output}
authentication-url = {{ slapparameter_dict.get('authentication-url', '')}}
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = command-line =
{{ proftpd_bin }} --nodaemon --config ${proftpd-config-file:rendered} {{ proftpd_bin }} --nodaemon --config ${proftpd-config-file:rendered}
wrapper-path = ${directory:service}/proftpd wrapper-path = ${directory:service}/proftpd
[ssh-authorized-keys]
rendered = ${directory:ssh-authorized-keys-dir}/authorized_keys
{% if slapparameter_dict.get('ssh-key') %}
recipe = slapos.recipe.template:jinja2
template = inline:{{ slapparameter_dict['ssh-key'] | indent }}
{% endif %}
[proftpd-listen-promise] [proftpd-listen-promise]
<= monitor-promise-base <= monitor-promise-base
module = check_port_listening module = check_port_listening
...@@ -133,5 +141,9 @@ instance-promises = ...@@ -133,5 +141,9 @@ instance-promises =
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
url = ${proftpd:url} url = ${proftpd:url}
{% if not slapparameter_dict.get('authentication-url') %}
username = ${proftpd-password:username} username = ${proftpd-password:username}
{% if not slapparameter_dict.get('ssh-key') %}
password = ${proftpd-password:passwd} password = ${proftpd-password:passwd}
{% endif %}
{% endif %}
{ {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
"description": "Parameters to instantiate PoFTPd", "description": "Parameters to instantiate ProFTPd",
"additionalProperties": false, "additionalProperties": false,
"properties": { "properties": {
"port": { "port": {
"description": "Port number to listen to - default to 8022", "description": "Port number to listen to",
"type": "number" "type": "number",
"default": 8022
},
"ssh-key": {
"description": "SSH public key, in RFC4716 format. Note that this is not the default format used by openssh and that openssh keys must be converted with `ssh-keygen -e -f ~/.ssh/id_rsa.pub`",
"type": "string"
},
"authentication-url": {
"description": "URL of an HTTP endpoint to authenticate users. Endoint recieve a `application/x-www-form-urlencoded` POST request with `login` and `password` arguments and must respond with a `X-Proftpd-Authentication-Result: Success` header to signal successful authentication",
"type": "string"
} }
} }
} }
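To make the authentication-url contract described in the schema above concrete, here is a minimal, framework-free sketch of such an endpoint (handler and credential names are illustrative); it mirrors the AuthenticationServer used in the tests further down: accept the form-encoded POST and answer with the X-Proftpd-Authentication-Result: Success header only when the credentials are valid.

    from http.server import BaseHTTPRequestHandler, HTTPServer
    from urllib.parse import parse_qs

    class AuthHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            length = int(self.headers.get('Content-Length', 0))
            form = parse_qs(self.rfile.read(length).decode())
            # Replace this check with a real credential lookup.
            if form.get('login') == ['bob'] and form.get('password') == ['secret']:
                self.send_response(200)
                self.send_header('X-Proftpd-Authentication-Result', 'Success')
                self.end_headers()
                self.wfile.write(b'OK')
            else:
                self.send_response(401)
                self.end_headers()
                self.wfile.write(b'Forbidden')

    if __name__ == '__main__':
        HTTPServer(('127.0.0.1', 8080), AuthHandler).serve_forever()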
...@@ -14,7 +14,7 @@ ...@@ -14,7 +14,7 @@
"optional": true "optional": true
}, },
"password": { "password": {
"description": "Password for default username", "description": "Password for default username, when not using ssh-key",
"type": "string", "type": "string",
"optional": true "optional": true
} }
......
...@@ -20,7 +20,7 @@ SFTPEngine on ...@@ -20,7 +20,7 @@ SFTPEngine on
SFTPHostKey {{ proftpd['ssh-host-rsa-key'] }} SFTPHostKey {{ proftpd['ssh-host-rsa-key'] }}
SFTPHostKey {{ proftpd['ssh-host-dsa-key'] }} SFTPHostKey {{ proftpd['ssh-host-dsa-key'] }}
SFTPHostKey {{ proftpd['ssh-host-ecdsa-key'] }} SFTPHostKey {{ proftpd['ssh-host-ecdsa-key'] }}
#SFTPAuthorizedUserKeys file:{{ proftpd['ssh-authorized-keys-dir'] }}%u SFTPAuthorizedUserKeys file:{{ proftpd['ssh-authorized-key'] }}
# Logging # Logging
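The SFTPAuthorizedUserKeys line above points at the authorized_keys file rendered from the `ssh-key` instance parameter, which has to be in RFC4716 format. Besides the `ssh-keygen -e -f ~/.ssh/id_rsa.pub` conversion mentioned in the schema, a rough paramiko equivalent, mirroring the wrapping the tests below apply to a generated key (the key path is a placeholder):

    import paramiko

    # Placeholder path; any key type accepted by the server works the same way.
    key = paramiko.RSAKey.from_private_key_file('/home/user/.ssh/id_rsa')
    ssh_key_parameter = (
        '---- BEGIN SSH2 PUBLIC KEY ----\n'
        + key.get_base64()
        + '\n---- END SSH2 PUBLIC KEY ----'
    )
    print(ssh_key_parameter)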
...@@ -34,6 +34,23 @@ RequireValidShell off ...@@ -34,6 +34,23 @@ RequireValidShell off
AuthUserFile {{ proftpd['auth-user-file'] }} AuthUserFile {{ proftpd['auth-user-file'] }}
# http authentication
{% if proftpd['authentication-url'] %}
LoadModule mod_auth_web.c
AuthWebURL {{ proftpd['authentication-url'] }}
AuthWebRequireHeader "X-Proftpd-Authentication-Result: Success"
AuthWebUsernameParamName login
AuthWebPasswordParamName password
AuthWebLocalUser {{ proftpd['user'] }}
# mod_auth_web only reads /etc/passwd to know the home directories of the users,
# so we rewrite the relative paths to be relative to the data dir.
LoadModule mod_rewrite.c
RewriteEngine on
RewriteCondition %m !USER
RewriteRule ^([^/]+.*) {{ proftpd['data-dir'] }}$1
{% endif %}
# Prevent partially uploaded files to be visible # Prevent partially uploaded files to be visible
HiddenStores on HiddenStores on
DeleteAbortedStores on DeleteAbortedStores on
......
{ {
"name": "ProFTPd", "name": "ProFTPd",
"description": "ProFTPd as a SFTP server with virtual users", "description": "ProFTPd as a SFTP server with virtual users",
"serialisation": "json-in-xml", "serialisation": "xml",
"software-type": { "software-type": {
"default": { "default": {
"title": "Default", "title": "Default",
......
...@@ -27,18 +27,23 @@ ...@@ -27,18 +27,23 @@
import os import os
import shutil import shutil
from urllib.parse import urlparse from urllib.parse import urlparse, parse_qs
import tempfile import tempfile
import io import io
import subprocess import subprocess
from http.server import BaseHTTPRequestHandler
import logging
import pysftp import pysftp
import psutil import psutil
import paramiko
from paramiko.ssh_exception import SSHException from paramiko.ssh_exception import SSHException
from paramiko.ssh_exception import AuthenticationException from paramiko.ssh_exception import AuthenticationException
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort from slapos.testing.utils import findFreeTCPPort
from slapos.testing.utils import ManagedHTTPServer
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( os.path.abspath(
...@@ -176,7 +181,7 @@ class TestUserManagement(ProFTPdTestCase): ...@@ -176,7 +181,7 @@ class TestUserManagement(ProFTPdTestCase):
class TestBan(ProFTPdTestCase): class TestBan(ProFTPdTestCase):
def test_client_are_banned_after_5_wrong_passwords(self): def test_client_are_banned_after_5_wrong_passwords(self):
# Simulate 5 failed login attempts # Simulate 5 failed login attempts
for i in range(5): for _ in range(5):
with self.assertRaisesRegex(AuthenticationException, with self.assertRaisesRegex(AuthenticationException,
'Authentication failed'): 'Authentication failed'):
self._getConnection(password='wrong') self._getConnection(password='wrong')
...@@ -237,3 +242,119 @@ class TestFilesAndSocketsInInstanceDir(ProFTPdTestCase): ...@@ -237,3 +242,119 @@ class TestFilesAndSocketsInInstanceDir(ProFTPdTestCase):
s for s in self.proftpdProcess.connections('unix') s for s in self.proftpdProcess.connections('unix')
if not s.laddr.startswith(self.computer_partition_root_path) if not s.laddr.startswith(self.computer_partition_root_path)
]) ])
class TestSSHKey(TestSFTPOperations):
@classmethod
def getInstanceParameterDict(cls):
cls.ssh_key = paramiko.DSSKey.generate(1024)
return {
'ssh-key':
'---- BEGIN SSH2 PUBLIC KEY ----\n{}\n---- END SSH2 PUBLIC KEY ----'.
format(cls.ssh_key.get_base64())
}
def _getConnection(self, username=None):
"""Override to log in with the SSH key
"""
parameter_dict = self.computer_partition.getConnectionParameterDict()
sftp_url = urlparse(parameter_dict['url'])
username = username or parameter_dict['username']
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
with tempfile.NamedTemporaryFile(mode='w') as keyfile:
self.ssh_key.write_private_key(keyfile)
keyfile.flush()
return pysftp.Connection(
sftp_url.hostname,
port=sftp_url.port,
cnopts=cnopts,
username=username,
private_key=keyfile.name,
)
def test_authentication_failure(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
sftp_url = urlparse(parameter_dict['url'])
with self.assertRaisesRegex(AuthenticationException,
'Authentication failed'):
self._getConnection(username='wrong username')
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
# wrong private key
with tempfile.NamedTemporaryFile(mode='w') as keyfile:
paramiko.DSSKey.generate(1024).write_private_key(keyfile)
keyfile.flush()
with self.assertRaisesRegex(AuthenticationException,
'Authentication failed'):
pysftp.Connection(
sftp_url.hostname,
port=sftp_url.port,
cnopts=cnopts,
username=parameter_dict['username'],
private_key=keyfile.name,
)
def test_published_parameters(self):
# no password is published, we only log in with the key
parameter_dict = self.computer_partition.getConnectionParameterDict()
self.assertIn('username', parameter_dict)
self.assertNotIn('password', parameter_dict)
class TestAuthenticationURL(TestSFTPOperations):
class AuthenticationServer(ManagedHTTPServer):
class RequestHandler(BaseHTTPRequestHandler):
def do_POST(self):
# type: () -> None
assert self.headers[
'Content-Type'] == 'application/x-www-form-urlencoded', self.headers[
'Content-Type']
posted_data = dict(
parse_qs(
self.rfile.read(int(self.headers['Content-Length'])).decode()))
if posted_data['login'] == ['login'] and posted_data['password'] == [
'password'
]:
self.send_response(200)
self.send_header("X-Proftpd-Authentication-Result", "Success")
self.end_headers()
return self.wfile.write(b"OK")
self.send_response(401)
return self.wfile.write(b"Forbidden")
log_message = logging.getLogger(__name__ + '.AuthenticationServer').info
@classmethod
def getInstanceParameterDict(cls):
return {
'authentication-url':
cls.getManagedResource('authentication-server',
TestAuthenticationURL.AuthenticationServer).url
}
def _getConnection(self, username='login', password='password'):
"""Override to log in with the HTTP credentials by default.
"""
return super()._getConnection(username=username, password=password)
def test_authentication_success(self):
with self._getConnection() as sftp:
self.assertEqual(sftp.listdir('.'), [])
def test_authentication_failure(self):
with self.assertRaisesRegex(AuthenticationException,
'Authentication failed'):
self._getConnection(username='login', password='wrong')
def test_published_parameters(self):
# no login or password is published, logins are defined by their
# user name
parameter_dict = self.computer_partition.getConnectionParameterDict()
self.assertNotIn('username', parameter_dict)
self.assertNotIn('password', parameter_dict)
...@@ -203,7 +203,7 @@ stevedore = 1.21.0:whl ...@@ -203,7 +203,7 @@ stevedore = 1.21.0:whl
subprocess32 = 3.5.4 subprocess32 = 3.5.4
unicodecsv = 0.14.1 unicodecsv = 0.14.1
wcwidth = 0.2.5 wcwidth = 0.2.5
wheel = 0.35.1 wheel = 0.35.1:whl
xml-marshaller = 1.0.2 xml-marshaller = 1.0.2
zc.lockfile = 1.0.2 zc.lockfile = 1.0.2
zdaemon = 4.2.0 zdaemon = 4.2.0
......