Commit 7fae24d5 authored by Łukasz Nowak

caddy-frontend: Stabilise proxy headers

On the backend side, the following headers are asserted in the tests:

 * X-Forwarded-For
 * X-Forwarded-Proto
 * X-Forwarded-Port
 * Host

In order to pass X-Forwarded-For cleanly from the frontend to the backend,
it is relayed as X-Forwarded-For-Real in the case of cached slaves.

Noted a problem when an IPv6 endpoint is used, as in that case the 6tunnel
IP would be reported instead of the client's.
parent 4d0a063e
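
For reference, the header relay for cached slaves implemented by the Caddy templates in this change can be summarised by the following minimal Python sketch (illustrative only; the function names are made up and nothing here is part of the codebase):

def frontend_caddy(headers, client_ip):
    # force reset of X-Forwarded-For, regardless of what the client sent
    headers['X-Forwarded-For'] = client_ip
    # provide a copy for the components behind the cache (ATS and the
    # cached-slave Caddy)
    headers['X-Forwarded-For-Real'] = client_ip
    return headers

def cached_slave_caddy(headers):
    # restore the client IP stashed before ATS and drop the helper header,
    # so that X-Forwarded-For-Real never reaches the backend
    headers['X-Forwarded-For'] = headers['X-Forwarded-For-Real']
    del headers['X-Forwarded-For-Real']
    return headers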
......@@ -488,3 +488,8 @@ Each `caddy-frontend-N` partition downloads certificates from the kedifa server.
Caucase (exposed by ``kedifa-caucase-url`` in master partition parameters) is used to handle certificates for authentication to kedifa server.
If ``automatic-internal-kedifa-caucase-csr`` is enabled (by default it is) there are scripts running on master partition to simulate human to sign certificates for each caddy-frontend-N node.
Support for X-Real-Ip and X-Forwarded-For
-----------------------------------------
X-Forwarded-For and X-Real-Ip are transmitted to the backend, but they are correct only for IPv4 access to the frontend. In case of IPv6 access, the provided IP will be wrong, because 6tunnel is used.
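
A minimal sketch, assuming a dict-like view of the request headers on the backend side (the helper name is hypothetical, not part of this repository), of how these headers can be consumed given the caveat above:

def client_ip_from_headers(headers):
    # prefer the first X-Forwarded-For entry, fall back to X-Real-Ip;
    # for IPv6 access to the frontend both values are the 6tunnel IP,
    # not the real client address
    forwarded_for = headers.get('X-Forwarded-For', '')
    if forwarded_for:
        return forwarded_for.split(',')[0].strip()
    return headers.get('X-Real-Ip')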
......@@ -50,11 +50,11 @@ md5sum = f20d6c3d2d94fb685f8d26dfca1e822b
[template-default-slave-virtualhost]
_update_hash_filename_ = templates/default-virtualhost.conf.in
md5sum = 7e26935bb6daf00d8fc01d97eebc7abd
md5sum = cb3f7ace99346f64f2007c3e94b05800
[template-cached-slave-virtualhost]
_update_hash_filename_ = templates/cached-virtualhost.conf.in
md5sum = a73839d777fbd548286bbeccf47be335
md5sum = e839ca3cb308f7fcdfa06c2f1b95e93f
[template-log-access]
_update_hash_filename_ = templates/template-log-access.conf.in
......@@ -70,7 +70,7 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[template-trafficserver-records-config]
_update_hash_filename_ = templates/trafficserver/records.config.jinja2
md5sum = 3a4e378932ffc7768426bb7a897e2c45
md5sum = f3f31188de56bb35383335b3219537f4
[template-trafficserver-storage-config]
_update_hash_filename_ = templates/trafficserver/storage.config.jinja2
......
......@@ -22,7 +22,10 @@
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
transparent
header_upstream Host {host}
{# header_upstream -X-Forwarded-For - Caddy's behaviour when removing and then setting a header is unstable, so for now the original header has to be kept, even though in that case it comes from Caddy itself, after passing through ATS #}
header_upstream X-Forwarded-For {>X-Forwarded-For-Real}
header_upstream -X-Forwarded-For-Real
timeout {{ slave_parameter['request_timeout'] }}s
{%- if ssl_proxy_verify %}
{%- if 'path_to_ssl_proxy_ca_crt' in slave_parameter %}
......@@ -47,7 +50,10 @@
proxy / {{ slave_parameter.get('https_backend_url', '') }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
transparent
header_upstream Host {host}
{# header_upstream -X-Forwarded-For - Caddy's behaviour when removing and then setting a header is unstable, so for now the original header has to be kept, even though in that case it comes from Caddy itself, after passing through ATS #}
header_upstream X-Forwarded-For {>X-Forwarded-For-Real}
header_upstream -X-Forwarded-For-Real
timeout {{ slave_parameter['request_timeout'] }}s
{%- if ssl_proxy_verify %}
{%- if 'path_to_ssl_proxy_ca_crt' in slave_parameter %}
......
{%- set TRUE_VALUES = ['y', 'yes', '1', 'true'] %}
{%- set enable_cache = slave_parameter.get('enable_cache', '').lower() in TRUE_VALUES %}
{%- set disable_no_cache_header = slave_parameter.get('disable-no-cache-request', '').lower() in TRUE_VALUES %}
{%- set disable_via_header = slave_parameter.get('disable-via-header', '').lower() in TRUE_VALUES %}
{%- set prefer_gzip = slave_parameter.get('prefer-gzip-encoding-to-backend', '').lower() in TRUE_VALUES %}
......@@ -104,6 +105,12 @@
proxy /{{ proxy_name }} {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
{%- if proxy_name == 'prefer-gzip' %}
without /prefer-gzip
header_upstream Accept-Encoding gzip
......@@ -176,6 +183,12 @@
proxy / {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
transparent
insecure_skip_verify
}
......@@ -186,8 +199,13 @@
proxy /proxy/ {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
header_upstream X-Real-IP {remote}
header_upstream Host {host}
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
transparent
websocket
without /proxy/
insecure_skip_verify
......@@ -197,6 +215,12 @@
proxy / {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
{%- if websocket_transparent %}
transparent
{%- endif %}
......@@ -206,6 +230,12 @@
proxy /{{ websocket_path }} {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
websocket
{%- if websocket_transparent %}
transparent
......@@ -217,6 +247,12 @@
proxy / {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
websocket
{%- if websocket_transparent %}
transparent
......@@ -239,6 +275,12 @@
proxy /{{ proxy_name }} {{ backend_url }} {
try_duration {{ slave_parameter['proxy_try_duration'] }}s
try_interval {{ slave_parameter['proxy_try_interval'] }}ms
# force reset of X-Forwarded-For
header_upstream X-Forwarded-For {remote}
{%- if enable_cache %}
# provide a header for other components
header_upstream X-Forwarded-For-Real {remote}
{%- endif %}
{%- if proxy_name == 'prefer-gzip' %}
without /prefer-gzip
header_upstream Accept-Encoding gzip
......
......@@ -27,6 +27,14 @@ CONFIG proxy.config.http.cache.open_write_fail_action INT 2
CONFIG proxy.config.body_factory.template_sets_dir STRING {{ ats_configuration['templates-dir'] }}
# Support stale-if-error by returning cached content on backend 5xx or unavailability
CONFIG proxy.config.http.negative_revalidating_enabled INT 1
##############################################################################
# Proxy users variables. Docs:
# https://docs.trafficserver.apache.org/records.config#proxy-user-variables
##############################################################################
# Do not modify headers, as it needlessly pollutes information
CONFIG proxy.config.http.insert_client_ip INT 0
CONFIG proxy.config.http.insert_squid_x_forwarded_for INT 0
##############################################################################
# Thread configurations. Docs:
......@@ -98,13 +106,6 @@ CONFIG proxy.config.http.down_server.abort_threshold INT 10
CONFIG proxy.config.http.negative_caching_enabled INT 0
CONFIG proxy.config.http.negative_caching_lifetime INT 1800
##############################################################################
# Proxy users variables. Docs:
# https://docs.trafficserver.apache.org/records.config#proxy-user-variables
##############################################################################
CONFIG proxy.config.http.insert_client_ip INT 1
CONFIG proxy.config.http.insert_squid_x_forwarded_for INT 1
##############################################################################
# Security. Docs:
# https://docs.trafficserver.apache.org/records.config#security
......
......@@ -47,6 +47,7 @@ import base64
import re
from slapos.recipe.librecipe import generateHashFromFiles
import xml.etree.ElementTree as ET
import urlparse
try:
......@@ -76,6 +77,9 @@ HTTPS_PORT = '11443'
CAUCASE_PORT = '15090'
KEDIFA_PORT = '15080'
# IP to originate requests from
# must not be the partition's own IP
SOURCE_IP = '127.0.0.1'
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -401,12 +405,17 @@ class TestDataMixin(object):
def fakeHTTPSResult(domain, real_ip, path, port=HTTPS_PORT,
headers=None, cookies=None, source_ip=None):
headers=None, cookies=None, source_ip=SOURCE_IP):
if headers is None:
headers = {}
# workaround for the requests problem of setting Accept-Encoding
# https://github.com/requests/requests/issues/2234
headers.setdefault('Accept-Encoding', 'dummy')
# Headers to trick the whole system, as a rogue user would do
headers.setdefault('X-Forwarded-For', '192.168.0.1')
headers.setdefault('X-Forwarded-Proto', 'irc')
headers.setdefault('X-Forwarded-Port', '17')
session = requests.Session()
session.mount(
'https://%s:%s' % (domain, port),
......@@ -426,14 +435,23 @@ def fakeHTTPSResult(domain, real_ip, path, port=HTTPS_PORT,
def fakeHTTPResult(domain, real_ip, path, port=HTTP_PORT,
headers=None):
headers=None, source_ip=SOURCE_IP):
if headers is None:
headers = {}
# workaround for the requests problem of setting Accept-Encoding
# https://github.com/requests/requests/issues/2234
headers.setdefault('Accept-Encoding', 'dummy')
headers['Host'] = domain
return requests.get(
# Headers to trick the whole system, as a rogue user would do
headers.setdefault('X-Forwarded-For', '192.168.0.1')
headers.setdefault('X-Forwarded-Proto', 'irc')
headers.setdefault('X-Forwarded-Port', '17')
headers['Host'] = '%s:%s' % (domain, port)
session = requests.Session()
if source_ip is not None:
new_source = source.SourceAddressAdapter(source_ip)
session.mount('http://', new_source)
session.mount('https://', new_source)
return session.get(
'http://%s:%s/%s' % (real_ip, port, path),
headers=headers,
allow_redirects=False,
......@@ -1540,7 +1558,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://empty.example.com/test-path',
'https://empty.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -1575,6 +1593,33 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertFalse('connection-parameter-hash' in line)
self.assertFalse('timestamp' in line)
def assertBackendHeaders(
self, backend_header_dict, domain, source_ip=SOURCE_IP, port=HTTPS_PORT,
proto='https', ignore_header_list=None):
if ignore_header_list is None:
ignore_header_list = []
self.assertFalse('remote_user' in backend_header_dict.keys())
self.assertFalse('x-forwarded-for-real' in backend_header_dict.keys())
if 'Host' not in ignore_header_list:
self.assertEqual(
backend_header_dict['host'],
'%s:%s' % (domain, port))
# XXX It's really hard to play with Caddy headers, thus we have to keep
# some of them. As other solutions come in the future, more control
# over the sent X-Forwarded-For will be possible.
self.assertEqual(
backend_header_dict['x-forwarded-for'].split(',')[0],
source_ip
)
self.assertEqual(
backend_header_dict['x-forwarded-port'],
port
)
self.assertEqual(
backend_header_dict['x-forwarded-proto'],
proto
)
def test_url(self):
parameter_dict = self.assertSlaveBase('Url')
......@@ -1597,11 +1642,10 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertEqual(j['Incoming Headers']['timeout'], '10')
self.assertFalse('Content-Encoding' in result.headers)
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'secured=value;secure, nonsecured=value',
......@@ -1618,7 +1662,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://url.example.com/test-path/deeper',
'https://url.example.com:%s/test-path/deeper' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -2243,7 +2287,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -2262,7 +2306,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://typezope.example.com/test-path/deep/.././deeper',
'https://typezope.example.com:%s/test-path/deep/.././deeper' % (
HTTP_PORT,),
result.headers['Location']
)
......@@ -2282,7 +2327,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -2317,7 +2362,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -2360,7 +2405,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -2380,7 +2425,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deep/.././deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deep/.././deeper' % (
parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -2397,7 +2443,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -2420,7 +2466,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deep/.././deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deep/.././deeper' % (
parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -2519,6 +2566,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2548,6 +2596,10 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
parsed = urlparse.urlparse(self.backend_url)
self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2579,6 +2631,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertFalse('connection' in j['Incoming Headers'].keys())
self.assertTrue('x-real-ip' in j['Incoming Headers'])
......@@ -2597,6 +2650,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2618,6 +2672,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2648,6 +2703,10 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
parsed = urlparse.urlparse(self.backend_url)
self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host'])
self.assertFalse('connection' in j['Incoming Headers'].keys())
self.assertFalse('x-real-ip' in j['Incoming Headers'])
......@@ -2666,6 +2725,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2687,6 +2749,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertBackendHeaders(
j['Incoming Headers'], parsed.hostname, port='17', proto='irc',
ignore_header_list=['Host'])
self.assertEqual(
'Upgrade',
j['Incoming Headers']['connection']
......@@ -2803,7 +2868,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://sslproxyverifysslproxycacrtunverified.example.com/test-path',
'https://sslproxyverifysslproxycacrtunverified.example.com:%s/'
'test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -2823,7 +2889,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertFalse('Content-Encoding' in result.headers)
......@@ -2841,7 +2907,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://sslproxyverifysslproxycacrt.example.com/test-path',
'https://sslproxyverifysslproxycacrt.example.com:%s/test-path' % (
HTTP_PORT,),
result_http.headers['Location']
)
......@@ -2914,8 +2981,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://enablecachesslproxyverifysslproxycacrtunverified.example.com/'
'test-path/deeper',
'https://enablecachesslproxyverifysslproxycacrtunverified.example.com'
':%s/test-path/deeper' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -3052,8 +3119,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://typezopesslproxyverifysslproxycacrtunverified.example.com/'
'test-path',
'https://typezopesslproxyverifysslproxycacrtunverified.example.com:%s/'
'test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -3072,7 +3139,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
j = result.json()
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertFalse('remote_user' in j['Incoming Headers'].keys())
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqualResultJson(
result,
......@@ -3091,7 +3158,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://typezopesslproxyverifysslproxycacrt.example.com/test-path',
'https://typezopesslproxyverifysslproxycacrt.example.com:'
'%s/test-path' % (HTTP_PORT,),
result.headers['Location']
)
......@@ -3131,7 +3199,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://monitoripv6test.example.com/test-path',
'https://monitoripv6test.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -3168,7 +3236,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://monitoripv4test.example.com/test-path',
'https://monitoripv4test.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -3207,7 +3275,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://ciphers.example.com/test-path',
'https://ciphers.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
......@@ -3269,6 +3337,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3310,6 +3379,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3328,18 +3398,23 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://enablecacheserveralias1.example.com/test-path/deeper',
'https://enablecacheserveralias1.example.com:%s/test-path/deeper' % (
HTTP_PORT,),
result.headers['Location']
)
def test_enable_cache(self):
parameter_dict = self.assertSlaveBase('enable_cache')
source_ip = '127.0.0.1'
result = fakeHTTPSResult(
parameter_dict['domain'], parameter_dict['public-ipv4'],
'test-path/deep/.././deeper', headers={
'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-'
'revalidate=3600, stale-if-error=3600'})
'revalidate=3600, stale-if-error=3600',
},
source_ip=source_ip
)
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
......@@ -3366,6 +3441,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3431,6 +3507,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3598,6 +3675,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3644,6 +3722,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
......@@ -3734,6 +3813,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'gzip', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3744,6 +3825,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'deflate', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3770,6 +3853,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'],
port=HTTP_PORT, proto='http')
self.assertEqual(
'gzip', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3780,6 +3866,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'],
port=HTTP_PORT, proto='http')
self.assertEqual(
'deflate', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3810,6 +3899,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'gzip', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3820,6 +3911,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'deflate', result.json()['Incoming Headers']['accept-encoding'])
......@@ -3850,7 +3943,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deeper' % (parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -3865,7 +3958,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deeper' % (parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -3879,7 +3972,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deeper' % (parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -3893,7 +3986,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://%s/test-path/deeper' % (parameter_dict['domain'],),
'https://%s:%s/test-path/deeper' % (parameter_dict['domain'], HTTP_PORT),
result.headers['Location']
)
......@@ -3914,6 +4007,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
self.assertBackendHeaders(
result.json()['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'Coffee=present', result.json()['Incoming Headers']['cookie'])
......@@ -3940,7 +4035,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
self.assertEqual(
'https://urlhttpsurl.example.com/test-path/deeper',
'https://urlhttpsurl.example.com:%s/test-path/deeper' % (HTTP_PORT,),
result_http.headers['Location']
)
......