black formatting
Parent: d7333c2bed
Commit: 53336d9e0b
@@ -98,6 +98,8 @@ all of curl's output and trace file are found in `tests/http/gen/curl`.
 
 There is a lot of [`pytest` documentation](https://docs.pytest.org/) with examples. No use in repeating that here. Assuming you are somewhat familiar with it, it is useful to know how *this* general test suite is set up, especially if you want to add test cases.
 
+Please run [black](https://pypi.org/project/black/) when adding new tests.
+
 ### Servers
 
 In `conftest.py`, 3 "fixtures" are defined that are used by all test cases:
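As a quick illustration of the kind of rewrite this commit applies, here is a minimal sketch driving black through its Python API (`black.format_str` and `black.Mode` are black's documented entry points; the sample line is taken from the diffs below):

```python
import black

# a line in the pre-commit style: single-quoted f-string
src = "url = f'https://{env.domain1}:{env.https_port}/data.json'\n"

# black normalizes string literals to double quotes (and reflows
# over-long calls, adds trailing commas, etc.)
print(black.format_str(src, mode=black.Mode()), end="")
# -> url = f"https://{env.domain1}:{env.https_port}/data.json"
```

In practice the whole test directory is formatted in one go, e.g. `python -m black tests/http` (path assumed).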
@@ -1,4 +1,4 @@
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -30,56 +30,61 @@ from typing import Generator
 
 import pytest
 
-sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
+sys.path.append(os.path.join(os.path.dirname(__file__), "."))
 
 from testenv import Env, Nghttpx, Httpd, NghttpxQuic, NghttpxFwd
 
 
 def pytest_report_header(config):
     # Env inits its base properties only once, we can report them here
     env = Env()
     report = [
-        f'Testing curl {env.curl_version()}',
-        f'  platform: {platform.platform()}',
-        f'  curl: Version: {env.curl_version_string()}',
-        f'  curl: Features: {env.curl_features_string()}',
-        f'  curl: Protocols: {env.curl_protocols_string()}',
-        f'  httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}',
-        f'  httpd-proxy: {env.httpd_version()}, http:{env.proxy_port} https:{env.proxys_port}'
+        f"Testing curl {env.curl_version()}",
+        f"  platform: {platform.platform()}",
+        f"  curl: Version: {env.curl_version_string()}",
+        f"  curl: Features: {env.curl_features_string()}",
+        f"  curl: Protocols: {env.curl_protocols_string()}",
+        f"  httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}",
+        f"  httpd-proxy: {env.httpd_version()}, http:{env.proxy_port} https:{env.proxys_port}",
     ]
     if env.have_h3():
-        report.extend([
-            f'  nghttpx: {env.nghttpx_version()}, h3:{env.https_port}'
-        ])
+        report.extend([f"  nghttpx: {env.nghttpx_version()}, h3:{env.https_port}"])
     if env.has_caddy():
-        report.extend([
-            f'  Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}'
-        ])
+        report.extend(
+            [
+                f"  Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}"
+            ]
+        )
     if env.has_vsftpd():
-        report.extend([
-            f'  VsFTPD: {env.vsftpd_version()}, ftp:{env.ftp_port}, ftps:{env.ftps_port}'
-        ])
-    buildinfo_fn = os.path.join(env.build_dir, 'buildinfo.txt')
+        report.extend(
+            [
+                f"  VsFTPD: {env.vsftpd_version()}, ftp:{env.ftp_port}, ftps:{env.ftps_port}"
+            ]
+        )
+    buildinfo_fn = os.path.join(env.build_dir, "buildinfo.txt")
     if os.path.exists(buildinfo_fn):
-        with open(buildinfo_fn, 'r') as file_in:
+        with open(buildinfo_fn, "r") as file_in:
             for line in file_in:
                 line = line.strip()
-                if line and not line.startswith('#'):
+                if line and not line.startswith("#"):
                     report.extend([line])
-    return '\n'.join(report)
+    return "\n".join(report)
 
 
 # TODO: remove this and repeat argument everywhere, pytest-repeat can be used to repeat tests
 def pytest_generate_tests(metafunc):
     if "repeat" in metafunc.fixturenames:
-        metafunc.parametrize('repeat', [0])
+        metafunc.parametrize("repeat", [0])
 
 
 @pytest.fixture(scope="package")
 def env(pytestconfig) -> Env:
     env = Env(pytestconfig=pytestconfig)
     level = logging.DEBUG if env.verbose > 0 else logging.INFO
-    logging.getLogger('').setLevel(level=level)
-    if not env.curl_has_protocol('http'):
+    logging.getLogger("").setLevel(level=level)
+    if not env.curl_has_protocol("http"):
         pytest.skip("curl built without HTTP support")
-    if not env.curl_has_protocol('https'):
+    if not env.curl_has_protocol("https"):
         pytest.skip("curl built without HTTPS support")
     if env.setup_incomplete():
         pytest.skip(env.incomplete_reason())
@@ -87,24 +92,25 @@ def env(pytestconfig) -> Env:
     env.setup()
     return env
 
 
 @pytest.fixture(scope="package", autouse=True)
 def log_global_env_facts(record_testsuite_property, env):
     record_testsuite_property("http-port", env.http_port)
 
 
-@pytest.fixture(scope='package')
+@pytest.fixture(scope="package")
 def httpd(env) -> Generator[Httpd, None, None]:
     httpd = Httpd(env=env)
     if not httpd.exists():
-        pytest.skip(f'httpd not found: {env.httpd}')
+        pytest.skip(f"httpd not found: {env.httpd}")
     httpd.clear_logs()
     if not httpd.start():
-        pytest.fail(f'failed to start httpd: {env.httpd}')
+        pytest.fail(f"failed to start httpd: {env.httpd}")
     yield httpd
     httpd.stop()
 
 
-@pytest.fixture(scope='package')
+@pytest.fixture(scope="package")
 def nghttpx(env, httpd) -> Generator[Nghttpx, None, None]:
     nghttpx = NghttpxQuic(env=env)
     if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0):
@@ -113,7 +119,8 @@ def nghttpx(env, httpd) -> Generator[Nghttpx, None, None]:
     yield nghttpx
     nghttpx.stop()
 
-@pytest.fixture(scope='package')
+
+@pytest.fixture(scope="package")
 def nghttpx_fwd(env, httpd) -> Generator[Nghttpx, None, None]:
     nghttpx = NghttpxFwd(env=env)
     if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0):
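To see how these package-scoped fixtures are consumed, here is a hypothetical minimal test module in the style of this suite (`TestSketch`/`test_sketch_get` are invented names; `env`, `httpd` and `CurlClient` are the pieces defined in `conftest.py` and `testenv`):

```python
from testenv import Env, CurlClient


class TestSketch:
    # pytest injects `env` and `httpd` by name from conftest.py;
    # with scope="package" the servers start once and are shared
    # by every test module in the package.
    def test_sketch_get(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f"https://{env.domain1}:{env.https_port}/data.json"
        r = curl.http_get(url=url)
        r.check_response(http_status=200)
```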
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -35,8 +35,7 @@ log = logging.getLogger(__name__)
 
 
 class TestBasic:
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def _class_scope(self, env, nghttpx):
         if env.have_h3():
             nghttpx.start_if_needed()
@@ -44,108 +43,116 @@ class TestBasic:
     # simple http: GET
     def test_01_01_http_get(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'http://{env.domain1}:{env.http_port}/data.json'
+        url = f"http://{env.domain1}:{env.http_port}/data.json"
         r = curl.http_get(url=url)
         r.check_response(http_status=200)
-        assert r.json['server'] == env.domain1
+        assert r.json["server"] == env.domain1
 
     # simple https: GET, any http version
     @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
     def test_01_02_https_get(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'https://{env.domain1}:{env.https_port}/data.json'
+        url = f"https://{env.domain1}:{env.https_port}/data.json"
         r = curl.http_get(url=url)
         r.check_response(http_status=200)
-        assert r.json['server'] == env.domain1
+        assert r.json["server"] == env.domain1
 
     # simple https: GET, h2 wanted and got
     @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
     def test_01_03_h2_get(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'https://{env.domain1}:{env.https_port}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http2'])
-        r.check_response(http_status=200, protocol='HTTP/2')
-        assert r.json['server'] == env.domain1
+        url = f"https://{env.domain1}:{env.https_port}/data.json"
+        r = curl.http_get(url=url, extra_args=["--http2"])
+        r.check_response(http_status=200, protocol="HTTP/2")
+        assert r.json["server"] == env.domain1
 
     # simple https: GET, h2 unsupported, fallback to h1
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
     def test_01_04_h2_unsupported(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'https://{env.domain2}:{env.https_port}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http2'])
-        r.check_response(http_status=200, protocol='HTTP/1.1')
-        assert r.json['server'] == env.domain2
+        url = f"https://{env.domain2}:{env.https_port}/data.json"
+        r = curl.http_get(url=url, extra_args=["--http2"])
+        r.check_response(http_status=200, protocol="HTTP/1.1")
+        assert r.json["server"] == env.domain2
 
     # simple h3: GET, want h3 and get it
     @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
     def test_01_05_h3_get(self, env: Env, httpd, nghttpx):
         curl = CurlClient(env=env)
-        url = f'https://{env.domain1}:{env.h3_port}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http3-only'])
-        r.check_response(http_status=200, protocol='HTTP/3')
-        assert r.json['server'] == env.domain1
+        url = f"https://{env.domain1}:{env.h3_port}/data.json"
+        r = curl.http_get(url=url, extra_args=["--http3-only"])
+        r.check_response(http_status=200, protocol="HTTP/3")
+        assert r.json["server"] == env.domain1
 
     # simple download, check connect/handshake timings
     @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
-    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
     def test_01_06_timings(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         curl = CurlClient(env=env)
-        url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
+        url = f"https://{env.authority_for(env.domain1, proto)}/data.json"
         r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
-        r.check_stats(http_status=200, count=1,
-                      remote_port=env.port_for(alpn_proto=proto),
-                      remote_ip='127.0.0.1')
-        assert r.stats[0]['time_connect'] > 0, f'{r.stats[0]}'
-        assert r.stats[0]['time_appconnect'] > 0, f'{r.stats[0]}'
+        r.check_stats(
+            http_status=200,
+            count=1,
+            remote_port=env.port_for(alpn_proto=proto),
+            remote_ip="127.0.0.1",
+        )
+        assert r.stats[0]["time_connect"] > 0, f"{r.stats[0]}"
+        assert r.stats[0]["time_appconnect"] > 0, f"{r.stats[0]}"
 
     # simple https: HEAD
-    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
     @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
     def test_01_07_head(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         curl = CurlClient(env=env)
-        url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
-        r = curl.http_download(urls=[url], with_stats=True, with_headers=True,
-                               extra_args=['-I'])
-        r.check_stats(http_status=200, count=1, exitcode=0,
-                      remote_port=env.port_for(alpn_proto=proto),
-                      remote_ip='127.0.0.1')
+        url = f"https://{env.authority_for(env.domain1, proto)}/data.json"
+        r = curl.http_download(
+            urls=[url], with_stats=True, with_headers=True, extra_args=["-I"]
+        )
+        r.check_stats(
+            http_status=200,
+            count=1,
+            exitcode=0,
+            remote_port=env.port_for(alpn_proto=proto),
+            remote_ip="127.0.0.1",
+        )
         # got the Content-Length: header, but did not download anything
-        assert r.responses[0]['header']['content-length'] == '30', f'{r.responses[0]}'
-        assert r.stats[0]['size_download'] == 0, f'{r.stats[0]}'
+        assert r.responses[0]["header"]["content-length"] == "30", f"{r.responses[0]}"
+        assert r.stats[0]["size_download"] == 0, f"{r.stats[0]}"
 
     # http: GET for HTTP/2, see Upgrade:, 101 switch
     def test_01_08_h2_upgrade(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'http://{env.domain1}:{env.http_port}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http2'])
+        url = f"http://{env.domain1}:{env.http_port}/data.json"
+        r = curl.http_get(url=url, extra_args=["--http2"])
         r.check_exit_code(0)
-        assert len(r.responses) == 2, f'{r.responses}'
-        assert r.responses[0]['status'] == 101, f'{r.responses[0]}'
-        assert r.responses[1]['status'] == 200, f'{r.responses[1]}'
-        assert r.responses[1]['protocol'] == 'HTTP/2', f'{r.responses[1]}'
-        assert r.json['server'] == env.domain1
+        assert len(r.responses) == 2, f"{r.responses}"
+        assert r.responses[0]["status"] == 101, f"{r.responses[0]}"
+        assert r.responses[1]["status"] == 200, f"{r.responses[1]}"
+        assert r.responses[1]["protocol"] == "HTTP/2", f"{r.responses[1]}"
+        assert r.json["server"] == env.domain1
 
     # http: GET for HTTP/2 with prior knowledge
     def test_01_09_h2_prior_knowledge(self, env: Env, httpd):
         curl = CurlClient(env=env)
-        url = f'http://{env.domain1}:{env.http_port}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http2-prior-knowledge'])
+        url = f"http://{env.domain1}:{env.http_port}/data.json"
+        r = curl.http_get(url=url, extra_args=["--http2-prior-knowledge"])
         r.check_exit_code(0)
-        assert len(r.responses) == 1, f'{r.responses}'
-        assert r.response['status'] == 200, f'{r.response}'
-        assert r.response['protocol'] == 'HTTP/2', f'{r.response}'
-        assert r.json['server'] == env.domain1
+        assert len(r.responses) == 1, f"{r.responses}"
+        assert r.response["status"] == 200, f"{r.response}"
+        assert r.response["protocol"] == "HTTP/2", f"{r.response}"
+        assert r.json["server"] == env.domain1
 
     # http: strip TE header in HTTP/2 requests
     def test_01_10_te_strip(self, env: Env, httpd):
         curl = CurlClient(env=env)
         url = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
-        r = curl.http_get(url=url, extra_args=['--http2', '-H', 'TE: gzip'])
+        r = curl.http_get(url=url, extra_args=["--http2", "-H", "TE: gzip"])
         r.check_exit_code(0)
-        assert len(r.responses) == 1, f'{r.responses}'
-        assert r.responses[0]['status'] == 200, f'{r.responses[0]}'
-        assert r.responses[0]['protocol'] == 'HTTP/2', f'{r.responses[0]}'
+        assert len(r.responses) == 1, f"{r.responses}"
+        assert r.responses[0]["status"] == 200, f"{r.responses[0]}"
+        assert r.responses[0]["protocol"] == "HTTP/2", f"{r.responses[0]}"
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -37,8 +37,7 @@ log = logging.getLogger(__name__)
 
 
 class TestGoAway:
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def _class_scope(self, env, httpd, nghttpx):
         if env.have_h3():
             nghttpx.start_if_needed()
@@ -47,15 +46,18 @@ class TestGoAway:
 
     # download files sequentially with delay, reload server for GOAWAY
     def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx):
-        proto = 'h2'
+        proto = "h2"
         count = 3
         self.r = None
 
         def long_run():
             curl = CurlClient(env=env)
             # send 10 chunks of 1024 bytes in a response body with 100ms delay in between
-            urln = f'https://{env.authority_for(env.domain1, proto)}' \
-                   f'/curltest/tweak?id=[0-{count - 1}]' \
-                   '&chunks=10&chunk_size=1024&chunk_delay=100ms'
+            urln = (
+                f"https://{env.authority_for(env.domain1, proto)}"
+                f"/curltest/tweak?id=[0-{count - 1}]"
+                "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+            )
             self.r = curl.http_download(urls=[urln], alpn_proto=proto)
 
         t = Thread(target=long_run)
@@ -71,27 +73,30 @@ class TestGoAway:
         # we expect to see a second connection opened afterwards
         assert r.total_connects == 2
         for idx, s in enumerate(r.stats):
-            if s['num_connects'] > 0:
-                log.debug(f'request {idx} connected')
+            if s["num_connects"] > 0:
+                log.debug(f"request {idx} connected")
         # this should take `count` seconds to retrieve
         assert r.duration >= timedelta(seconds=count)
 
     # download files sequentially with delay, reload server for GOAWAY
     @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
     def test_03_02_h3_goaway(self, env: Env, httpd, nghttpx):
-        proto = 'h3'
-        if proto == 'h3' and env.curl_uses_lib('msh3'):
+        proto = "h3"
+        if proto == "h3" and env.curl_uses_lib("msh3"):
             pytest.skip("msh3 stalls here")
-        if proto == 'h3' and env.curl_uses_ossl_quic():
-            pytest.skip('OpenSSL QUIC fails here')
+        if proto == "h3" and env.curl_uses_ossl_quic():
+            pytest.skip("OpenSSL QUIC fails here")
         count = 3
         self.r = None
 
         def long_run():
             curl = CurlClient(env=env)
             # send 10 chunks of 1024 bytes in a response body with 100ms delay in between
-            urln = f'https://{env.authority_for(env.domain1, proto)}' \
-                   f'/curltest/tweak?id=[0-{count - 1}]' \
-                   '&chunks=10&chunk_size=1024&chunk_delay=100ms'
+            urln = (
+                f"https://{env.authority_for(env.domain1, proto)}"
+                f"/curltest/tweak?id=[0-{count - 1}]"
+                "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+            )
             self.r = curl.http_download(urls=[urln], alpn_proto=proto)
 
         t = Thread(target=long_run)
@@ -103,29 +108,37 @@ class TestGoAway:
         t.join()
         r: ExecResult = self.r
         # this should take `count` seconds to retrieve, maybe a little less
-        assert r.duration >= timedelta(seconds=count-1)
+        assert r.duration >= timedelta(seconds=count - 1)
         r.check_response(count=count, http_status=200, connect_count=2)
         # reload will shut down the connection gracefully with GOAWAY
         # we expect to see a second connection opened afterwards
         for idx, s in enumerate(r.stats):
-            if s['num_connects'] > 0:
-                log.debug(f'request {idx} connected')
+            if s["num_connects"] > 0:
+                log.debug(f"request {idx} connected")
 
     # download files sequentially with delay, reload server for GOAWAY
     def test_03_03_h1_goaway(self, env: Env, httpd, nghttpx):
-        proto = 'http/1.1'
+        proto = "http/1.1"
         count = 3
         self.r = None
 
         def long_run():
             curl = CurlClient(env=env)
             # send 10 chunks of 1024 bytes in a response body with 100ms delay in between
             # pause 2 seconds between requests
-            urln = f'https://{env.authority_for(env.domain1, proto)}' \
-                   f'/curltest/tweak?id=[0-{count - 1}]' \
-                   '&chunks=10&chunk_size=1024&chunk_delay=100ms'
-            self.r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-                '--rate', '30/m',
-            ])
+            urln = (
+                f"https://{env.authority_for(env.domain1, proto)}"
+                f"/curltest/tweak?id=[0-{count - 1}]"
+                "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+            )
+            self.r = curl.http_download(
+                urls=[urln],
+                alpn_proto=proto,
+                extra_args=[
+                    "--rate",
+                    "30/m",
+                ],
+            )
 
         t = Thread(target=long_run)
         t.start()
@@ -139,7 +152,7 @@ class TestGoAway:
         # reload will shut down the connection gracefully
         # we expect to see a second connection opened afterwards
         for idx, s in enumerate(r.stats):
-            if s['num_connects'] > 0:
-                log.debug(f'request {idx} connected')
+            if s["num_connects"] > 0:
+                log.debug(f"request {idx} connected")
         # this should take `count` seconds to retrieve
         assert r.duration >= timedelta(seconds=count)
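The hunks above elide the middle of these GOAWAY tests; the flow they share is roughly the sketch below (a paraphrase of the elided steps, not the literal code). The slow download runs in a background thread while the test reloads httpd; the graceful reload sends GOAWAY on open connections, so the remaining transfers reopen on a second connection:

```python
import time
from threading import Thread


def goaway_flow(httpd, long_run):
    t = Thread(target=long_run)  # chunked, delayed downloads start on connection 1
    t.start()
    time.sleep(1.5)  # let the first transfer get underway
    httpd.reload()  # graceful restart: server sends GOAWAY
    t.join()  # remaining transfers complete on connection 2
```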
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -33,11 +33,12 @@ from testenv import Env, CurlClient
 log = logging.getLogger(__name__)
 
 
-@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+@pytest.mark.skipif(
+    condition=Env().slow_network, reason="not suitable for slow network tests"
+)
 @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
 class TestStuttered:
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def _class_scope(self, env, httpd, nghttpx):
         if env.have_h3():
             nghttpx.start_if_needed()
@@ -46,85 +47,116 @@ class TestStuttered:
         httpd.reload()
 
     # download 1 file, check that delayed response works in general
-    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
     def test_04_01_download_1(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         count = 1
         curl = CurlClient(env=env)
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count - 1}]' \
-               '&chunks=100&chunk_size=100&chunk_delay=10ms'
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count - 1}]"
+            "&chunks=100&chunk_size=100&chunk_delay=10ms"
+        )
         r = curl.http_download(urls=[urln], alpn_proto=proto)
         r.check_response(count=1, http_status=200)
 
     # download 50 files in 100 chunks of 100 bytes with 10ms delay between
     # prepend 100 file requests to warm up connection processing limits
     # (Apache2 increases # of parallel processed requests after successes)
-    @pytest.mark.parametrize("proto", ['h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["h2", "h3"])
     def test_04_02_100_100_10(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         count = 50
         warmups = 100
         curl = CurlClient(env=env)
-        url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count-1}]' \
-               '&chunks=100&chunk_size=100&chunk_delay=10ms'
-        r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
-                               extra_args=['--parallel'])
-        r.check_response(count=warmups+count, http_status=200)
+        url1 = (
+            f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+        )
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count-1}]"
+            "&chunks=100&chunk_size=100&chunk_delay=10ms"
+        )
+        r = curl.http_download(
+            urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+        )
+        r.check_response(count=warmups + count, http_status=200)
         assert r.total_connects == 1
-        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+            r.stats[warmups:], "time_total"
+        )
         if t_max < (5 * t_min) and t_min < 2:
-            log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+            log.warning(
+                f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+            )
 
     # download 50 files in 1000 chunks of 10 bytes with 1ms delay between
     # prepend 100 file requests to warm up connection processing limits
     # (Apache2 increases # of parallel processed requests after successes)
-    @pytest.mark.parametrize("proto", ['h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["h2", "h3"])
     def test_04_03_1000_10_1(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         count = 50
         warmups = 100
         curl = CurlClient(env=env)
-        url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count - 1}]' \
-               '&chunks=1000&chunk_size=10&chunk_delay=100us'
-        r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
-                               extra_args=['--parallel'])
-        r.check_response(count=warmups+count, http_status=200)
+        url1 = (
+            f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+        )
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count - 1}]"
+            "&chunks=1000&chunk_size=10&chunk_delay=100us"
+        )
+        r = curl.http_download(
+            urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+        )
+        r.check_response(count=warmups + count, http_status=200)
         assert r.total_connects == 1
-        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+            r.stats[warmups:], "time_total"
+        )
         if t_max < (5 * t_min):
-            log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+            log.warning(
+                f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+            )
 
     # download 50 files in 10000 chunks of 1 byte with 10us delay between
     # prepend 100 file requests to warm up connection processing limits
     # (Apache2 increases # of parallel processed requests after successes)
-    @pytest.mark.parametrize("proto", ['h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["h2", "h3"])
     def test_04_04_1000_10_1(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
         count = 50
         warmups = 100
         curl = CurlClient(env=env)
-        url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count - 1}]' \
-               '&chunks=10000&chunk_size=1&chunk_delay=50us'
-        r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
-                               extra_args=['--parallel'])
-        r.check_response(count=warmups+count, http_status=200)
+        url1 = (
+            f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+        )
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count - 1}]"
+            "&chunks=10000&chunk_size=1&chunk_delay=50us"
+        )
+        r = curl.http_download(
+            urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+        )
+        r.check_response(count=warmups + count, http_status=200)
         assert r.total_connects == 1
-        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+        t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+            r.stats[warmups:], "time_total"
+        )
         if t_max < (5 * t_min):
-            log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+            log.warning(
+                f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+            )
 
-    def stats_spread(self, stats: List[Dict], key: str) -> Tuple[float, int, float, int, float]:
+    def stats_spread(
+        self, stats: List[Dict], key: str
+    ) -> Tuple[float, int, float, int, float]:
         stotals = 0.0
         s_min = 100.0
         i_min = -1
@@ -139,4 +171,4 @@ class TestStuttered:
             if val < s_min:
                 s_min = val
                 i_min = idx
-        return stotals/len(stats), i_min, s_min, i_max, s_max
+        return stotals / len(stats), i_min, s_min, i_max, s_max
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -33,11 +33,12 @@ from testenv import Env, CurlClient
 log = logging.getLogger(__name__)
 
 
-@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.55'),
-                    reason=f"httpd version too old for this: {Env.httpd_version()}")
+@pytest.mark.skipif(
+    condition=not Env.httpd_is_at_least("2.4.55"),
+    reason=f"httpd version too old for this: {Env.httpd_version()}",
+)
 class TestErrors:
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def _class_scope(self, env, httpd, nghttpx):
         if env.have_h3():
             nghttpx.start_if_needed()
@@ -45,65 +46,75 @@ class TestErrors:
         httpd.reload()
 
     # download 1 file, check that we get CURLE_PARTIAL_FILE
-    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
     def test_05_01_partial_1(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
-        if proto == 'h3' and env.curl_uses_lib('msh3'):
+        if proto == "h3" and env.curl_uses_lib("msh3"):
             pytest.skip("msh3 stalls here")
         count = 1
         curl = CurlClient(env=env)
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count - 1}]' \
-               '&chunks=3&chunk_size=16000&body_error=reset'
-        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-            '--retry', '0'
-        ])
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count - 1}]"
+            "&chunks=3&chunk_size=16000&body_error=reset"
+        )
+        r = curl.http_download(
+            urls=[urln], alpn_proto=proto, extra_args=["--retry", "0"]
+        )
         r.check_exit_code(False)
         invalid_stats = []
         for idx, s in enumerate(r.stats):
-            if 'exitcode' not in s or s['exitcode'] not in [18, 56, 92, 95]:
+            if "exitcode" not in s or s["exitcode"] not in [18, 56, 92, 95]:
                 invalid_stats.append(f'request {idx} exit with {s["exitcode"]}')
-        assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
+        assert len(invalid_stats) == 0, f"failed: {invalid_stats}"
 
     # download files, check that we get CURLE_PARTIAL_FILE for all
-    @pytest.mark.parametrize("proto", ['h2', 'h3'])
+    @pytest.mark.parametrize("proto", ["h2", "h3"])
     def test_05_02_partial_20(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
-        if proto == 'h3' and env.curl_uses_lib('msh3'):
+        if proto == "h3" and env.curl_uses_lib("msh3"):
             pytest.skip("msh3 stalls here")
         count = 20
         curl = CurlClient(env=env)
-        urln = f'https://{env.authority_for(env.domain1, proto)}' \
-               f'/curltest/tweak?id=[0-{count - 1}]' \
-               '&chunks=5&chunk_size=16000&body_error=reset'
-        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-            '--retry', '0', '--parallel',
-        ])
+        urln = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/tweak?id=[0-{count - 1}]"
+            "&chunks=5&chunk_size=16000&body_error=reset"
+        )
+        r = curl.http_download(
+            urls=[urln],
+            alpn_proto=proto,
+            extra_args=[
+                "--retry",
+                "0",
+                "--parallel",
+            ],
+        )
         r.check_exit_code(False)
-        assert len(r.stats) == count, f'did not get all stats: {r}'
+        assert len(r.stats) == count, f"did not get all stats: {r}"
         invalid_stats = []
         for idx, s in enumerate(r.stats):
-            if 'exitcode' not in s or s['exitcode'] not in [18, 55, 56, 92, 95]:
+            if "exitcode" not in s or s["exitcode"] not in [18, 55, 56, 92, 95]:
                 invalid_stats.append(f'request {idx} exit with {s["exitcode"]}\n{s}')
-        assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
+        assert len(invalid_stats) == 0, f"failed: {invalid_stats}"
 
     # access a resource that, on h2, RSTs the stream with HTTP_1_1_REQUIRED
     def test_05_03_required(self, env: Env, httpd, nghttpx):
         curl = CurlClient(env=env)
-        proto = 'http/1.1'
-        urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
+        proto = "http/1.1"
+        urln = f"https://{env.authority_for(env.domain1, proto)}/curltest/1_1"
         r = curl.http_download(urls=[urln], alpn_proto=proto)
         r.check_exit_code(0)
         r.check_response(http_status=200, count=1)
-        proto = 'h2'
-        urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
+        proto = "h2"
+        urln = f"https://{env.authority_for(env.domain1, proto)}/curltest/1_1"
         r = curl.http_download(urls=[urln], alpn_proto=proto)
         r.check_exit_code(0)
         r.check_response(http_status=200, count=1)
         # check that we did a downgrade
-        assert r.stats[0]['http_version'] == '1.1', r.dump_logs()
+        assert r.stats[0]["http_version"] == "1.1", r.dump_logs()
 
     # On the URL used here, Apache is doing an "unclean" TLS shutdown,
     # meaning it sends no shutdown notice and just closes TCP.
@@ -114,19 +125,28 @@ class TestErrors:
     #   and stop receiving when that signals the end
     # - h2 to work since it will signal the end of the response before
     #   and not see the "unclean" close either
-    @pytest.mark.parametrize("proto", ['http/1.0', 'http/1.1', 'h2'])
+    @pytest.mark.parametrize("proto", ["http/1.0", "http/1.1", "h2"])
     def test_05_04_unclean_tls_shutdown(self, env: Env, httpd, nghttpx, proto):
-        if proto == 'h3' and not env.have_h3():
+        if proto == "h3" and not env.have_h3():
             pytest.skip("h3 not supported")
-        count = 10 if proto == 'h2' else 1
+        count = 10 if proto == "h2" else 1
         curl = CurlClient(env=env)
-        url = f'https://{env.authority_for(env.domain1, proto)}' \
-              f'/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4'
-        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
-            '--parallel',
-        ])
-        if proto == 'http/1.0' and not env.curl_uses_lib('wolfssl') and \
-                (env.curl_is_debug() or not env.curl_uses_lib('openssl')):
+        url = (
+            f"https://{env.authority_for(env.domain1, proto)}"
+            f"/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4"
+        )
+        r = curl.http_download(
+            urls=[url],
+            alpn_proto=proto,
+            extra_args=[
+                "--parallel",
+            ],
+        )
+        if (
+            proto == "http/1.0"
+            and not env.curl_uses_lib("wolfssl")
+            and (env.curl_is_debug() or not env.curl_uses_lib("openssl"))
+        ):
            # we are inconsistent in whether we fail or not on a missing TLS shutdown;
            # the openssl code ignores such errors intentionally in non-debug builds
            r.check_exit_code(56)
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
 #                                  _   _ ____  _
 #  Project                     ___| | | |  _ \| |
 #                             / __| | | | |_) | |
@@ -34,8 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class TestEyeballs:
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def _class_scope(self, env, httpd, nghttpx):
         if env.have_h3():
             nghttpx.start_if_needed()
@@ -47,9 +46,9 @@ class TestEyeballs:
     def test_06_01_h3_only(self, env: Env, httpd, nghttpx):
         curl = CurlClient(env=env)
         urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
-        r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
+        r = curl.http_download(urls=[urln], extra_args=["--http3-only"])
         r.check_response(count=1, http_status=200)
-        assert r.stats[0]['http_version'] == '3'
+        assert r.stats[0]["http_version"] == "3"
 
     # download using only HTTP/3 on missing server
     @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
@@ -57,7 +56,7 @@ class TestEyeballs:
         nghttpx.stop_if_running()
         curl = CurlClient(env=env)
         urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
-        r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
+        r = curl.http_download(urls=[urln], extra_args=["--http3-only"])
         r.check_response(exitcode=7, http_status=None)
 
     # download using HTTP/3 on missing server with fallback on h2
@@ -66,9 +65,9 @@ class TestEyeballs:
         nghttpx.stop_if_running()
         curl = CurlClient(env=env)
         urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
-        r = curl.http_download(urls=[urln], extra_args=['--http3'])
+        r = curl.http_download(urls=[urln], extra_args=["--http3"])
         r.check_response(count=1, http_status=200)
-        assert r.stats[0]['http_version'] == '2'
+        assert r.stats[0]["http_version"] == "2"
 
     # download using HTTP/3 on missing server with fallback on http/1.1
     @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
@@ -76,9 +75,9 @@ class TestEyeballs:
         nghttpx.stop_if_running()
         curl = CurlClient(env=env)
         urln = f'https://{env.authority_for(env.domain2, "h3")}/data.json'
-        r = curl.http_download(urls=[urln], extra_args=['--http3'])
+        r = curl.http_download(urls=[urln], extra_args=["--http3"])
         r.check_response(count=1, http_status=200)
-        assert r.stats[0]['http_version'] == '1.1'
+        assert r.stats[0]["http_version"] == "1.1"
 
     # make a successful https: transfer and observe the timer stats
     def test_06_10_stats_success(self, env: Env, httpd, nghttpx):
@@ -86,27 +85,28 @@ class TestEyeballs:
         urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
         r = curl.http_download(urls=[urln])
         r.check_response(count=1, http_status=200)
-        assert r.stats[0]['time_connect'] > 0.0
-        assert r.stats[0]['time_appconnect'] > 0.0
+        assert r.stats[0]["time_connect"] > 0.0
+        assert r.stats[0]["time_appconnect"] > 0.0
 
     # make https: to a hostname that tcp connects, but will not verify
     def test_06_11_stats_fail_verify(self, env: Env, httpd, nghttpx):
         curl = CurlClient(env=env)
-        urln = f'https://not-valid.com:{env.https_port}/data.json'
-        r = curl.http_download(urls=[urln], extra_args=[
-            '--resolve', f'not-valid.com:{env.https_port}:127.0.0.1'
-        ])
+        urln = f"https://not-valid.com:{env.https_port}/data.json"
+        r = curl.http_download(
+            urls=[urln],
+            extra_args=["--resolve", f"not-valid.com:{env.https_port}:127.0.0.1"],
+        )
         r.check_response(count=1, http_status=0, exitcode=False)
-        assert r.stats[0]['time_connect'] > 0.0  # was tcp connected
-        assert r.stats[0]['time_appconnect'] == 0  # but not SSL verified
+        assert r.stats[0]["time_connect"] > 0.0  # was tcp connected
+        assert r.stats[0]["time_appconnect"] == 0  # but not SSL verified
 
     # make https: to an invalid address
     def test_06_12_stats_fail_tcp(self, env: Env, httpd, nghttpx):
         curl = CurlClient(env=env)
-        urln = 'https://not-valid.com:1/data.json'
-        r = curl.http_download(urls=[urln], extra_args=[
-            '--resolve', f'not-valid.com:{1}:127.0.0.1'
-        ])
+        urln = "https://not-valid.com:1/data.json"
+        r = curl.http_download(
+            urls=[urln], extra_args=["--resolve", f"not-valid.com:{1}:127.0.0.1"]
+        )
         r.check_response(count=1, http_status=None, exitcode=False)
-        assert r.stats[0]['time_connect'] == 0  # no one should have listened
-        assert r.stats[0]['time_appconnect'] == 0  # did not happen either
+        assert r.stats[0]["time_connect"] == 0  # no one should have listened
+        assert r.stats[0]["time_appconnect"] == 0  # did not happen either
File diff suppressed because it is too large
@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#***************************************************************************
|
||||
# ***************************************************************************
|
||||
# _ _ ____ _
|
||||
# Project ___| | | | _ \| |
|
||||
# / __| | | | |_) | |
|
||||
@ -40,8 +40,7 @@ log = logging.getLogger(__name__)
|
||||
@pytest.mark.skipif(condition=not Env.has_caddy(), reason="missing caddy")
|
||||
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
|
||||
class TestCaddy:
|
||||
|
||||
@pytest.fixture(autouse=True, scope='class')
|
||||
@pytest.fixture(autouse=True, scope="class")
|
||||
def caddy(self, env):
|
||||
caddy = Caddy(env=env)
|
||||
assert caddy.start()
|
||||
@ -50,204 +49,228 @@ class TestCaddy:
|
||||
|
||||
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
|
||||
fpath = os.path.join(docs_dir, fname)
|
||||
data1k = 1024*'x'
|
||||
data1k = 1024 * "x"
|
||||
flen = 0
|
||||
with open(fpath, 'w') as fd:
|
||||
with open(fpath, "w") as fd:
|
||||
while flen < fsize:
|
||||
fd.write(data1k)
|
||||
flen += len(data1k)
|
||||
return flen
|
||||
|
||||
@pytest.fixture(autouse=True, scope='class')
|
||||
@pytest.fixture(autouse=True, scope="class")
|
||||
def _class_scope(self, env, caddy):
|
||||
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10k.data', fsize=10*1024)
|
||||
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data1.data', fsize=1024*1024)
|
||||
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data5.data', fsize=5*1024*1024)
|
||||
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10.data', fsize=10*1024*1024)
|
||||
self._make_docs_file(docs_dir=caddy.docs_dir, fname='data100.data', fsize=100*1024*1024)
|
||||
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
|
||||
self._make_docs_file(
|
||||
docs_dir=caddy.docs_dir, fname="data10k.data", fsize=10 * 1024
|
||||
)
|
||||
self._make_docs_file(
|
||||
docs_dir=caddy.docs_dir, fname="data1.data", fsize=1024 * 1024
|
||||
)
|
||||
self._make_docs_file(
|
||||
docs_dir=caddy.docs_dir, fname="data5.data", fsize=5 * 1024 * 1024
|
||||
)
|
||||
self._make_docs_file(
|
||||
docs_dir=caddy.docs_dir, fname="data10.data", fsize=10 * 1024 * 1024
|
||||
)
|
||||
self._make_docs_file(
|
||||
docs_dir=caddy.docs_dir, fname="data100.data", fsize=100 * 1024 * 1024
|
||||
)
|
||||
env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
|
||||
|
||||
# download 1 file
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_01_download_1(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
curl = CurlClient(env=env)
|
||||
url = f'https://{env.domain1}:{caddy.port}/data.json'
|
||||
url = f"https://{env.domain1}:{caddy.port}/data.json"
|
||||
r = curl.http_download(urls=[url], alpn_proto=proto)
|
||||
r.check_response(count=1, http_status=200)
|
||||
|
||||
# download 1MB files sequentially
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_02_download_1mb_sequential(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
count = 50
|
||||
curl = CurlClient(env=env)
|
||||
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
|
||||
urln = f"https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]"
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto)
|
||||
r.check_response(count=count, http_status=200, connect_count=1)
|
||||
|
||||
# download 1MB files parallel
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_03_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
count = 20
|
||||
curl = CurlClient(env=env)
|
||||
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
|
||||
'--parallel'
|
||||
])
|
||||
urln = f"https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]"
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
|
||||
r.check_response(count=count, http_status=200)
|
||||
if proto == 'http/1.1':
|
||||
if proto == "http/1.1":
|
||||
# http/1.1 parallel transfers will open multiple connections
|
||||
assert r.total_connects > 1, r.dump_logs()
|
||||
else:
|
||||
assert r.total_connects == 1, r.dump_logs()
|
||||
|
||||
# download 5MB files sequentially
|
||||
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
|
||||
@pytest.mark.skipif(
|
||||
condition=Env().slow_network, reason="not suitable for slow network tests"
|
||||
)
|
||||
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
|
||||
@pytest.mark.parametrize("proto", ['h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["h2", "h3"])
|
||||
def test_08_04a_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
count = 40
|
||||
curl = CurlClient(env=env)
|
||||
urln = f'https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]'
|
||||
urln = f"https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]"
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto)
|
||||
r.check_response(count=count, http_status=200, connect_count=1)
|
||||
|
||||
# download 10MB files sequentially
|
||||
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
|
||||
@pytest.mark.skipif(
|
||||
condition=Env().slow_network, reason="not suitable for slow network tests"
|
||||
)
|
||||
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
|
||||
@pytest.mark.parametrize("proto", ['h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["h2", "h3"])
|
||||
def test_08_04b_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
count = 20
|
||||
curl = CurlClient(env=env)
|
||||
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
|
||||
urln = f"https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]"
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto)
|
||||
r.check_response(count=count, http_status=200, connect_count=1)
|
||||
|
||||
# download 10MB files parallel
|
||||
@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.skipif(
|
||||
condition=Env().slow_network, reason="not suitable for slow network tests"
|
||||
)
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
|
||||
def test_08_05_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3_curl():
|
||||
if proto == "h3" and not env.have_h3_curl():
|
||||
pytest.skip("h3 not supported in curl")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 itself crashes")
|
||||
if proto == 'http/1.1' and env.curl_uses_lib('mbedtls'):
|
||||
pytest.skip("mbedtls 3.6.0 fails on 50 connections with: "\
|
||||
"ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed")
|
||||
if proto == "http/1.1" and env.curl_uses_lib("mbedtls"):
|
||||
pytest.skip(
|
||||
"mbedtls 3.6.0 fails on 50 connections with: "
|
||||
"ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed"
|
||||
)
|
||||
count = 50
|
||||
curl = CurlClient(env=env)
|
||||
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
|
||||
'--parallel'
|
||||
])
|
||||
urln = f"https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]"
|
||||
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
|
||||
r.check_response(count=count, http_status=200)
|
||||
if proto == 'http/1.1':
|
||||
if proto == "http/1.1":
|
||||
# http/1.1 parallel transfers will open multiple connections
|
||||
assert r.total_connects > 1, r.dump_logs()
|
||||
else:
|
||||
assert r.total_connects == 1, r.dump_logs()
|
||||
|
||||
# post data parallel, check that they were echoed
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_06_post_parallel(self, env: Env, httpd, caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3():
|
||||
if proto == "h3" and not env.have_h3():
|
||||
pytest.skip("h3 not supported")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 stalls here")
|
||||
# limit since we use a separate connection in h1
|
||||
count = 20
|
||||
data = '0123456789'
|
||||
data = "0123456789"
|
||||
curl = CurlClient(env=env)
|
||||
url = f'https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]'
|
||||
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
|
||||
extra_args=['--parallel'])
|
||||
url = f"https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]"
|
||||
r = curl.http_upload(
|
||||
urls=[url], data=data, alpn_proto=proto, extra_args=["--parallel"]
|
||||
)
|
||||
r.check_stats(count=count, http_status=200, exitcode=0)
|
||||
for i in range(count):
|
||||
respdata = open(curl.response_file(i)).readlines()
|
||||
assert respdata == [data]
|
||||
|
||||
# put large file, check that they length were echoed
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_07_put_large(self, env: Env, httpd, caddy, proto):
|
||||
if proto == 'h3' and not env.have_h3():
|
||||
if proto == "h3" and not env.have_h3():
|
||||
pytest.skip("h3 not supported")
|
||||
if proto == 'h3' and env.curl_uses_lib('msh3'):
|
||||
if proto == "h3" and env.curl_uses_lib("msh3"):
|
||||
pytest.skip("msh3 stalls here")
|
||||
# limit since we use a separate connection in h1<
|
||||
count = 1
|
||||
fdata = os.path.join(env.gen_dir, 'data-10m')
|
||||
fdata = os.path.join(env.gen_dir, "data-10m")
|
||||
curl = CurlClient(env=env)
|
||||
url = f'https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]'
|
||||
url = f"https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]"
|
||||
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
|
||||
exp_data = [f'{os.path.getsize(fdata)}']
|
||||
exp_data = [f"{os.path.getsize(fdata)}"]
|
||||
r.check_response(count=count, http_status=200)
|
||||
for i in range(count):
|
||||
respdata = open(curl.response_file(i)).readlines()
|
||||
assert respdata == exp_data
|
||||
|
||||
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_08_08_earlydata(self, env: Env, httpd, caddy, proto):
|
||||
if not env.curl_uses_lib('gnutls'):
|
||||
pytest.skip('TLS earlydata only implemented in GnuTLS')
|
||||
if proto == 'h3' and not env.have_h3():
|
||||
if not env.curl_uses_lib("gnutls"):
|
||||
pytest.skip("TLS earlydata only implemented in GnuTLS")
|
||||
if proto == "h3" and not env.have_h3():
|
||||
pytest.skip("h3 not supported")
|
||||
count = 2
|
||||
docname = 'data10k.data'
|
||||
url = f'https://{env.domain1}:{caddy.port}/{docname}'
|
||||
client = LocalClient(name='hx-download', env=env)
|
||||
docname = "data10k.data"
|
||||
url = f"https://{env.domain1}:{caddy.port}/{docname}"
|
||||
client = LocalClient(name="hx-download", env=env)
|
||||
if not client.exists():
|
||||
pytest.skip(f'example client not built: {client.name}')
|
||||
r = client.run(args=[
|
||||
'-n', f'{count}',
|
||||
'-e', # use TLS earlydata
|
||||
'-f', # forbid reuse of connections
|
||||
'-r', f'{env.domain1}:{caddy.port}:127.0.0.1',
|
||||
'-V', proto, url
|
||||
])
|
||||
pytest.skip(f"example client not built: {client.name}")
|
||||
r = client.run(
|
||||
args=[
|
||||
"-n",
|
||||
f"{count}",
|
||||
"-e", # use TLS earlydata
|
||||
"-f", # forbid reuse of connections
|
||||
"-r",
|
||||
f"{env.domain1}:{caddy.port}:127.0.0.1",
|
||||
"-V",
|
||||
proto,
|
||||
url,
|
||||
]
|
||||
)
|
||||
r.check_exit_code(0)
|
||||
srcfile = os.path.join(caddy.docs_dir, docname)
|
||||
self.check_downloads(client, srcfile, count)
|
||||
earlydata = {}
|
||||
for line in r.trace_lines:
|
||||
m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
|
||||
m = re.match(r"^\[t-(\d+)] EarlyData: (-?\d+)", line)
|
||||
if m:
|
||||
earlydata[int(m.group(1))] = int(m.group(2))
|
||||
assert earlydata[0] == 0, f'{earlydata}'
|
||||
if proto == 'h3':
|
||||
assert earlydata[1] == 71, f'{earlydata}'
|
||||
assert earlydata[0] == 0, f"{earlydata}"
|
||||
if proto == "h3":
|
||||
assert earlydata[1] == 71, f"{earlydata}"
|
||||
else:
|
||||
# Caddy does not support early data on TCP
|
||||
assert earlydata[1] == 0, f'{earlydata}'
|
||||
assert earlydata[1] == 0, f"{earlydata}"

    def check_downloads(self, client, srcfile: str, count: int,
                        complete: bool = True):
    def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
        for i in range(count):
            dfile = client.download_file(i)
            assert os.path.exists(dfile)
            if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
                diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
                                                    b=open(dfile).readlines(),
                                                    fromfile=srcfile,
                                                    tofile=dfile,
                                                    n=1))
                assert False, f'download {dfile} differs:\n{diff}'
                diff = "".join(
                    difflib.unified_diff(
                        a=open(srcfile).readlines(),
                        b=open(dfile).readlines(),
                        fromfile=srcfile,
                        tofile=dfile,
                        n=1,
                    )
                )
                assert False, f"download {dfile} differs:\n{diff}"

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -35,25 +35,27 @@ log = logging.getLogger(__name__)


class TestPush:

    @pytest.fixture(autouse=True, scope='class')
    @pytest.fixture(autouse=True, scope="class")
    def _class_scope(self, env, httpd):
        push_dir = os.path.join(httpd.docs_dir, 'push')
        push_dir = os.path.join(httpd.docs_dir, "push")
        if not os.path.exists(push_dir):
            os.makedirs(push_dir)
        env.make_data_file(indir=push_dir, fname="data1", fsize=1*1024)
        env.make_data_file(indir=push_dir, fname="data2", fsize=1*1024)
        env.make_data_file(indir=push_dir, fname="data3", fsize=1*1024)
        httpd.set_extra_config(env.domain1, [
            'H2EarlyHints on',
            '<Location /push/data1>',
            ' H2PushResource /push/data2',
            '</Location>',
            '<Location /push/data2>',
            ' H2PushResource /push/data1',
            ' H2PushResource /push/data3',
            '</Location>',
        ])
        env.make_data_file(indir=push_dir, fname="data1", fsize=1 * 1024)
        env.make_data_file(indir=push_dir, fname="data2", fsize=1 * 1024)
        env.make_data_file(indir=push_dir, fname="data3", fsize=1 * 1024)
        httpd.set_extra_config(
            env.domain1,
            [
                "H2EarlyHints on",
                "<Location /push/data1>",
                " H2PushResource /push/data2",
                "</Location>",
                "<Location /push/data2>",
                " H2PushResource /push/data1",
                " H2PushResource /push/data3",
                "</Location>",
            ],
        )
        # activate the new config
        httpd.reload()
        yield
@ -63,22 +65,25 @@ class TestPush:
    # download a file that triggers a "103 Early Hints" response
    def test_09_01_h2_early_hints(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'https://{env.domain1}:{env.https_port}/push/data1'
        r = curl.http_download(urls=[url], alpn_proto='h2', with_stats=False,
                               with_headers=True)
        url = f"https://{env.domain1}:{env.https_port}/push/data1"
        r = curl.http_download(
            urls=[url], alpn_proto="h2", with_stats=False, with_headers=True
        )
        r.check_exit_code(0)
        assert len(r.responses) == 2, f'{r.responses}'
        assert r.responses[0]['status'] == 103, f'{r.responses}'
        assert 'link' in r.responses[0]['header'], f'{r.responses[0]}'
        assert r.responses[0]['header']['link'] == '</push/data2>; rel=preload', f'{r.responses[0]}'
        assert len(r.responses) == 2, f"{r.responses}"
        assert r.responses[0]["status"] == 103, f"{r.responses}"
        assert "link" in r.responses[0]["header"], f"{r.responses[0]}"
        assert (
            r.responses[0]["header"]["link"] == "</push/data2>; rel=preload"
        ), f"{r.responses[0]}"
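        # background: "103 Early Hints" (RFC 8297) is an interim response the
        # server sends ahead of the final 200, carrying Link headers a client
        # may use for preloading; that is why curl reports two responses for
        # a single transfer here.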

    def test_09_02_h2_push(self, env: Env, httpd):
        # use localhost as we do not have resolve support in local client
        url = f'https://localhost:{env.https_port}/push/data1'
        client = LocalClient(name='h2-serverpush', env=env)
        url = f"https://localhost:{env.https_port}/push/data1"
        client = LocalClient(name="h2-serverpush", env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
            pytest.skip(f"example client not built: {client.name}")
        r = client.run(args=[url])
        r.check_exit_code(0)
        assert os.path.exists(client.download_file(0))
        assert os.path.exists(os.path.join(client.run_dir, 'push0')), r.dump_logs()
        assert os.path.exists(os.path.join(client.run_dir, "push0")), r.dump_logs()
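        # the class fixture above configures Apache to push /push/data2 along
        # with /push/data1; the h2-serverpush example client appears to store
        # a pushed resource as 'push0' in its run directory, checked above.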

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -37,22 +37,21 @@ log = logging.getLogger(__name__)


class TestProxy:

    @pytest.fixture(autouse=True, scope='class')
    @pytest.fixture(autouse=True, scope="class")
    def _class_scope(self, env, httpd, nghttpx_fwd):
        push_dir = os.path.join(httpd.docs_dir, 'push')
        push_dir = os.path.join(httpd.docs_dir, "push")
        if not os.path.exists(push_dir):
            os.makedirs(push_dir)
        if env.have_nghttpx():
            nghttpx_fwd.start_if_needed()
        env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100 * 1024)
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
        httpd.clear_extra_configs()
        httpd.reload()

    def get_tunnel_proto_used(self, r: ExecResult):
        for line in r.trace_lines:
            m = re.match(r'.* CONNECT tunnel: (\S+) negotiated$', line)
            m = re.match(r".* CONNECT tunnel: (\S+) negotiated$", line)
            if m:
                return m.group(1)
        assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
@ -61,49 +60,57 @@ class TestProxy:
    # download via http: proxy (no tunnel)
    def test_10_01_proxy_http(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=curl.get_proxy_args(proxys=False))
        url = f"http://localhost:{env.http_port}/data.json"
        r = curl.http_download(
            urls=[url],
            alpn_proto="http/1.1",
            with_stats=True,
            extra_args=curl.get_proxy_args(proxys=False),
        )
        r.check_response(count=1, http_status=200)

    # download via https: proxy (no tunnel)
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    def test_10_02_proxys_down(self, env: Env, httpd, proto):
        if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if proto == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proto=proto)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r.check_response(count=1, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(
            count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
        )

    # upload via https: with proto (no tunnel)
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("fname, fcount", [
        ['data.json', 5],
        ['data-100k', 5],
        ['data-1m', 2]
    ])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(),
                        reason="no nghttpx available")
    def test_10_02_proxys_up(self, env: Env, httpd, nghttpx, proto,
                             fname, fcount):
        if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize(
        "fname, fcount", [["data.json", 5], ["data-100k", 5], ["data-1m", 2]]
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_02_proxys_up(self, env: Env, httpd, nghttpx, proto, fname, fcount):
        if proto == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        count = fcount
        srcfile = os.path.join(httpd.docs_dir, fname)
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/curltest/echo?id=[0-{count-1}]'
        url = f"http://localhost:{env.http_port}/curltest/echo?id=[0-{count-1}]"
        xargs = curl.get_proxy_args(proto=proto)
        r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
                             extra_args=xargs)
        r.check_response(count=count, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        r = curl.http_upload(
            urls=[url], data=f"@{srcfile}", alpn_proto=proto, extra_args=xargs
        )
        r.check_response(
            count=count,
            http_status=200,
            protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
        )
        indata = open(srcfile).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
@ -112,82 +119,97 @@ class TestProxy:
    # download http: via http: proxytunnel
    def test_10_03_proxytunnel_http(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=False, tunnel=True)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=200)

    # download http: via https: proxytunnel
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_04_proxy_https(self, env: Env, httpd, nghttpx_fwd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(tunnel=True)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=200)

    # download https: with proto via http: proxytunnel
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    def test_10_05_proxytunnel_http(self, env: Env, httpd, proto):
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json'
        url = f"https://localhost:{env.https_port}/data.json"
        xargs = curl.get_proxy_args(proxys=False, tunnel=True)
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
                               extra_args=xargs)
        r.check_response(count=1, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        r = curl.http_download(
            urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
        )
        r.check_response(
            count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
        )

    # download https: with proto via https: proxytunnel
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_06_proxytunnel_https(self, env: Env, httpd, nghttpx_fwd, proto, tunnel):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json?[0-0]'
        url = f"https://localhost:{env.https_port}/data.json?[0-0]"
        xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
                               extra_args=xargs)
        r.check_response(count=1, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        srcfile = os.path.join(httpd.docs_dir, 'data.json')
        r = curl.http_download(
            urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
        )
        r.check_response(
            count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
        )
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        srcfile = os.path.join(httpd.docs_dir, "data.json")
        dfile = curl.download_file(0)
        assert filecmp.cmp(srcfile, dfile, shallow=False)

    # download many https: with proto via https: proxytunnel
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("fname, fcount", [
        ['data.json', 100],
        ['data-100k', 20],
        ['data-1m', 5]
    ])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.parametrize(
        "fname, fcount", [["data.json", 100], ["data-100k", 20], ["data-1m", 5]]
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_07_pts_down_small(self, env: Env, httpd, nghttpx_fwd, proto,
                                  tunnel, fname, fcount):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
    def test_10_07_pts_down_small(
        self, env: Env, httpd, nghttpx_fwd, proto, tunnel, fname, fcount
    ):
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        count = fcount
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/{fname}?[0-{count-1}]'
        url = f"https://localhost:{env.https_port}/{fname}?[0-{count-1}]"
        xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
                               extra_args=xargs)
        r.check_response(count=count, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        r = curl.http_download(
            urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
        )
        r.check_response(
            count=count,
            http_status=200,
            protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
        )
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        srcfile = os.path.join(httpd.docs_dir, fname)
        for i in range(count):
            dfile = curl.download_file(i)
@ -196,50 +218,53 @@ class TestProxy:

    # upload many https: with proto via https: proxytunnel
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("fname, fcount", [
        ['data.json', 50],
        ['data-100k', 20],
        ['data-1m', 5]
    ])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.parametrize(
        "fname, fcount", [["data.json", 50], ["data-100k", 20], ["data-1m", 5]]
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_08_upload_seq_large(self, env: Env, httpd, nghttpx, proto,
                                    tunnel, fname, fcount):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
    def test_10_08_upload_seq_large(
        self, env: Env, httpd, nghttpx, proto, tunnel, fname, fcount
    ):
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        count = fcount
        srcfile = os.path.join(httpd.docs_dir, fname)
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]'
        url = f"https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]"
        xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
                             extra_args=xargs)
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        r = curl.http_upload(
            urls=[url], data=f"@{srcfile}", alpn_proto=proto, extra_args=xargs
        )
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        r.check_response(count=count, http_status=200)
        indata = open(srcfile).readlines()
        for i in range(count):
            respdata = open(curl.response_file(i)).readlines()
            assert respdata == indata, f'response {i} differs'
            assert respdata == indata, f"response {i} differs"
        assert r.total_connects == 1, r.dump_logs()

    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_09_reuse_ser(self, env: Env, httpd, nghttpx_fwd, tunnel):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url1 = f'https://localhost:{env.https_port}/data.json'
        url2 = f'http://localhost:{env.http_port}/data.json'
        url1 = f"https://localhost:{env.https_port}/data.json"
        url2 = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r = curl.http_download(urls=[url1, url2], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url1, url2], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=2, http_status=200)
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        if tunnel == 'h2':
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        if tunnel == "h2":
            # TODO: we would like to reuse the first connection for the
            # second URL, but this is currently not possible
            # assert r.total_connects == 1
@ -248,123 +273,151 @@ class TestProxy:
            assert r.total_connects == 2

    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_10_10_reuse_proxy(self, env: Env, httpd, nghttpx_fwd, tunnel):
        # url twice via https: proxy separated with '--next', will reuse
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json'
        url = f"https://localhost:{env.https_port}/data.json"
        proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=proxy_args)
        r1 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
        )
        r1.check_response(count=1, http_status=200)
        assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        assert (
            self.get_tunnel_proto_used(r1) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        # get the args, duplicate separated with '--next'
        x2_args = r1.args[1:]
        x2_args.append('--next')
        x2_args.append("--next")
        x2_args.extend(proxy_args)
        r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=x2_args)
        r2 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
        )
        r2.check_response(count=2, http_status=200)
        assert r2.total_connects == 1
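        # design note: '--next' starts a fresh set of options within the same
        # curl invocation but keeps the connection cache, so the second
        # transfer can reuse the established proxy tunnel - hence the single
        # connect asserted above.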

    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
    @pytest.mark.skipif(
        condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
    )
    def test_10_11_noreuse_proxy_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
        # different --proxy-tls13-ciphers, no reuse of connection for https:
        curl = CurlClient(env=env)
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        url = f'https://localhost:{env.https_port}/data.json'
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        url = f"https://localhost:{env.https_port}/data.json"
        proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=proxy_args)
        r1 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
        )
        r1.check_response(count=1, http_status=200)
        assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        assert (
            self.get_tunnel_proto_used(r1) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        # get the args, duplicate separated with '--next'
        x2_args = r1.args[1:]
        x2_args.append('--next')
        x2_args.append("--next")
        x2_args.extend(proxy_args)
        x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
        r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=x2_args)
        x2_args.extend(["--proxy-tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
        r2 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
        )
        r2.check_response(count=2, http_status=200)
        assert r2.total_connects == 2
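        # rationale, as the test name suggests: connection reuse is keyed on
        # the TLS configuration as well, so a second transfer with different
        # --proxy-tls13-ciphers must not reuse the first proxy connection.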

    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
    @pytest.mark.skipif(
        condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
    )
    def test_10_12_noreuse_proxy_http(self, env: Env, httpd, nghttpx_fwd, tunnel):
        # different --proxy-tls13-ciphers, no reuse of connection for http:
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=proxy_args)
        r1 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
        )
        r1.check_response(count=1, http_status=200)
        assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        assert (
            self.get_tunnel_proto_used(r1) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        # get the args, duplicate separated with '--next'
        x2_args = r1.args[1:]
        x2_args.append('--next')
        x2_args.append("--next")
        x2_args.extend(proxy_args)
        x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
        r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=x2_args)
        x2_args.extend(["--proxy-tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
        r2 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
        )
        r2.check_response(count=2, http_status=200)
        assert r2.total_connects == 2

    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
    @pytest.mark.skipif(
        condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
    )
    def test_10_13_noreuse_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
        # different --tls13-ciphers on https: same proxy config
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json'
        url = f"https://localhost:{env.https_port}/data.json"
        proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
        r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=proxy_args)
        r1 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
        )
        r1.check_response(count=1, http_status=200)
        assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        assert (
            self.get_tunnel_proto_used(r1) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )
        # get the args, duplicate separated with '--next'
        x2_args = r1.args[1:]
        x2_args.append('--next')
        x2_args.append("--next")
        x2_args.extend(proxy_args)
        x2_args.extend(['--tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
        r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                                extra_args=x2_args)
        x2_args.extend(["--tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
        r2 = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
        )
        r2.check_response(count=2, http_status=200)
        assert r2.total_connects == 2

    # download via https: proxy (no tunnel) using IP address
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.skipif(condition=Env.curl_uses_lib('bearssl'), reason="ip address cert verification not supported")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.skipif(
        condition=Env.curl_uses_lib("bearssl"),
        reason="ip address cert verification not supported",
    )
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    def test_10_14_proxys_ip_addr(self, env: Env, httpd, proto):
        if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if proto == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proto=proto, use_ip=True)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        if env.curl_uses_lib('mbedtls') and \
                not env.curl_lib_version_at_least('mbedtls', '3.5.0'):
            r.check_exit_code(60)  # CURLE_PEER_FAILED_VERIFICATION
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        if env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
            "mbedtls", "3.5.0"
        ):
            r.check_exit_code(60)  # CURLE_PEER_FAILED_VERIFICATION
        else:
            r.check_response(count=1, http_status=200,
                             protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
            r.check_response(
                count=1,
                http_status=200,
                protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
            )

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -37,8 +37,8 @@ from testenv import Env, CurlClient

log = logging.getLogger(__name__)

class UDSFaker:

class UDSFaker:
    def __init__(self, path):
        self._uds_path = path
        self._done = False
@ -73,12 +73,14 @@ class UDSFaker:
            c, client_address = self._socket.accept()
            try:
                c.recv(16)
                c.sendall("""HTTP/1.1 200 Ok
                c.sendall(
                    """HTTP/1.1 200 Ok
Server: UdsFaker
Content-Type: application/json
Content-Length: 19

{ "host": "faked" }""".encode())
{ "host": "faked" }""".encode()
                )
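                # the hard-coded Content-Length of 19 matches the body
                # '{ "host": "faked" }' exactly (19 bytes), so clients
                # receive a complete, well-formed HTTP/1.1 response.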
            finally:
                c.close()

@ -89,10 +91,9 @@ Content-Length: 19


class TestUnix:

    @pytest.fixture(scope="class")
    def uds_faker(self, env: Env) -> Generator[UDSFaker, None, None]:
        uds_path = os.path.join(env.gen_dir, 'uds_11.sock')
        uds_path = os.path.join(env.gen_dir, "uds_11.sock")
        faker = UDSFaker(path=uds_path)
        faker.start()
        yield faker
@ -101,32 +102,44 @@ class TestUnix:
    # download http: via Unix socket
    def test_11_01_unix_connect_http(self, env: Env, httpd, uds_faker):
        curl = CurlClient(env=env)
        url = f'http://{env.domain1}:{env.http_port}/data.json'
        r = curl.http_download(urls=[url], with_stats=True,
                               extra_args=[
                                   '--unix-socket', uds_faker.path,
                               ])
        url = f"http://{env.domain1}:{env.http_port}/data.json"
        r = curl.http_download(
            urls=[url],
            with_stats=True,
            extra_args=[
                "--unix-socket",
                uds_faker.path,
            ],
        )
        r.check_response(count=1, http_status=200)

    # download https: via Unix socket
    @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
    def test_11_02_unix_connect_http(self, env: Env, httpd, uds_faker):
        curl = CurlClient(env=env)
        url = f'https://{env.domain1}:{env.https_port}/data.json'
        r = curl.http_download(urls=[url], with_stats=True,
                               extra_args=[
                                   '--unix-socket', uds_faker.path,
                               ])
        url = f"https://{env.domain1}:{env.https_port}/data.json"
        r = curl.http_download(
            urls=[url],
            with_stats=True,
            extra_args=[
                "--unix-socket",
                uds_faker.path,
            ],
        )
        r.check_response(exitcode=35, http_status=None)

    # download HTTP/3 via Unix socket
    @pytest.mark.skipif(condition=not Env.have_h3(), reason='h3 not supported')
    @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
    def test_11_03_unix_connect_quic(self, env: Env, httpd, uds_faker):
        curl = CurlClient(env=env)
        url = f'https://{env.domain1}:{env.https_port}/data.json'
        r = curl.http_download(urls=[url], with_stats=True,
                               alpn_proto='h3',
                               extra_args=[
                                   '--unix-socket', uds_faker.path,
                               ])
        url = f"https://{env.domain1}:{env.https_port}/data.json"
        r = curl.http_download(
            urls=[url],
            with_stats=True,
            alpn_proto="h3",
            extra_args=[
                "--unix-socket",
                uds_faker.path,
            ],
        )
        r.check_response(exitcode=96, http_status=None)
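        # expected failures: the faker speaks plain HTTP/1.1, so the TLS
        # handshake in test_11_02 fails (35, CURLE_SSL_CONNECT_ERROR), and
        # QUIC cannot be carried over a Unix socket at all (96,
        # CURLE_QUIC_CONNECT_ERROR).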

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -37,40 +37,56 @@ log = logging.getLogger(__name__)

@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
class TestReuse:

    # check if HTTP/1.1 handles 'Connection: close' correctly
    @pytest.mark.parametrize("proto", ['http/1.1'])
    @pytest.mark.parametrize("proto", ["http/1.1"])
    def test_12_01_h1_conn_close(self, env: Env, httpd, nghttpx, proto):
        httpd.clear_extra_configs()
        httpd.set_extra_config('base', [
            'MaxKeepAliveRequests 1',
        ])
        httpd.set_extra_config(
            "base",
            [
                "MaxKeepAliveRequests 1",
            ],
        )
        httpd.reload()
        count = 100
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        urln = (
            f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
        )
        r = curl.http_download(urls=[urln], alpn_proto=proto)
        r.check_response(count=count, http_status=200)
        # Server sends `Connection: close` on every 2nd request, requiring
        # a new connection
        delta = 5
        assert (count/2 - delta) < r.total_connects < (count/2 + delta)
        assert (count / 2 - delta) < r.total_connects < (count / 2 + delta)
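        # with count=100 and delta=5 this accepts 46..54 connects, i.e.
        # roughly one new connection for every second request.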

    @pytest.mark.skipif(condition=Env.httpd_is_at_least('2.5.0'),
                        reason="httpd 2.5+ handles KeepAlives differently")
    @pytest.mark.parametrize("proto", ['http/1.1'])
    @pytest.mark.skipif(
        condition=Env.httpd_is_at_least("2.5.0"),
        reason="httpd 2.5+ handles KeepAlives differently",
    )
    @pytest.mark.parametrize("proto", ["http/1.1"])
    def test_12_02_h1_conn_timeout(self, env: Env, httpd, nghttpx, proto):
        httpd.clear_extra_configs()
        httpd.set_extra_config('base', [
            'KeepAliveTimeout 1',
        ])
        httpd.set_extra_config(
            "base",
            [
                "KeepAliveTimeout 1",
            ],
        )
        httpd.reload()
        count = 5
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--rate', '30/m',
        ])
        urln = (
            f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
        )
        r = curl.http_download(
            urls=[urln],
            alpn_proto=proto,
            extra_args=[
                "--rate",
                "30/m",
            ],
        )
        r.check_response(count=count, http_status=200)
        # Connections time out on server before we send another request
        assert r.total_connects == count
@ -81,22 +97,29 @@ class TestReuse:
        httpd.reload()
        count = 2
        # write an alt-svc file that advises h3 instead of h2
        asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt')
        asfile = os.path.join(env.gen_dir, "alt-svc-12_03.txt")
        ts = datetime.now() + timedelta(hours=24)
        expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
        with open(asfile, 'w') as fd:
            fd.write(f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.https_port} "{expires}" 0 0')
        log.info(f'altsvc: {open(asfile).readlines()}')
        expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
        with open(asfile, "w") as fd:
            fd.write(
                f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.https_port} "{expires}" 0 0'
            )
        log.info(f"altsvc: {open(asfile).readlines()}")
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
            '--alt-svc', f'{asfile}',
        ])
        r = curl.http_download(
            urls=[urln],
            with_stats=True,
            extra_args=[
                "--alt-svc",
                f"{asfile}",
            ],
        )
        r.check_response(count=count, http_status=200)
        # We expect the connection to be reused
        assert r.total_connects == 1
        for s in r.stats:
            assert s['http_version'] == '3', f'{s}'
            assert s["http_version"] == "3", f"{s}"

    @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
    def test_12_04_alt_svc_h3h2(self, env: Env, httpd, nghttpx):
@ -104,22 +127,29 @@ class TestReuse:
        httpd.reload()
        count = 2
        # write an alt-svc file that advises h2 instead of h3
        asfile = os.path.join(env.gen_dir, 'alt-svc-12_04.txt')
        asfile = os.path.join(env.gen_dir, "alt-svc-12_04.txt")
        ts = datetime.now() + timedelta(hours=24)
        expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
        with open(asfile, 'w') as fd:
            fd.write(f'h3 {env.domain1} {env.https_port} h2 {env.domain1} {env.https_port} "{expires}" 0 0')
        log.info(f'altsvc: {open(asfile).readlines()}')
        expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
        with open(asfile, "w") as fd:
            fd.write(
                f'h3 {env.domain1} {env.https_port} h2 {env.domain1} {env.https_port} "{expires}" 0 0'
            )
        log.info(f"altsvc: {open(asfile).readlines()}")
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
            '--alt-svc', f'{asfile}',
        ])
        r = curl.http_download(
            urls=[urln],
            with_stats=True,
            extra_args=[
                "--alt-svc",
                f"{asfile}",
            ],
        )
        r.check_response(count=count, http_status=200)
        # We expect the connection to be reused and use HTTP/2
        assert r.total_connects == 1
        for s in r.stats:
            assert s['http_version'] == '2', f'{s}'
            assert s["http_version"] == "2", f"{s}"

    @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
    def test_12_05_alt_svc_h3h1(self, env: Env, httpd, nghttpx):
@ -127,19 +157,26 @@ class TestReuse:
        httpd.reload()
        count = 2
        # write an alt-svc file that advises h1 instead of h3
        asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt')
        asfile = os.path.join(env.gen_dir, "alt-svc-12_05.txt")
        ts = datetime.now() + timedelta(hours=24)
        expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
        with open(asfile, 'w') as fd:
            fd.write(f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0')
        log.info(f'altsvc: {open(asfile).readlines()}')
        expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
        with open(asfile, "w") as fd:
            fd.write(
                f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0'
            )
        log.info(f"altsvc: {open(asfile).readlines()}")
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
            '--alt-svc', f'{asfile}',
        ])
        r = curl.http_download(
            urls=[urln],
            with_stats=True,
            extra_args=[
                "--alt-svc",
                f"{asfile}",
            ],
        )
        r.check_response(count=count, http_status=200)
        # We expect the connection to be reused and use HTTP/1.1
        assert r.total_connects == 1
        for s in r.stats:
            assert s['http_version'] == '1.1', f'{s}'
            assert s["http_version"] == "1.1", f"{s}"

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -34,11 +34,11 @@ from testenv import Env, CurlClient, ExecResult
log = logging.getLogger(__name__)


@pytest.mark.skipif(condition=Env.setup_incomplete(),
                    reason=f"missing: {Env.incomplete_reason()}")
@pytest.mark.skipif(
    condition=Env.setup_incomplete(), reason=f"missing: {Env.incomplete_reason()}"
)
class TestProxyAuth:

    @pytest.fixture(autouse=True, scope='class')
    @pytest.fixture(autouse=True, scope="class")
    def _class_scope(self, env, httpd, nghttpx_fwd):
        if env.have_nghttpx():
            nghttpx_fwd.start_if_needed()
@ -51,7 +51,7 @@ class TestProxyAuth:

    def get_tunnel_proto_used(self, r: ExecResult):
        for line in r.trace_lines:
            m = re.match(r'.* CONNECT tunnel: (\S+) negotiated$', line)
            m = re.match(r".* CONNECT tunnel: (\S+) negotiated$", line)
            if m:
                return m.group(1)
        assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
@ -60,95 +60,117 @@ class TestProxyAuth:
    # download via http: proxy (no tunnel), no auth
    def test_13_01_proxy_no_auth(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=curl.get_proxy_args(proxys=False))
        url = f"http://localhost:{env.http_port}/data.json"
        r = curl.http_download(
            urls=[url],
            alpn_proto="http/1.1",
            with_stats=True,
            extra_args=curl.get_proxy_args(proxys=False),
        )
        r.check_response(count=1, http_status=407)

    # download via http: proxy (no tunnel), auth
    def test_13_02_proxy_auth(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=False)
        xargs.extend(['--proxy-user', 'proxy:proxy'])
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        xargs.extend(["--proxy-user", "proxy:proxy"])
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=200)

    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_13_03_proxys_no_auth(self, env: Env, httpd, nghttpx_fwd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=True)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=407)

    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    def test_13_04_proxys_auth(self, env: Env, httpd, nghttpx_fwd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=True)
        xargs.extend(['--proxy-user', 'proxy:proxy'])
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        xargs.extend(["--proxy-user", "proxy:proxy"])
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=200)

    def test_13_05_tunnel_http_no_auth(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=False, tunnel=True)
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        # expect "COULD_NOT_CONNECT"
        r.check_response(exitcode=56, http_status=None)

    def test_13_06_tunnel_http_auth(self, env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'http://localhost:{env.http_port}/data.json'
        url = f"http://localhost:{env.http_port}/data.json"
        xargs = curl.get_proxy_args(proxys=False, tunnel=True)
        xargs.extend(['--proxy-user', 'proxy:proxy'])
        r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
                               extra_args=xargs)
        xargs.extend(["--proxy-user", "proxy:proxy"])
        r = curl.http_download(
            urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
        )
        r.check_response(count=1, http_status=200)

    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    def test_13_07_tunnels_no_auth(self, env: Env, httpd, proto, tunnel):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json'
        url = f"https://localhost:{env.https_port}/data.json"
        xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
                               extra_args=xargs)
        r = curl.http_download(
            urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
        )
        # expect "COULD_NOT_CONNECT"
        r.check_response(exitcode=56, http_status=None)
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )

    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
    @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
                        reason='curl lacks HTTPS-proxy support')
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
    @pytest.mark.skipif(
        condition=not Env.curl_has_feature("HTTPS-proxy"),
        reason="curl lacks HTTPS-proxy support",
    )
    @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
    @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
    def test_13_08_tunnels_auth(self, env: Env, httpd, proto, tunnel):
        if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
            pytest.skip('only supported with nghttp2')
        if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
            pytest.skip("only supported with nghttp2")
        curl = CurlClient(env=env)
        url = f'https://localhost:{env.https_port}/data.json'
        url = f"https://localhost:{env.https_port}/data.json"
        xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
        xargs.extend(['--proxy-user', 'proxy:proxy'])
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
                               extra_args=xargs)
        r.check_response(count=1, http_status=200,
                         protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
        assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
            if tunnel == 'h2' else 'HTTP/1.1'
        xargs.extend(["--proxy-user", "proxy:proxy"])
        r = curl.http_download(
            urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
        )
        r.check_response(
            count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
        )
        assert (
            self.get_tunnel_proto_used(r) == "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
        )

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@ -35,103 +35,124 @@ log = logging.getLogger(__name__)


class TestAuth:

    @pytest.fixture(autouse=True, scope='class')
    @pytest.fixture(autouse=True, scope="class")
    def _class_scope(self, env, httpd, nghttpx):
        if env.have_h3():
            nghttpx.start_if_needed()
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
        httpd.clear_extra_configs()
        httpd.reload()

    # download 1 file, not authenticated
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
    def test_14_01_digest_get_noauth(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
        if proto == "h3" and not env.have_h3():
            pytest.skip("h3 not supported")
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
        url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
        r = curl.http_download(urls=[url], alpn_proto=proto)
        r.check_response(http_status=401)

    # download 1 file, authenticated
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
    def test_14_02_digest_get_auth(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
        if proto == "h3" and not env.have_h3():
            pytest.skip("h3 not supported")
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
            '--digest', '--user', 'test:test'
        ])
        url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
        r = curl.http_download(
            urls=[url], alpn_proto=proto, extra_args=["--digest", "--user", "test:test"]
        )
        r.check_response(http_status=200)

    # PUT data, authenticated
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
    def test_14_03_digest_put_auth(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
        if proto == "h3" and not env.have_h3():
            pytest.skip("h3 not supported")
        data='0123456789'
        data = "0123456789"
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
            '--digest', '--user', 'test:test'
        ])
        url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
        r = curl.http_upload(
            urls=[url],
            data=data,
            alpn_proto=proto,
            extra_args=["--digest", "--user", "test:test"],
        )
        r.check_response(http_status=200)

    # PUT data, digest auth large pw
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    @pytest.mark.parametrize("proto", ["h2", "h3"])
    def test_14_04_digest_large_pw(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
        if proto == "h3" and not env.have_h3():
            pytest.skip("h3 not supported")
        data='0123456789'
        password = 'x' * 65535
        data = "0123456789"
        password = "x" * 65535
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
        r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
            '--digest', '--user', f'test:{password}',
            '--trace-config', 'http/2,http/3'
        ])
        url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
        r = curl.http_upload(
            urls=[url],
            data=data,
            alpn_proto=proto,
            extra_args=[
                "--digest",
                "--user",
                f"test:{password}",
                "--trace-config",
                "http/2,http/3",
            ],
        )
        # digest does not submit the password, but a hash of it, so all
        # works and, since the pw is not correct, we get a 401
        r.check_response(http_status=401)
|
||||
|
||||
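Why the oversized password is harmless here: Digest never puts the password on the wire, only a fixed-size hash derived from it. A minimal sketch of that derivation, assuming RFC 7616 with algorithm=MD5 (all values below are illustrative, not taken from the test server):

import hashlib

def digest_response(user, password, realm, method, uri, nonce, nc, cnonce):
    # the password only enters HA1; the header carries a 32-char hex digest
    def h(s):
        return hashlib.md5(s.encode()).hexdigest()
    ha1 = h(f"{user}:{realm}:{password}")
    ha2 = h(f"{method}:{uri}")
    return h(f"{ha1}:{nonce}:{nc}:{cnonce}:auth:{ha2}")

resp = digest_response("test", "x" * 65535, "restricted", "PUT",
                       "/restricted/digest/data.json", "nonce", "00000001", "cnonce")
assert len(resp) == 32  # header size is independent of the password length
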
# PUT data, basic auth large pw
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@pytest.mark.parametrize("proto", ["h2", "h3"])
def test_14_05_basic_large_pw(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and not env.curl_uses_lib('ngtcp2'):
if proto == "h3" and not env.curl_uses_lib("ngtcp2"):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche/openssl-quic have problems with large requests")
# just large enough that nghttp2 will submit
password = 'x' * (47 * 1024)
fdata = os.path.join(env.gen_dir, 'data-10m')
password = "x" * (47 * 1024)
fdata = os.path.join(env.gen_dir, "data-10m")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--basic', '--user', f'test:{password}',
'--trace-config', 'http/2,http/3'
])
url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
r = curl.http_upload(
urls=[url],
data=f"@{fdata}",
alpn_proto=proto,
extra_args=[
"--basic",
"--user",
f"test:{password}",
"--trace-config",
"http/2,http/3",
],
)
# but apache denies on length limit
r.check_response(http_status=431)

# PUT data, basic auth with very large pw
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@pytest.mark.parametrize("proto", ["h2", "h3"])
def test_14_06_basic_very_large_pw(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
if proto == 'h3' and env.curl_uses_lib('quiche'):
if proto == "h3" and env.curl_uses_lib("quiche"):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche has problems with large requests")
password = 'x' * (64 * 1024)
fdata = os.path.join(env.gen_dir, 'data-10m')
password = "x" * (64 * 1024)
fdata = os.path.join(env.gen_dir, "data-10m")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
'--basic', '--user', f'test:{password}'
])
url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
r = curl.http_upload(
urls=[url],
data=f"@{fdata}",
alpn_proto=proto,
extra_args=["--basic", "--user", f"test:{password}"],
)
# Depending on protocol, we might have an error sending or
# the server might shutdown the connection and we see the error
# on receiving
assert r.exit_code in [55, 56], f'{r.dump_logs()}'
assert r.exit_code in [55, 56], f"{r.dump_logs()}"

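For contrast, Basic auth does send the credentials: `user:password` is base64-encoded into the Authorization header, growing by roughly 4/3. A rough size check (a sketch, not curl's code) shows why the 47 KiB password above already trips Apache's header-size limit (431) and the 64 KiB one breaks the connection outright:

import base64

creds = b"test:" + b"x" * (47 * 1024)
header = b"Authorization: Basic " + base64.b64encode(creds)
assert len(header) > 60000  # far beyond typical 8k-16k request header limits
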
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@@ -35,14 +35,11 @@ log = logging.getLogger(__name__)


class TestTracing:

# default verbose output
def test_15_01_trace_defaults(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v'
])
url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(url=url, def_tracing=False, extra_args=["-v"])
r.check_response(http_status=200)
trace = r.trace_lines
assert len(trace) > 0
@@ -50,43 +47,43 @@ class TestTracing:
# trace ids
def test_15_02_trace_ids(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'ids'
])
url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(
url=url, def_tracing=False, extra_args=["-v", "--trace-config", "ids"]
)
r.check_response(http_status=200)
for line in r.trace_lines:
m = re.match(r'^\[0-[0x]] .+', line)
for line in r.trace_lines:
m = re.match(r"^\[0-[0x]] .+", line)
if m is None:
assert False, f'no match: {line}'
assert False, f"no match: {line}"

# trace ids+time
def test_15_03_trace_ids_time(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'ids,time'
])
url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(
url=url, def_tracing=False, extra_args=["-v", "--trace-config", "ids,time"]
)
r.check_response(http_status=200)
for line in r.trace_lines:
m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
for line in r.trace_lines:
m = re.match(r"^([0-9:.]+) \[0-[0x]] .+", line)
if m is None:
assert False, f'no match: {line}'
assert False, f"no match: {line}"

# trace all
def test_15_04_trace_all(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'all'
])
url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(
url=url, def_tracing=False, extra_args=["-v", "--trace-config", "all"]
)
r.check_response(http_status=200)
found_tcp = False
for line in r.trace_lines:
m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
for line in r.trace_lines:
m = re.match(r"^([0-9:.]+) \[0-[0x]] .+", line)
if m is None:
assert False, f'no match: {line}'
m = re.match(r'^([0-9:.]+) \[0-[0x]] . \[TCP].+', line)
assert False, f"no match: {line}"
m = re.match(r"^([0-9:.]+) \[0-[0x]] . \[TCP].+", line)
if m is not None:
found_tcp = True
if not found_tcp:
@@ -95,17 +92,19 @@ class TestTracing:
# trace all, no TCP, no time
def test_15_05_trace_all(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url, def_tracing=False, extra_args=[
'-v', '--trace-config', 'all,-tcp,-time'
])
url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(
url=url,
def_tracing=False,
extra_args=["-v", "--trace-config", "all,-tcp,-time"],
)
r.check_response(http_status=200)
found_tcp = False
for line in r.trace_lines:
m = re.match(r'^\[0-[0x]] .+', line)
for line in r.trace_lines:
m = re.match(r"^\[0-[0x]] .+", line)
if m is None:
assert False, f'no match: {line}'
m = re.match(r'^\[0-[0x]] . \[TCP].+', line)
assert False, f"no match: {line}"
m = re.match(r"^\[0-[0x]] . \[TCP].+", line)
if m is not None:
found_tcp = True
if found_tcp:

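The regexes in these tests encode the shape of curl's verbose output under --trace-config: `ids` prefixes every line with a `[connection-transfer]` marker and `time` puts a `HH:MM:SS.micros` timestamp in front of that. A sketch with made-up sample lines:

import re

with_ids = "[0-0] * Connected to localhost"
with_time = "01:02:03.123456 [0-0] * Connected to localhost"
assert re.match(r"^\[0-[0x]] .+", with_ids)
assert re.match(r"^([0-9:.]+) \[0-[0x]] .+", with_time)
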
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@@ -35,97 +35,121 @@ log = logging.getLogger(__name__)


class TestInfo:

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)

# download plain file
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_01_info_download(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
r.check_stats(
count=count,
http_status=200,
exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip="127.0.0.1",
)
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)

# download plain file with a 302 redirect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_02_info_302_download(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True, extra_args=[
'--location'
])
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
url = f"https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]"
r = curl.http_download(
urls=[url], alpn_proto=proto, with_stats=True, extra_args=["--location"]
)
r.check_stats(
count=count,
http_status=200,
exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip="127.0.0.1",
)
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)

@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_03_info_upload(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
fdata = os.path.join(env.gen_dir, 'data-100k')
fdata = os.path.join(env.gen_dir, "data-100k")
fsize = 100 * 1024
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
with_headers=True, extra_args=[
'--trace-config', 'http/2,http/3'
])
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
r = curl.http_upload(
urls=[url],
data=f"@{fdata}",
alpn_proto=proto,
with_headers=True,
extra_args=["--trace-config", "http/2,http/3"],
)
r.check_response(count=count, http_status=200)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip='127.0.0.1')
r.check_stats(
count=count,
http_status=200,
exitcode=0,
remote_port=env.port_for(alpn_proto=proto),
remote_ip="127.0.0.1",
)
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=fsize, ul_size=fsize)

# download plain file via http: ('time_appconnect' is 0)
@pytest.mark.parametrize("proto", ['http/1.1'])
@pytest.mark.parametrize("proto", ["http/1.1"])
def test_16_04_info_http_download(self, env: Env, httpd, nghttpx, proto):
count = 2
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
url = f"http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]"
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
r.check_stats(count=count, http_status=200, exitcode=0,
remote_port=env.http_port, remote_ip='127.0.0.1')
r.check_stats(
count=count,
http_status=200,
exitcode=0,
remote_port=env.http_port,
remote_ip="127.0.0.1",
)
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)

def check_stat(self, idx, s, r, dl_size=None, ul_size=None):
self.check_stat_times(s, idx)
# we always send something
self.check_stat_positive(s, idx, 'size_request')
self.check_stat_positive(s, idx, "size_request")
# we always receive response headers
self.check_stat_positive(s, idx, 'size_header')
self.check_stat_positive(s, idx, "size_header")
if ul_size is not None:
assert s['size_upload'] == ul_size, f'stat #{idx}\n{r.dump_logs()}'  # the file we sent
assert s['size_request'] >= s['size_upload'], \
f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
assert (
s["size_upload"] == ul_size
), f"stat #{idx}\n{r.dump_logs()}"  # the file we sent
assert (
s["size_request"] >= s["size_upload"]
), f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
if dl_size is not None:
assert s['size_download'] == dl_size, f'stat #{idx}\n{r.dump_logs()}'  # the file we received
assert (
s["size_download"] == dl_size
), f"stat #{idx}\n{r.dump_logs()}"  # the file we received

def check_stat_positive(self, s, idx, key):
assert key in s, f'stat #{idx} "{key}" missing: {s}'
@@ -137,20 +161,29 @@ class TestInfo:

def check_stat_times(self, s, idx):
# check timings reported on a transfer for consistency
url = s['url_effective']
url = s["url_effective"]
# all stat keys which report timings
all_keys = {
'time_appconnect', 'time_connect', 'time_redirect',
'time_pretransfer', 'time_starttransfer', 'time_total'
"time_appconnect",
"time_connect",
"time_redirect",
"time_pretransfer",
"time_starttransfer",
"time_total",
}
# stat keys where we expect a positive value
pos_keys = {'time_pretransfer', 'time_starttransfer', 'time_total', 'time_queue'}
if s['num_connects'] > 0:
pos_keys.add('time_connect')
if url.startswith('https:'):
pos_keys.add('time_appconnect')
if s['num_redirects'] > 0:
pos_keys.add('time_redirect')
pos_keys = {
"time_pretransfer",
"time_starttransfer",
"time_total",
"time_queue",
}
if s["num_connects"] > 0:
pos_keys.add("time_connect")
if url.startswith("https:"):
pos_keys.add("time_appconnect")
if s["num_redirects"] > 0:
pos_keys.add("time_redirect")
zero_keys = all_keys - pos_keys
# assert all zeros are zeros and the others are positive
for key in zero_keys:
@@ -158,18 +191,23 @@ class TestInfo:
for key in pos_keys:
self.check_stat_positive(s, idx, key)
# assert that all timers before "time_pretransfer" are less than or equal
for key in ['time_appconnect', 'time_connect', 'time_namelookup']:
assert s[key] < s['time_pretransfer'], f'time "{key}" larger than' \
f'"time_pretransfer": {s}'
for key in ["time_appconnect", "time_connect", "time_namelookup"]:
assert s[key] < s["time_pretransfer"], (
f'time "{key}" larger than' f'"time_pretransfer": {s}'
)
# assert transfer start is after pretransfer
assert s['time_pretransfer'] <= s['time_starttransfer'], f'"time_pretransfer" '\
f'greater than "time_starttransfer", {s}'
assert s["time_pretransfer"] <= s["time_starttransfer"], (
f'"time_pretransfer" ' f'greater than "time_starttransfer", {s}'
)
# assert that transfer start is before total
assert s['time_starttransfer'] <= s['time_total'], f'"time_starttransfer" '\
f'greater than "time_total", {s}'
if s['num_redirects'] > 0:
assert s['time_queue'] < s['time_starttransfer'], f'"time_queue" '\
f'greater/equal than "time_starttransfer", {s}'
assert s["time_starttransfer"] <= s["time_total"], (
f'"time_starttransfer" ' f'greater than "time_total", {s}'
)
if s["num_redirects"] > 0:
assert s["time_queue"] < s["time_starttransfer"], (
f'"time_queue" ' f'greater/equal than "time_starttransfer", {s}'
)
else:
assert s['time_queue'] <= s['time_starttransfer'], f'"time_queue" '\
f'greater than "time_starttransfer", {s}'
assert s["time_queue"] <= s["time_starttransfer"], (
f'"time_queue" ' f'greater than "time_starttransfer", {s}'
)

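Summed up, check_stat_times enforces a single ordering over curl's reported timings. Written out as one predicate over a stat entry s, using curl's --write-out key names (a sketch, not part of the test suite):

def timings_consistent(s):
    # name resolution, TCP connect and TLS handshake all finish before
    # the transfer is considered "pre-transfer"-complete
    before_pre = all(s[k] < s["time_pretransfer"]
                     for k in ("time_namelookup", "time_connect", "time_appconnect"))
    return before_pre and (s["time_pretransfer"] <= s["time_starttransfer"]
                           <= s["time_total"])
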
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@@ -37,54 +37,59 @@ log = logging.getLogger(__name__)


class TestSSLUse:

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10 * 1024)
if env.have_h3():
nghttpx.start_if_needed()

@pytest.fixture(autouse=True, scope='function')
@pytest.fixture(autouse=True, scope="function")
def _function_scope(self, request, env, httpd):
httpd.clear_extra_configs()
if 'httpd' not in request.node._fixtureinfo.argnames:
if "httpd" not in request.node._fixtureinfo.argnames:
httpd.reload_if_config_changed()

def test_17_01_sslinfo_plain(self, env: Env, nghttpx):
proto = 'http/1.1'
proto = "http/1.1"
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.json['HTTPS'] == 'on', f'{r.json}'
assert 'SSL_SESSION_ID' in r.json, f'{r.json}'
assert 'SSL_SESSION_RESUMED' in r.json, f'{r.json}'
assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}'
assert r.json["HTTPS"] == "on", f"{r.json}"
assert "SSL_SESSION_ID" in r.json, f"{r.json}"
assert "SSL_SESSION_RESUMED" in r.json, f"{r.json}"
assert r.json["SSL_SESSION_RESUMED"] == "Initial", f"{r.json}"

@pytest.mark.parametrize("tls_max", ['1.2', '1.3'])
@pytest.mark.parametrize("tls_max", ["1.2", "1.3"])
def test_17_02_sslinfo_reconnect(self, env: Env, tls_max):
proto = 'http/1.1'
proto = "http/1.1"
count = 3
exp_resumed = 'Resumed'
xargs = ['--sessionid', '--tls-max', tls_max, f'--tlsv{tls_max}']
if env.curl_uses_lib('libressl'):
if tls_max == '1.3':
exp_resumed = 'Initial'  # 1.2 works in LibreSSL, but 1.3 does not, TODO
if env.curl_uses_lib('rustls-ffi'):
exp_resumed = 'Initial'  # Rustls does not support sessions, TODO
if env.curl_uses_lib('bearssl') and tls_max == '1.3':
pytest.skip('BearSSL does not support TLSv1.3')
if env.curl_uses_lib('mbedtls') and tls_max == '1.3' and \
not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
pytest.skip('mbedtls TLSv1.3 session resume not working in 3.6.0')
exp_resumed = "Resumed"
xargs = ["--sessionid", "--tls-max", tls_max, f"--tlsv{tls_max}"]
if env.curl_uses_lib("libressl"):
if tls_max == "1.3":
exp_resumed = "Initial"  # 1.2 works in LibreSSL, but 1.3 does not, TODO
if env.curl_uses_lib("rustls-ffi"):
exp_resumed = "Initial"  # Rustls does not support sessions, TODO
if env.curl_uses_lib("bearssl") and tls_max == "1.3":
pytest.skip("BearSSL does not support TLSv1.3")
if (
env.curl_uses_lib("mbedtls")
and tls_max == "1.3"
and not env.curl_lib_version_at_least("mbedtls", "3.6.0")
):
pytest.skip("mbedtls TLSv1.3 session resume not working in 3.6.0")

run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'ssl'
run_env["CURL_DEBUG"] = "ssl"
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?'\
f'id=[0-{count-1}]&close'
r = curl.http_download(urls=[urln], alpn_proto=proto, with_stats=True,
extra_args=xargs)
urln = (
f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?"
f"id=[0-{count-1}]&close"
)
r = curl.http_download(
urls=[urln], alpn_proto=proto, with_stats=True, extra_args=xargs
)
r.check_response(count=count, http_status=200)
# should have used one connection for each request, sessions after
# first should have been resumed
@@ -94,296 +99,348 @@ class TestSSLUse:
assert os.path.exists(dfile)
with open(dfile) as f:
djson = json.load(f)
assert djson['HTTPS'] == 'on', f'{i}: {djson}'
assert djson["HTTPS"] == "on", f"{i}: {djson}"
if i == 0:
assert djson['SSL_SESSION_RESUMED'] == 'Initial', f'{i}: {djson}\n{r.dump_logs()}'
assert (
djson["SSL_SESSION_RESUMED"] == "Initial"
), f"{i}: {djson}\n{r.dump_logs()}"
else:
assert djson['SSL_SESSION_RESUMED'] == exp_resumed, f'{i}: {djson}\n{r.dump_logs()}'
assert (
djson["SSL_SESSION_RESUMED"] == exp_resumed
), f"{i}: {djson}\n{r.dump_logs()}"

# use host name with trailing dot, verify handshake
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_03_trailing_dot(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'{env.domain1}.'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
domain = f"{env.domain1}."
url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3':  # we proxy h3
assert r.exit_code == 0, f"{r}"
assert r.json, f"{r}"
if proto != "h3":  # we proxy h3
# the SNI the server received is without trailing dot
assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
assert r.json["SSL_TLS_SNI"] == env.domain1, f"{r.json}"

# use host name with double trailing dot, verify handshake
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_04_double_dot(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'{env.domain1}..'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'-H', f'Host: {env.domain1}',
])
domain = f"{env.domain1}.."
url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(
url=url,
alpn_proto=proto,
extra_args=[
"-H",
f"Host: {env.domain1}",
],
)
if r.exit_code == 0:
assert r.json, f'{r.stdout}'
assert r.json, f"{r.stdout}"
# the SNI the server received is without trailing dot
if proto != 'h3':  # we proxy h3
assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
assert False, f'should not have succeeded: {r.json}'
if proto != "h3":  # we proxy h3
assert r.json["SSL_TLS_SNI"] == env.domain1, f"{r.json}"
assert False, f"should not have succeeded: {r.json}"
# 7 - Rustls rejects a servername with .. during setup
# 35 - LibreSSL rejects setting an SNI name with trailing dot
# 60 - peer name matching failed against certificate
assert r.exit_code in [7, 35, 60], f'{r}'
assert r.exit_code in [7, 35, 60], f"{r}"

# use ip address for connect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_05_ip_addr(self, env: Env, proto):
if env.curl_uses_lib('bearssl'):
if env.curl_uses_lib("bearssl"):
pytest.skip("BearSSL does not support cert verification with IP addresses")
if env.curl_uses_lib('mbedtls'):
if env.curl_uses_lib("mbedtls"):
pytest.skip("mbedTLS does use IP addresses in SNI")
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = '127.0.0.1'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
domain = "127.0.0.1"
url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3':  # we proxy h3
assert r.exit_code == 0, f"{r}"
assert r.json, f"{r}"
if proto != "h3":  # we proxy h3
# the SNI should not have been used
assert 'SSL_TLS_SNI' not in r.json, f'{r.json}'
assert "SSL_TLS_SNI" not in r.json, f"{r.json}"

# use localhost for connect
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_06_localhost(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = 'localhost'
url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
domain = "localhost"
url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
if proto != 'h3':  # we proxy h3
assert r.json['SSL_TLS_SNI'] == domain, f'{r.json}'
assert r.exit_code == 0, f"{r}"
assert r.json, f"{r}"
if proto != "h3":  # we proxy h3
assert r.json["SSL_TLS_SNI"] == domain, f"{r.json}"

@staticmethod
def gen_test_17_07_list():
tls13_tests = [
[None, True],
[['TLS_AES_128_GCM_SHA256'], True],
[['TLS_AES_256_GCM_SHA384'], False],
[['TLS_CHACHA20_POLY1305_SHA256'], True],
[['TLS_AES_256_GCM_SHA384',
'TLS_CHACHA20_POLY1305_SHA256'], True],
[["TLS_AES_128_GCM_SHA256"], True],
[["TLS_AES_256_GCM_SHA384"], False],
[["TLS_CHACHA20_POLY1305_SHA256"], True],
[["TLS_AES_256_GCM_SHA384", "TLS_CHACHA20_POLY1305_SHA256"], True],
]
tls12_tests = [
[None, True],
[['ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES128-GCM-SHA256'], True],
[['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384'], False],
[['ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
[['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384',
'ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
[["ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256"], True],
[["ECDHE-ECDSA-AES256-GCM-SHA384", "ECDHE-RSA-AES256-GCM-SHA384"], False],
[["ECDHE-ECDSA-CHACHA20-POLY1305", "ECDHE-RSA-CHACHA20-POLY1305"], True],
[
[
"ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-GCM-SHA384",
"ECDHE-ECDSA-CHACHA20-POLY1305",
"ECDHE-RSA-CHACHA20-POLY1305",
],
True,
],
]
ret = []
for tls_proto in ['TLSv1.3 +TLSv1.2', 'TLSv1.3', 'TLSv1.2']:
for tls_proto in ["TLSv1.3 +TLSv1.2", "TLSv1.3", "TLSv1.2"]:
for [ciphers13, succeed13] in tls13_tests:
for [ciphers12, succeed12] in tls12_tests:
ret.append([tls_proto, ciphers13, ciphers12, succeed13, succeed12])
return ret

@pytest.mark.parametrize("tls_proto, ciphers13, ciphers12, succeed13, succeed12", gen_test_17_07_list())
def test_17_07_ssl_ciphers(self, env: Env, httpd, tls_proto, ciphers13, ciphers12, succeed13, succeed12):
@pytest.mark.parametrize(
"tls_proto, ciphers13, ciphers12, succeed13, succeed12", gen_test_17_07_list()
)
def test_17_07_ssl_ciphers(
self, env: Env, httpd, tls_proto, ciphers13, ciphers12, succeed13, succeed12
):
# to test setting cipher suites, the AES 256 ciphers are disabled in the test server
httpd.set_extra_config('base', [
'SSLCipherSuite SSL'
' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256'
':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305',
'SSLCipherSuite TLSv1.3'
' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256',
f'SSLProtocol {tls_proto}'
])
httpd.set_extra_config(
"base",
[
"SSLCipherSuite SSL"
" ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256"
":ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305",
"SSLCipherSuite TLSv1.3"
" TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256",
f"SSLProtocol {tls_proto}",
],
)
httpd.reload_if_config_changed()
proto = 'http/1.1'
proto = "http/1.1"
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
# SSL backend specifics
if env.curl_uses_lib('gnutls'):
pytest.skip('GnuTLS does not support setting ciphers')
elif env.curl_uses_lib('boringssl'):
if env.curl_uses_lib("gnutls"):
pytest.skip("GnuTLS does not support setting ciphers")
elif env.curl_uses_lib("boringssl"):
if ciphers13 is not None:
pytest.skip('BoringSSL does not support setting TLSv1.3 ciphers')
elif env.curl_uses_lib('schannel'):  # not in CI, so untested
pytest.skip("BoringSSL does not support setting TLSv1.3 ciphers")
elif env.curl_uses_lib("schannel"):  # not in CI, so untested
if ciphers12 is not None:
pytest.skip('Schannel does not support setting TLSv1.2 ciphers by name')
elif env.curl_uses_lib('bearssl'):
if tls_proto == 'TLSv1.3':
pytest.skip('BearSSL does not support TLSv1.3')
tls_proto = 'TLSv1.2'
elif env.curl_uses_lib('mbedtls') and not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
if tls_proto == 'TLSv1.3':
pytest.skip('mbedTLS < 3.6.0 does not support TLSv1.3')
elif env.curl_uses_lib('sectransp'):  # not in CI, so untested
if tls_proto == 'TLSv1.3':
pytest.skip('Secure Transport does not support TLSv1.3')
tls_proto = 'TLSv1.2'
pytest.skip("Schannel does not support setting TLSv1.2 ciphers by name")
elif env.curl_uses_lib("bearssl"):
if tls_proto == "TLSv1.3":
pytest.skip("BearSSL does not support TLSv1.3")
tls_proto = "TLSv1.2"
elif env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
"mbedtls", "3.6.0"
):
if tls_proto == "TLSv1.3":
pytest.skip("mbedTLS < 3.6.0 does not support TLSv1.3")
elif env.curl_uses_lib("sectransp"):  # not in CI, so untested
if tls_proto == "TLSv1.3":
pytest.skip("Secure Transport does not support TLSv1.3")
tls_proto = "TLSv1.2"
# test
extra_args = ['--tls13-ciphers', ':'.join(ciphers13)] if ciphers13 else []
extra_args += ['--ciphers', ':'.join(ciphers12)] if ciphers12 else []
extra_args = ["--tls13-ciphers", ":".join(ciphers13)] if ciphers13 else []
extra_args += ["--ciphers", ":".join(ciphers12)] if ciphers12 else []
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
if tls_proto != 'TLSv1.2' and succeed13:
if tls_proto != "TLSv1.2" and succeed13:
assert r.exit_code == 0, r.dump_logs()
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == 'TLSv1.3', r.dump_logs()
assert ciphers13 is None or r.json['SSL_CIPHER'] in ciphers13, r.dump_logs()
elif tls_proto == 'TLSv1.2' and succeed12:
assert r.json["HTTPS"] == "on", r.dump_logs()
assert r.json["SSL_PROTOCOL"] == "TLSv1.3", r.dump_logs()
assert ciphers13 is None or r.json["SSL_CIPHER"] in ciphers13, r.dump_logs()
elif tls_proto == "TLSv1.2" and succeed12:
assert r.exit_code == 0, r.dump_logs()
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == 'TLSv1.2', r.dump_logs()
assert ciphers12 is None or r.json['SSL_CIPHER'] in ciphers12, r.dump_logs()
assert r.json["HTTPS"] == "on", r.dump_logs()
assert r.json["SSL_PROTOCOL"] == "TLSv1.2", r.dump_logs()
assert ciphers12 is None or r.json["SSL_CIPHER"] in ciphers12, r.dump_logs()
else:
assert r.exit_code != 0, r.dump_logs()

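The two cipher options are independent of each other: --tls13-ciphers only ever applies to a TLSv1.3 handshake, and --ciphers only to TLSv1.2 and below. That is why each parametrized case carries separate success flags per protocol version. A hypothetical invocation setting both (values illustrative):

ciphers13 = ["TLS_CHACHA20_POLY1305_SHA256"]   # consulted iff TLSv1.3 is negotiated
ciphers12 = ["ECDHE-RSA-AES128-GCM-SHA256"]    # consulted iff TLSv1.2 or lower
extra_args = ["--tls13-ciphers", ":".join(ciphers13),
              "--ciphers", ":".join(ciphers12)]
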
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_17_08_cert_status(self, env: Env, proto):
|
||||
if proto == 'h3' and not env.have_h3():
|
||||
if proto == "h3" and not env.have_h3():
|
||||
pytest.skip("h3 not supported")
|
||||
if not env.curl_uses_lib('openssl') and \
|
||||
not env.curl_uses_lib('gnutls') and \
|
||||
not env.curl_uses_lib('quictls'):
|
||||
if (
|
||||
not env.curl_uses_lib("openssl")
|
||||
and not env.curl_uses_lib("gnutls")
|
||||
and not env.curl_uses_lib("quictls")
|
||||
):
|
||||
pytest.skip("TLS library does not support --cert-status")
|
||||
curl = CurlClient(env=env)
|
||||
domain = 'localhost'
|
||||
url = f'https://{env.authority_for(domain, proto)}/'
|
||||
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
|
||||
'--cert-status'
|
||||
])
|
||||
domain = "localhost"
|
||||
url = f"https://{env.authority_for(domain, proto)}/"
|
||||
r = curl.http_get(url=url, alpn_proto=proto, extra_args=["--cert-status"])
|
||||
# CURLE_SSL_INVALIDCERTSTATUS, our certs have no OCSP info
|
||||
assert r.exit_code == 91, f'{r}'
|
||||
assert r.exit_code == 91, f"{r}"
|
||||
|
||||
@staticmethod
|
||||
def gen_test_17_09_list():
|
||||
return [[tls_proto, max_ver, min_ver]
|
||||
for tls_proto in ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
|
||||
for max_ver in range(5)
|
||||
for min_ver in range(-2, 4)]
|
||||
return [
|
||||
[tls_proto, max_ver, min_ver]
|
||||
for tls_proto in ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
|
||||
for max_ver in range(5)
|
||||
for min_ver in range(-2, 4)
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize("tls_proto, max_ver, min_ver", gen_test_17_09_list())
|
||||
def test_17_09_ssl_min_max(self, env: Env, httpd, tls_proto, max_ver, min_ver):
|
||||
httpd.set_extra_config('base', [
|
||||
f'SSLProtocol {tls_proto}',
|
||||
'SSLCipherSuite ALL:@SECLEVEL=0',
|
||||
])
|
||||
httpd.set_extra_config(
|
||||
"base",
|
||||
[
|
||||
f"SSLProtocol {tls_proto}",
|
||||
"SSLCipherSuite ALL:@SECLEVEL=0",
|
||||
],
|
||||
)
|
||||
httpd.reload_if_config_changed()
|
||||
proto = 'http/1.1'
|
||||
proto = "http/1.1"
|
||||
run_env = os.environ.copy()
|
||||
if env.curl_uses_lib('gnutls'):
|
||||
if env.curl_uses_lib("gnutls"):
|
||||
# we need to override any default system configuration since
# we want to test all protocol versions. Ubuntu (or the GH image)
# disables TLS1.0 and TLS1.1 system-wide, which we do not want.
our_config = os.path.join(env.gen_dir, 'gnutls_config')
our_config = os.path.join(env.gen_dir, "gnutls_config")
if not os.path.exists(our_config):
with open(our_config, 'w') as fd:
fd.write('# empty\n')
run_env['GNUTLS_SYSTEM_PRIORITY_FILE'] = our_config
with open(our_config, "w") as fd:
fd.write("# empty\n")
run_env["GNUTLS_SYSTEM_PRIORITY_FILE"] = our_config
curl = CurlClient(env=env, run_env=run_env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
# SSL backend specifics
if env.curl_uses_lib('bearssl'):
supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', None]
elif env.curl_uses_lib('sectransp'):  # not in CI, so untested
supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', None]
elif env.curl_uses_lib('gnutls'):
supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
elif env.curl_uses_lib('quiche'):
supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
if env.curl_uses_lib("bearssl"):
supported = ["TLSv1", "TLSv1.1", "TLSv1.2", None]
elif env.curl_uses_lib("sectransp"):  # not in CI, so untested
supported = ["TLSv1", "TLSv1.1", "TLSv1.2", None]
elif env.curl_uses_lib("gnutls"):
supported = ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
elif env.curl_uses_lib("quiche"):
supported = ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
else:  # most SSL backends dropped support for TLSv1.0, TLSv1.1
supported = [None, None, 'TLSv1.2', 'TLSv1.3']
supported = [None, None, "TLSv1.2", "TLSv1.3"]
# test
extra_args = [[], ['--tlsv1'], ['--tlsv1.0'], ['--tlsv1.1'], ['--tlsv1.2'], ['--tlsv1.3']][min_ver+2] + \
[['--tls-max', '1.0'], ['--tls-max', '1.1'], ['--tls-max', '1.2'], ['--tls-max', '1.3'], []][max_ver]
extra_args.extend(['--trace-config', 'ssl'])
extra_args = [
[],
["--tlsv1"],
["--tlsv1.0"],
["--tlsv1.1"],
["--tlsv1.2"],
["--tlsv1.3"],
][min_ver + 2] + [
["--tls-max", "1.0"],
["--tls-max", "1.1"],
["--tls-max", "1.2"],
["--tls-max", "1.3"],
[],
][
max_ver
]
extra_args.extend(["--trace-config", "ssl"])
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
if max_ver >= min_ver and tls_proto in supported[max(0, min_ver):min(max_ver, 3)+1]:
assert r.exit_code == 0, f'extra_args={extra_args}\n{r.dump_logs()}'
assert r.json['HTTPS'] == 'on', r.dump_logs()
assert r.json['SSL_PROTOCOL'] == tls_proto, r.dump_logs()
if (
max_ver >= min_ver
and tls_proto in supported[max(0, min_ver) : min(max_ver, 3) + 1]
):
assert r.exit_code == 0, f"extra_args={extra_args}\n{r.dump_logs()}"
assert r.json["HTTPS"] == "on", r.dump_logs()
assert r.json["SSL_PROTOCOL"] == tls_proto, r.dump_logs()
else:
assert r.exit_code != 0, f'extra_args={extra_args}\n{r.dump_logs()}'
assert r.exit_code != 0, f"extra_args={extra_args}\n{r.dump_logs()}"

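The nested list indexing above is compact but easy to misread. The same selection as a small helper (a sketch: min_ver runs from -2, meaning "no minimum flag", to 3, and max_ver from 0 to 4, where 4 means "no --tls-max flag"):

MIN_FLAGS = [[], ["--tlsv1"], ["--tlsv1.0"], ["--tlsv1.1"], ["--tlsv1.2"], ["--tlsv1.3"]]
MAX_FLAGS = [["--tls-max", "1.0"], ["--tls-max", "1.1"],
             ["--tls-max", "1.2"], ["--tls-max", "1.3"], []]

def version_args(min_ver, max_ver):
    # pick the minimum-version flag, then append the maximum-version flag
    return MIN_FLAGS[min_ver + 2] + MAX_FLAGS[max_ver]

assert version_args(-2, 4) == []                                  # curl defaults
assert version_args(1, 2) == ["--tlsv1.1", "--tls-max", "1.2"]
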
def test_17_10_h3_session_reuse(self, env: Env, httpd, nghttpx):
if not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_uses_lib('quictls') and \
not env.curl_uses_lib('gnutls') and \
not env.curl_uses_lib('wolfssl'):
if (
not env.curl_uses_lib("quictls")
and not env.curl_uses_lib("gnutls")
and not env.curl_uses_lib("wolfssl")
):
pytest.skip("QUIC session reuse not implemented")
count = 2
docname = 'data-10k'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='hx-download', env=env)
docname = "data-10k"
url = f"https://localhost:{env.https_port}/{docname}"
client = LocalClient(name="hx-download", env=env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}',
'-f',  # forbid reuse of connections
'-r', f'{env.domain1}:{env.port_for("h3")}:127.0.0.1',
'-V', 'h3', url
])
pytest.skip(f"example client not built: {client.name}")
r = client.run(
args=[
"-n",
f"{count}",
"-f",  # forbid reuse of connections
"-r",
f'{env.domain1}:{env.port_for("h3")}:127.0.0.1',
"-V",
"h3",
url,
]
)
r.check_exit_code(0)
# check that TLS session was reused as expected
reused_session = False
for line in r.trace_lines:
m = re.match(r'\[1-1] \* SSL reusing session.*', line)
m = re.match(r"\[1-1] \* SSL reusing session.*", line)
if m:
reused_session = True
assert reused_session, f'{r}\n{r.dump_logs()}'
assert reused_session, f"{r}\n{r.dump_logs()}"

# use host name server has no certificate for
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_11_wrong_host(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'insecure.{env.tld}'
url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
domain = f"insecure.{env.tld}"
url = f"https://{domain}:{env.port_for(proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}'
assert r.exit_code == 60, f"{r}"

# use host name server has no cert for with --insecure
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_12_insecure(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
domain = f'insecure.{env.tld}'
url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--insecure'
])
assert r.exit_code == 0, f'{r}'
assert r.json, f'{r}'
domain = f"insecure.{env.tld}"
url = f"https://{domain}:{env.port_for(proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto, extra_args=["--insecure"])
assert r.exit_code == 0, f"{r}"
assert r.json, f"{r}"

# connect to an expired certificate
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_17_14_expired_cert(self, env: Env, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.expired_domain}:{env.port_for(proto)}/'
url = f"https://{env.expired_domain}:{env.port_for(proto)}/"
r = curl.http_get(url=url, alpn_proto=proto)
assert r.exit_code == 60, f'{r}'  # peer failed verification
assert r.exit_code == 60, f"{r}"  # peer failed verification
exp_trace = None
match_trace = None
if env.curl_uses_lib('openssl') or env.curl_uses_lib('quictls'):
exp_trace = r'.*SSL certificate problem: certificate has expired$'
elif env.curl_uses_lib('gnutls'):
exp_trace = r'.*server verification failed: certificate has expired\..*'
elif env.curl_uses_lib('wolfssl'):
exp_trace = r'.*server verification failed: certificate has expired\.$'
if env.curl_uses_lib("openssl") or env.curl_uses_lib("quictls"):
exp_trace = r".*SSL certificate problem: certificate has expired$"
elif env.curl_uses_lib("gnutls"):
exp_trace = r".*server verification failed: certificate has expired\..*"
elif env.curl_uses_lib("wolfssl"):
exp_trace = r".*server verification failed: certificate has expired\.$"
if exp_trace is not None:
for line in r.trace_lines:
if re.match(exp_trace, line):
@@ -391,58 +448,63 @@ class TestSSLUse:
break
assert match_trace, f'Did not find "{exp_trace}" in trace\n{r.dump_logs()}'

@pytest.mark.skipif(condition=not Env.curl_has_feature('SSLS-EXPORT'),
reason='curl lacks SSL session export support')
@pytest.mark.skipif(
condition=not Env.curl_has_feature("SSLS-EXPORT"),
reason="curl lacks SSL session export support",
)
def test_17_15_session_export(self, env: Env, httpd):
proto = 'http/1.1'
if env.curl_uses_lib('libressl'):
pytest.skip('Libressl resumption does not work in TLSv1.3')
if env.curl_uses_lib('rustls-ffi'):
pytest.skip('rustls does not expose sessions')
if env.curl_uses_lib('bearssl'):
pytest.skip('BearSSL does not support TLSv1.3')
if env.curl_uses_lib('mbedtls') and \
not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
pytest.skip('mbedtls TLSv1.3 session resume not working before 3.6.0')
proto = "http/1.1"
if env.curl_uses_lib("libressl"):
pytest.skip("Libressl resumption does not work in TLSv1.3")
if env.curl_uses_lib("rustls-ffi"):
pytest.skip("rustls does not expose sessions")
if env.curl_uses_lib("bearssl"):
pytest.skip("BearSSL does not support TLSv1.3")
if env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
"mbedtls", "3.6.0"
):
pytest.skip("mbedtls TLSv1.3 session resume not working before 3.6.0")
run_env = os.environ.copy()
run_env['CURL_DEBUG'] = 'ssl,scache'
run_env["CURL_DEBUG"] = "ssl,scache"
# clean session file first, then reuse
session_file = os.path.join(env.gen_dir, 'test_17_15.sessions')
session_file = os.path.join(env.gen_dir, "test_17_15.sessions")
if os.path.exists(session_file):
os.remove(session_file)
xargs = ['--tls-max', '1.3', '--tlsv1.3', '--ssl-sessions', session_file]
xargs = ["--tls-max", "1.3", "--tlsv1.3", "--ssl-sessions", session_file]
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
assert r.exit_code == 0, f'{r}'
assert r.json['HTTPS'] == 'on', f'{r.json}'
assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}\n{r.dump_logs()}'
assert r.exit_code == 0, f"{r}"
assert r.json["HTTPS"] == "on", f"{r.json}"
assert r.json["SSL_SESSION_RESUMED"] == "Initial", f"{r.json}\n{r.dump_logs()}"
# ok, run again, sessions should be imported
run_dir2 = os.path.join(env.gen_dir, 'curl2')
run_dir2 = os.path.join(env.gen_dir, "curl2")
curl = CurlClient(env=env, run_env=run_env, run_dir=run_dir2)
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
assert r.exit_code == 0, f'{r}'
assert r.json['SSL_SESSION_RESUMED'] == 'Resumed', f'{r.json}\n{r.dump_logs()}'
assert r.exit_code == 0, f"{r}"
assert r.json["SSL_SESSION_RESUMED"] == "Resumed", f"{r.json}\n{r.dump_logs()}"

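The second CurlClient deliberately runs from a fresh run_dir: a new process has no in-memory TLS session cache, so a "Resumed" handshake can only come from the file written via --ssl-sessions. The contract, condensed into a sketch reusing the names from the test above:

xargs = ["--tls-max", "1.3", "--tlsv1.3", "--ssl-sessions", session_file]
first = CurlClient(env=env, run_env=run_env).http_get(
    url=url, alpn_proto=proto, extra_args=xargs)       # exports to session_file
second = CurlClient(env=env, run_env=run_env, run_dir=run_dir2).http_get(
    url=url, alpn_proto=proto, extra_args=xargs)       # imports session_file
assert first.json["SSL_SESSION_RESUMED"] == "Initial"
assert second.json["SSL_SESSION_RESUMED"] == "Resumed"
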
# verify the ciphers are ignored when talking TLSv1.3 only
# see issue #16232
def test_17_16_h3_ignore_ciphers12(self, env: Env):
proto = 'h3'
if proto == 'h3' and not env.have_h3():
proto = "h3"
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--ciphers', 'NONSENSE'
])
assert r.exit_code == 0, f'{r}'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(
url=url, alpn_proto=proto, extra_args=["--ciphers", "NONSENSE"]
)
assert r.exit_code == 0, f"{r}"

def test_17_17_h1_ignore_ciphers13(self, env: Env):
proto = 'http/1.1'
proto = "http/1.1"
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
'--tls13-ciphers', 'NONSENSE', '--tls-max', '1.2'
])
assert r.exit_code == 0, f'{r}'
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(
url=url,
alpn_proto=proto,
extra_args=["--tls13-ciphers", "NONSENSE", "--tls-max", "1.2"],
)
assert r.exit_code == 0, f"{r}"

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@@ -34,26 +34,25 @@ log = logging.getLogger(__name__)


class TestMethods:

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload_if_config_changed()
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)

# delete 1 file
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
|
||||
@pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
|
||||
def test_18_01_delete(self, env: Env, httpd, nghttpx, proto):
|
||||
if proto == 'h3' and not env.have_h3():
|
||||
if proto == "h3" and not env.have_h3():
|
||||
pytest.skip("h3 not supported")
|
||||
count = 1
|
||||
curl = CurlClient(env=env)
|
||||
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'
|
||||
url = f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]"
|
||||
r = curl.http_delete(urls=[url], alpn_proto=proto)
|
||||
r.check_stats(count=count, http_status=204, exitcode=0)
|
||||
|
||||
@@ -62,10 +61,12 @@ class TestMethods:
# - 10ms later DATA frame length=0 and eos=1
# should be accepted
def test_18_02_delete_h2_special(self, env: Env, httpd, nghttpx):
proto = 'h2'
proto = "h2"
count = 1
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'\
'&chunks=1&chunk_size=0&chunk_delay=10ms'
url = (
f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]"
"&chunks=1&chunk_size=0&chunk_delay=10ms"
)
r = curl.http_delete(urls=[url], alpn_proto=proto)
r.check_stats(count=count, http_status=204, exitcode=0)

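In HTTP/2 terms, the query string makes the test server answer with a HEADERS frame (status 204) that does not carry end-of-stream, followed about 10ms later by an empty DATA frame with eos=1; a client must treat that as one complete, body-less response. A sketch of the expected frame sequence (illustrative, not captured output):

frames = [
    ("HEADERS", {"status": 204, "eos": False}),
    ("DATA", {"length": 0, "eos": True}),  # arrives ~10ms later
]
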
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
@@ -35,141 +35,158 @@ log = logging.getLogger(__name__)


class TestShutdown:

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)

# check with `tcpdump` that we see curl TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.parametrize("proto", ['http/1.1'])
@pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_01_check_tcp_rst(self, env: Env, httpd, proto):
if env.ci_run:
pytest.skip("seems not to work in CI")
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
r = curl.http_download(
urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
)
r.check_response(http_status=200, count=2)
assert r.tcpdump
assert len(r.tcpdump.stats) != 0, f'Expected TCP RSTs packets: {r.tcpdump.stderr}'
assert (
len(r.tcpdump.stats) != 0
), f"Expected TCP RSTs packets: {r.tcpdump.stderr}"

# check with `tcpdump` that we do NOT see TCP RST when CURL_GRACEFUL_SHUTDOWN set
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
@pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_19_02_check_shutdown(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
curl = CurlClient(env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl,tcp'
})
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
pytest.skip("only works for curl debug builds")
curl = CurlClient(
env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl,tcp"}
)
url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
r = curl.http_download(
urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
)
r.check_response(http_status=200, count=2)
assert r.tcpdump
assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"

# run downloads where the server closes the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
@pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_03_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
pytest.skip("only works for curl debug builds")
count = 10
curl = CurlClient(env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl'
})
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
f'id=[0-{count-1}]&with_cl&close'
curl = CurlClient(
env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl"}
)
url = (
f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?"
f"id=[0-{count-1}]&with_cl&close"
)
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200, count=count)
shutdowns = [line for line in r.trace_lines
if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
assert len(shutdowns) == count, f'{shutdowns}'
shutdowns = [
line
for line in r.trace_lines
if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
]
assert len(shutdowns) == count, f"{shutdowns}"

# run downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
@pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_04_shutdown_by_curl(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
pytest.skip("only works for curl debug builds")
count = 10
docname = 'data.json'
url = f'https://localhost:{env.https_port}/{docname}'
client = LocalClient(name='hx-download', env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'ssl'
})
docname = "data.json"
url = f"https://localhost:{env.https_port}/{docname}"
client = LocalClient(
name="hx-download",
env=env,
run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl"},
)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
r = client.run(args=[
'-n', f'{count}', '-f', '-V', proto, url
])
pytest.skip(f"example client not built: {client.name}")
r = client.run(args=["-n", f"{count}", "-f", "-V", proto, url])
r.check_exit_code(0)
shutdowns = [line for line in r.trace_lines
if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
assert len(shutdowns) == count, f'{shutdowns}'
shutdowns = [
line
for line in r.trace_lines
if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
]
assert len(shutdowns) == count, f"{shutdowns}"

# run event-based downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
@pytest.mark.parametrize("proto", ['http/1.1'])
@pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_05_event_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
pytest.skip("only works for curl debug builds")
count = 10
curl = CurlClient(env=env, run_env={
# forbid connection reuse to trigger shutdowns after transfer
'CURL_FORBID_REUSE': '1',
# make socket receives block 50% of the time to delay shutdown
'CURL_DBG_SOCK_RBLOCK': '50',
'CURL_DEBUG': 'ssl'
})
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
f'id=[0-{count-1}]&with_cl&'
r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
'--test-event'
])
curl = CurlClient(
env=env,
run_env={
# forbid connection reuse to trigger shutdowns after transfer
"CURL_FORBID_REUSE": "1",
# make socket receives block 50% of the time to delay shutdown
"CURL_DBG_SOCK_RBLOCK": "50",
"CURL_DEBUG": "ssl",
},
)
url = (
f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?"
f"id=[0-{count-1}]&with_cl&"
)
r = curl.http_download(
urls=[url], alpn_proto=proto, extra_args=["--test-event"]
)
r.check_response(http_status=200, count=count)
# check that we closed all connections
closings = [line for line in r.trace_lines
if re.match(r'.*CCACHE\] closing #\d+', line)]
assert len(closings) == count, f'{closings}'
closings = [
line for line in r.trace_lines if re.match(r".*CCACHE\] closing #\d+", line)
]
assert len(closings) == count, f"{closings}"
# check that all connection sockets were removed from event
removes = [line for line in r.trace_lines
if re.match(r'.*socket cb: socket \d+ REMOVED', line)]
assert len(removes) == count, f'{removes}'
removes = [
line
for line in r.trace_lines
if re.match(r".*socket cb: socket \d+ REMOVED", line)
]
assert len(removes) == count, f"{removes}"

# check graceful shutdown on multiplexed http
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@pytest.mark.parametrize("proto", ["h2", "h3"])
def test_19_06_check_shutdown(self, env: Env, httpd, nghttpx, proto):
if proto == 'h3' and not env.have_h3():
if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_is_debug():
pytest.skip('only works for curl debug builds')
curl = CurlClient(env=env, run_env={
'CURL_GRACEFUL_SHUTDOWN': '2000',
'CURL_DEBUG': 'all'
})
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
'--parallel'
])
pytest.skip("only works for curl debug builds")
curl = CurlClient(
env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "all"}
)
url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
r = curl.http_download(
urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
)
r.check_response(http_status=200, count=2)
# check connection cache closings
shutdowns = [line for line in r.trace_lines
if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
assert len(shutdowns) == 1, f'{shutdowns}'
shutdowns = [
line
for line in r.trace_lines
if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
]
assert len(shutdowns) == 1, f"{shutdowns}"

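The `http_download(...)` rewrites throughout this file follow black's layout rule for calls that exceed the default 88-column limit: first try the whole call on one line, then try all arguments on a single indented line, and only if that still overflows give each argument its own line. A rough sketch with a hypothetical function `f` (not from the commit):

# fits in 88 columns: stays on one line
r = f(urls=[url], alpn_proto=proto)
# too long for one line: all arguments on a single indented line
r = f(
    urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
)
# still too long (or a trailing comma is present): one argument per line
r = f(
    urls=[url],
    alpn_proto=proto,
    with_tcpdump=True,
    extra_args=["--ftp-port", "127.0.0.1"],
)
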
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -38,19 +38,19 @@ from testenv import Env, CurlClient, LocalClient
log = logging.getLogger(__name__)


@pytest.mark.skipif(condition=not Env.curl_has_protocol('ws'),
reason='curl lacks ws protocol support')
@pytest.mark.skipif(
condition=not Env.curl_has_protocol("ws"), reason="curl lacks ws protocol support"
)
class TestWebsockets:

def check_alive(self, env, timeout=5):
curl = CurlClient(env=env)
url = f'http://localhost:{env.ws_port}/'
url = f"http://localhost:{env.ws_port}/"
end = datetime.now() + timedelta(seconds=timeout)
while datetime.now() < end:
r = curl.http_download(urls=[url])
if r.exit_code == 0:
return True
time.sleep(.1)
time.sleep(0.1)
return False

def _mkpath(self, path):
@ -61,93 +61,91 @@ class TestWebsockets:
if os.path.exists(path):
return shutil.rmtree(path)

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def ws_echo(self, env):
run_dir = os.path.join(env.gen_dir, 'ws-echo-server')
err_file = os.path.join(run_dir, 'stderr')
run_dir = os.path.join(env.gen_dir, "ws-echo-server")
err_file = os.path.join(run_dir, "stderr")
self._rmrf(run_dir)
self._mkpath(run_dir)

with open(err_file, 'w') as cerr:
cmd = os.path.join(env.project_dir,
'tests/http/testenv/ws_echo_server.py')
args = [cmd, '--port', str(env.ws_port)]
p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr,
stdout=cerr)
with open(err_file, "w") as cerr:
cmd = os.path.join(env.project_dir, "tests/http/testenv/ws_echo_server.py")
args = [cmd, "--port", str(env.ws_port)]
p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr, stdout=cerr)
assert self.check_alive(env)
yield
p.terminate()

def test_20_01_basic(self, env: Env, ws_echo):
curl = CurlClient(env=env)
url = f'http://localhost:{env.ws_port}/'
url = f"http://localhost:{env.ws_port}/"
r = curl.http_download(urls=[url])
r.check_response(http_status=426)

def test_20_02_pingpong_small(self, env: Env, ws_echo):
payload = 125 * "x"
client = LocalClient(env=env, name='ws-pingpong')
client = LocalClient(env=env, name="ws-pingpong")
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=[url, payload])
r.check_exit_code(0)

# the python websocket server does not like 'large' control frames
def test_20_03_pingpong_too_large(self, env: Env, ws_echo):
payload = 127 * "x"
client = LocalClient(env=env, name='ws-pingpong')
client = LocalClient(env=env, name="ws-pingpong")
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=[url, payload])
r.check_exit_code(56)

def test_20_04_data_small(self, env: Env, ws_echo):
client = LocalClient(env=env, name='ws-data')
client = LocalClient(env=env, name="ws-data")
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=['-m', str(0), '-M', str(10), url])
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=["-m", str(0), "-M", str(10), url])
r.check_exit_code(0)

def test_20_05_data_med(self, env: Env, ws_echo):
client = LocalClient(env=env, name='ws-data')
client = LocalClient(env=env, name="ws-data")
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=['-m', str(120), '-M', str(130), url])
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=["-m", str(120), "-M", str(130), url])
r.check_exit_code(0)

def test_20_06_data_large(self, env: Env, ws_echo):
client = LocalClient(env=env, name='ws-data')
client = LocalClient(env=env, name="ws-data")
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=['-m', str(65535 - 5), '-M', str(65535 + 5), url])
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=["-m", str(65535 - 5), "-M", str(65535 + 5), url])
r.check_exit_code(0)

def test_20_07_data_large_small_recv(self, env: Env, ws_echo):
run_env = os.environ.copy()
run_env['CURL_WS_CHUNK_SIZE'] = '1024'
client = LocalClient(env=env, name='ws-data', run_env=run_env)
run_env["CURL_WS_CHUNK_SIZE"] = "1024"
client = LocalClient(env=env, name="ws-data", run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
r = client.run(args=['-m', str(65535 - 5), '-M', str(65535 + 5), url])
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=["-m", str(65535 - 5), "-M", str(65535 + 5), url])
r.check_exit_code(0)

# Send large frames and simulate send blocking on 8192 bytes chunks
# Simulates error reported in #15865
def test_20_08_data_very_large(self, env: Env, ws_echo):
run_env = os.environ.copy()
run_env['CURL_WS_CHUNK_EAGAIN'] = '8192'
client = LocalClient(env=env, name='ws-data', run_env=run_env)
run_env["CURL_WS_CHUNK_EAGAIN"] = "8192"
client = LocalClient(env=env, name="ws-data", run_env=run_env)
if not client.exists():
pytest.skip(f'example client not built: {client.name}')
url = f'ws://localhost:{env.ws_port}/'
pytest.skip(f"example client not built: {client.name}")
url = f"ws://localhost:{env.ws_port}/"
count = 10
large = 512 * 1024
large = 20000
r = client.run(args=['-c', str(count), '-m', str(large), url])
r = client.run(args=["-c", str(count), "-m", str(large), url])
r.check_exit_code(0)

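Besides re-wrapping, black also normalizes literals, which accounts for the smallest hunks in this file: string quotes become double quotes and bare float literals gain an explicit leading zero. A two-line illustration (not tied to any particular test):

time.sleep(.1)   # before: legal Python, but easy to misread
time.sleep(0.1)  # after black: numeric literals get a leading zero
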
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -39,8 +39,7 @@ log = logging.getLogger(__name__)

@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def vsftpd(self, env):
vsftpd = VsFTPD(env=env)
assert vsftpd.start()
@ -49,86 +48,82 @@ class TestVsFTPD:

def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
data1k = 1024 * "x"
flen = 0
with open(fpath, 'w') as fd:
with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, vsftpd):
if os.path.exists(vsftpd.docs_dir):
shutil.rmtree(vsftpd.docs_dir)
if not os.path.exists(vsftpd.docs_dir):
os.makedirs(vsftpd.docs_dir)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1k', fsize=1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10k', fsize=10*1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1m', fsize=1024*1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10m', fsize=10*1024*1024)
self._make_docs_file(docs_dir=vsftpd.docs_dir, fname="data-1k", fsize=1024)
self._make_docs_file(
docs_dir=vsftpd.docs_dir, fname="data-10k", fsize=10 * 1024
)
self._make_docs_file(
docs_dir=vsftpd.docs_dir, fname="data-1m", fsize=1024 * 1024
)
self._make_docs_file(
docs_dir=vsftpd.docs_dir, fname="data-10m", fsize=10 * 1024 * 1024
)
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100 * 1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024 * 1024)

def test_30_01_list_dir(self, env: Env, vsftpd: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
assert len(lines) == 4, f'list: {lines}'
lines = open(os.path.join(curl.run_dir, "download_#1.data")).readlines()
assert len(lines) == 4, f"list: {lines}"

# download 1 file, no SSL
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_02_download_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_03_download_10_serial(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_04_download_10_parallel(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--parallel'
])
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=["--parallel"])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'upload-1k', 'upload-100k', 'upload-1m'
])
@pytest.mark.parametrize("docname", ["upload-1k", "upload-100k", "upload-1m"])
def test_30_05_upload_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
r = curl.ftp_upload(urls=[url], fupload=f"{srcfile}", with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)

@ -139,68 +134,76 @@ class TestVsFTPD:
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_30_06_shutdownh_download(self, env: Env, vsftpd: VsFTPD):
docname = 'data-1k'
docname = "data-1k"
curl = CurlClient(env=env)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
assert r.tcpdump
assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"

# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_30_07_shutdownh_upload(self, env: Env, vsftpd: VsFTPD):
docname = 'upload-1k'
docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
r = curl.ftp_upload(
urls=[url], fupload=f"{srcfile}", with_stats=True, with_tcpdump=True
)
r.check_stats(count=count, http_status=226)
assert r.tcpdump
assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"

def test_30_08_active_download(self, env: Env, vsftpd: VsFTPD):
docname = 'data-10k'
docname = "data-10k"
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(
urls=[url], with_stats=True, extra_args=["--ftp-port", "127.0.0.1"]
)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

def test_30_09_active_upload(self, env: Env, vsftpd: VsFTPD):
docname = 'upload-1k'
docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
r = curl.ftp_upload(
urls=[url],
fupload=f"{srcfile}",
with_stats=True,
extra_args=["--ftp-port", "127.0.0.1"],
)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)

def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
diff = "".join(
difflib.unified_diff(
a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1,
)
)
assert False, f"download {dfile} differs:\n{diff}"

def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
@ -208,9 +211,13 @@ class TestVsFTPD:
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1))
assert False, f'upload {dstfile} differs:\n{diff}'
diff = "".join(
difflib.unified_diff(
a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1,
)
)
assert False, f"upload {dstfile} differs:\n{diff}"

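The exploded `difflib.unified_diff(...)` calls above also pick up black's "magic trailing comma": because the reformatted call now ends its last argument with a comma (`n=1,`), black will keep one argument per line on every future run, even if the call would fit on fewer lines. A hypothetical sketch with a made-up function `compare`:

# with a trailing comma after the last argument, black keeps
# this call exploded, one argument per line, on every run
result = compare(
    a=left,
    b=right,
    n=1,
)
# without the trailing comma, black may collapse it again:
result = compare(a=left, b=right, n=1)
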
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -39,103 +39,98 @@ log = logging.getLogger(__name__)

@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:

SUPPORTS_SSL = True

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def vsftpds(self, env):
if not TestVsFTPD.SUPPORTS_SSL:
pytest.skip('vsftpd does not seem to support SSL')
pytest.skip("vsftpd does not seem to support SSL")
vsftpds = VsFTPD(env=env, with_ssl=True)
if not vsftpds.start():
vsftpds.stop()
TestVsFTPD.SUPPORTS_SSL = False
pytest.skip('vsftpd does not seem to support SSL')
pytest.skip("vsftpd does not seem to support SSL")
yield vsftpds
vsftpds.stop()

def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
data1k = 1024*'x'
data1k = 1024 * "x"
flen = 0
with open(fpath, 'w') as fd:
with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen

@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, vsftpds):
if os.path.exists(vsftpds.docs_dir):
shutil.rmtree(vsftpds.docs_dir)
if not os.path.exists(vsftpds.docs_dir):
os.makedirs(vsftpds.docs_dir)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1k', fsize=1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10k', fsize=10*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1m', fsize=1024*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10m', fsize=10*1024*1024)
self._make_docs_file(docs_dir=vsftpds.docs_dir, fname="data-1k", fsize=1024)
self._make_docs_file(
docs_dir=vsftpds.docs_dir, fname="data-10k", fsize=10 * 1024
)
self._make_docs_file(
docs_dir=vsftpds.docs_dir, fname="data-1m", fsize=1024 * 1024
)
self._make_docs_file(
docs_dir=vsftpds.docs_dir, fname="data-10m", fsize=10 * 1024 * 1024
)
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100 * 1024)
env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024 * 1024)

def test_31_01_list_dir(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
assert len(lines) == 4, f'list: {lines}'
lines = open(os.path.join(curl.run_dir, "download_#1.data")).readlines()
assert len(lines) == 4, f"list: {lines}"

# download 1 file, no SSL
@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_02_download_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_03_download_10_serial(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'data-1k', 'data-1m', 'data-10m'
])
@pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_04_download_10_parallel(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 10
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
'--parallel'
])
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=["--parallel"])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

@pytest.mark.parametrize("docname", [
'upload-1k', 'upload-100k', 'upload-1m'
])
@pytest.mark.parametrize("docname", ["upload-1k", "upload-100k", "upload-1m"])
def test_31_05_upload_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_upload(urls=[url], fupload=f"{srcfile}", with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)

@ -146,109 +141,125 @@ class TestVsFTPD:
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_31_06_shutdownh_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-1k'
docname = "data-1k"
curl = CurlClient(env=env)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftpd closes control connection without niceties,
# disregard RST packets it sent from its port to curl
assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets'
assert (
len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0
), "Unexpected TCP RSTs packets"

# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_31_07_shutdownh_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_upload(
urls=[url], fupload=f"{srcfile}", with_stats=True, with_tcpdump=True
)
r.check_stats(count=count, http_status=226)
# vsftpd closes control connection without niceties,
# disregard RST packets it sent from its port to curl
assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets'
assert (
len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0
), "Unexpected TCP RSTs packets"

def test_31_08_upload_ascii(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-ascii'
docname = "upload-ascii"
line_length = 21
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
env.make_data_file(indir=env.gen_dir, fname=docname, fsize=100*1024,
line_length=line_length)
env.make_data_file(
indir=env.gen_dir, fname=docname, fsize=100 * 1024, line_length=line_length
)
srcsize = os.path.getsize(srcfile)
self._rmf(dstfile)
count = 1
curl = CurlClient(env=env)
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True,
extra_args=['--use-ascii'])
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_upload(
urls=[url],
fupload=f"{srcfile}",
with_stats=True,
extra_args=["--use-ascii"],
)
r.check_stats(count=count, http_status=226)
# expect the uploaded file to be number of converted newlines larger
dstsize = os.path.getsize(dstfile)
newlines = len(open(srcfile).readlines())
assert (srcsize + newlines) == dstsize, \
f'expected source with {newlines} lines to be that much larger,'\
f'instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}'
assert (srcsize + newlines) == dstsize, (
f"expected source with {newlines} lines to be that much larger,"
f"instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}"
)

def test_31_08_active_download(self, env: Env, vsftpds: VsFTPD):
docname = 'data-10k'
docname = "data-10k"
curl = CurlClient(env=env)
srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(
urls=[url], with_stats=True, extra_args=["--ftp-port", "127.0.0.1"]
)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)

def test_31_09_active_upload(self, env: Env, vsftpds: VsFTPD):
docname = 'upload-1k'
docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
'--ftp-port', '127.0.0.1'
])
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_upload(
urls=[url],
fupload=f"{srcfile}",
with_stats=True,
extra_args=["--ftp-port", "127.0.0.1"],
)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)

@pytest.mark.parametrize("indata", [
'1234567890', ''
])
@pytest.mark.parametrize("indata", ["1234567890", ""])
def test_31_10_upload_stdin(self, env: Env, vsftpds: VsFTPD, indata):
curl = CurlClient(env=env)
docname = "upload_31_10"
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}'
url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}"
r = curl.ftp_ssl_upload(urls=[url], updata=indata, with_stats=True)
r.check_stats(count=count, http_status=226)
assert os.path.exists(dstfile)
destdata = open(dstfile).readlines()
expdata = [indata] if len(indata) else []
assert expdata == destdata, f'expected: {expdata}, got: {destdata}'
assert expdata == destdata, f"expected: {expdata}, got: {destdata}"

def check_downloads(self, client, srcfile: str, count: int,
complete: bool = True):
def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1))
assert False, f'download {dfile} differs:\n{diff}'
diff = "".join(
difflib.unified_diff(
a=open(srcfile).readlines(),
b=open(dfile).readlines(),
fromfile=srcfile,
tofile=dfile,
n=1,
)
)
assert False, f"download {dfile} differs:\n{diff}"

def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
@ -256,9 +267,13 @@ class TestVsFTPD:
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1))
assert False, f'upload {dstfile} differs:\n{diff}'
diff = "".join(
difflib.unified_diff(
a=open(srcfile).readlines(),
b=open(dstfile).readlines(),
fromfile=srcfile,
tofile=dstfile,
n=1,
)
)
assert False, f"upload {dstfile} differs:\n{diff}"

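The `upload_ascii` hunk shows how black handles a long `assert` message: instead of backslash continuations, the message is wrapped in parentheses, keeping the implicitly concatenated f-strings intact. A rough sketch with hypothetical sizes:

srcsize, dstsize, newlines = 100, 105, 5  # hypothetical values
assert (srcsize + newlines) == dstsize, (
    f"expected source with {newlines} lines to be that much larger,"
    f"instead srcsize={srcsize}, upload size={dstsize}"
)

Note that black only changes layout, never content: the concatenated message still lacks a space between "larger," and "instead", exactly as in the original.
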
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -25,8 +25,10 @@
###########################################################################
# ruff: noqa: F401, E402
import pytest
pytest.register_assert_rewrite("testenv.env", "testenv.curl", "testenv.caddy",
"testenv.httpd", "testenv.nghttpx")

pytest.register_assert_rewrite(
"testenv.env", "testenv.curl", "testenv.caddy", "testenv.httpd", "testenv.nghttpx"
)

from .env import Env
from .certs import TestCA, Credentials

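`pytest.register_assert_rewrite` must run before the listed modules are imported, which is why this call sits above the `from .env import ...` imports (and why the file carries a `noqa: E402`): it tells pytest to rewrite the `assert` statements in those helper modules so failures show introspected values. A minimal usage sketch for a hypothetical `helpers` module:

# conftest.py (hypothetical project layout)
import pytest

# must happen before "helpers" is imported anywhere
pytest.register_assert_rewrite("helpers")

from helpers import check_response  # asserts inside now get rewritten
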
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -39,15 +39,14 @@ log = logging.getLogger(__name__)


class Caddy:

def __init__(self, env: Env):
self.env = env
self._caddy = os.environ['CADDY'] if 'CADDY' in os.environ else env.caddy
self._caddy_dir = os.path.join(env.gen_dir, 'caddy')
self._docs_dir = os.path.join(self._caddy_dir, 'docs')
self._conf_file = os.path.join(self._caddy_dir, 'Caddyfile')
self._error_log = os.path.join(self._caddy_dir, 'caddy.log')
self._tmp_dir = os.path.join(self._caddy_dir, 'tmp')
self._caddy = os.environ["CADDY"] if "CADDY" in os.environ else env.caddy
self._caddy_dir = os.path.join(env.gen_dir, "caddy")
self._docs_dir = os.path.join(self._caddy_dir, "docs")
self._conf_file = os.path.join(self._caddy_dir, "Caddyfile")
self._error_log = os.path.join(self._caddy_dir, "caddy.log")
self._tmp_dir = os.path.join(self._caddy_dir, "tmp")
self._process = None
self._rmf(self._error_log)

@ -78,11 +77,11 @@ class Caddy:
if self._process:
self.stop()
self._write_config()
args = [
self._caddy, 'run'
]
caddyerr = open(self._error_log, 'a')
self._process = subprocess.Popen(args=args, cwd=self._caddy_dir, stderr=caddyerr)
args = [self._caddy, "run"]
caddyerr = open(self._error_log, "a")
self._process = subprocess.Popen(
args=args, cwd=self._caddy_dir, stderr=caddyerr
)
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=5))
@ -109,12 +108,12 @@ class Caddy:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.domain1}:{self.port}/'
check_url = f"https://{self.env.domain1}:{self.port}/"
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
log.debug(f'waiting for caddy to stop responding: {r}')
time.sleep(.1)
log.debug(f"waiting for caddy to stop responding: {r}")
time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False

@ -122,11 +121,11 @@ class Caddy:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.domain1}:{self.port}/'
check_url = f"https://{self.env.domain1}:{self.port}/"
r = curl.http_get(url=check_url)
if r.exit_code == 0:
return True
time.sleep(.1)
time.sleep(0.1)
log.error(f"Caddy still not responding after {timeout}")
return False

@ -147,30 +146,30 @@ class Caddy:
assert creds2 # convince pytype this isn't None
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
with open(os.path.join(self._docs_dir, "data.json"), "w") as fd:
data = {
'server': f'{domain1}',
"server": f"{domain1}",
}
fd.write(JSONEncoder().encode(data))
with open(self._conf_file, 'w') as fd:
conf = [ # base server config
'{',
f' http_port {self.env.caddy_http_port}',
f' https_port {self.env.caddy_https_port}',
f' servers :{self.env.caddy_https_port} {{',
' protocols h3 h2 h1',
' }',
'}',
f'{domain1}:{self.env.caddy_https_port} {{',
' file_server * {',
f' root {self._docs_dir}',
' }',
f' tls {creds1.cert_file} {creds1.pkey_file}',
'}',
f'{domain2} {{',
f' reverse_proxy /* http://localhost:{self.env.http_port} {{',
' }',
f' tls {creds2.cert_file} {creds2.pkey_file}',
'}',
with open(self._conf_file, "w") as fd:
conf = [ # base server config
"{",
f" http_port {self.env.caddy_http_port}",
f" https_port {self.env.caddy_https_port}",
f" servers :{self.env.caddy_https_port} {{",
" protocols h3 h2 h1",
" }",
"}",
f"{domain1}:{self.env.caddy_https_port} {{",
" file_server * {",
f" root {self._docs_dir}",
" }",
f" tls {creds1.cert_file} {creds1.pkey_file}",
"}",
f"{domain2} {{",
f" reverse_proxy /* http://localhost:{self.env.http_port} {{",
" }",
f" tls {creds2.cert_file} {creds2.pkey_file}",
"}",
]
fd.write("\n".join(conf))

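Note that only the quote style changes in the Caddyfile template above; the doubled braces survive untouched. In an f-string, `{{` and `}}` are escapes that emit literal braces, which these Caddy config blocks need. A small illustration with a made-up port value:

port = 4443  # hypothetical port
line = f" servers :{port} {{"
print(line)  # -> ' servers :4443 {'  (the doubled brace emits a literal brace)
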
@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -36,52 +36,59 @@ from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key
from cryptography.hazmat.primitives.serialization import (
Encoding,
PrivateFormat,
NoEncryption,
load_pem_private_key,
)
from cryptography.x509 import ExtendedKeyUsageOID, NameOID


EC_SUPPORTED = {}
EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [
ec.SECP192R1,
ec.SECP224R1,
ec.SECP256R1,
ec.SECP384R1,
]])
EC_SUPPORTED.update(
[
(curve.name.upper(), curve)
for curve in [
ec.SECP192R1,
ec.SECP224R1,
ec.SECP256R1,
ec.SECP384R1,
]
]
)


def _private_key(key_type):
if isinstance(key_type, str):
key_type = key_type.upper()
m = re.match(r'^(RSA)?(\d+)$', key_type)
m = re.match(r"^(RSA)?(\d+)$", key_type)
if m:
key_type = int(m.group(2))

if isinstance(key_type, int):
return rsa.generate_private_key(
public_exponent=65537,
key_size=key_type,
backend=default_backend()
public_exponent=65537, key_size=key_type, backend=default_backend()
)
if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED:
key_type = EC_SUPPORTED[key_type]
return ec.generate_private_key(
curve=key_type,
backend=default_backend()
)
return ec.generate_private_key(curve=key_type, backend=default_backend())

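The serialization import shows black's treatment of an over-long `from ... import ...`: the names move into parentheses, one per line, with a trailing comma. Roughly, using a stand-in module name:

# before: one line, over the 88-column limit (hypothetical module)
from mypkg.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key

# after black: parenthesized, one name per line, trailing comma
from mypkg.serialization import (
    Encoding,
    PrivateFormat,
    NoEncryption,
    load_pem_private_key,
)
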
class CertificateSpec:

def __init__(self, name: Optional[str] = None,
domains: Optional[List[str]] = None,
email: Optional[str] = None,
key_type: Optional[str] = None,
single_file: bool = False,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
client: bool = False,
check_valid: bool = True,
sub_specs: Optional[List['CertificateSpec']] = None):
def __init__(
self,
name: Optional[str] = None,
domains: Optional[List[str]] = None,
email: Optional[str] = None,
key_type: Optional[str] = None,
single_file: bool = False,
valid_from: timedelta = timedelta(days=-1),
valid_to: timedelta = timedelta(days=89),
client: bool = False,
check_valid: bool = True,
sub_specs: Optional[List["CertificateSpec"]] = None,
):
self._name = name
self.domains = domains
self.client = client
@ -113,12 +120,9 @@ class CertificateSpec:


class Credentials:

def __init__(self,
name: str,
cert: Any,
pkey: Any,
issuer: Optional['Credentials'] = None):
def __init__(
self, name: str, cert: Any, pkey: Any, issuer: Optional["Credentials"] = None
):
self._name = name
self._cert = cert
self._pkey = pkey
@ -161,18 +165,25 @@ class Credentials:
def pkey_pem(self) -> bytes:
return self._pkey.private_bytes(
Encoding.PEM,
PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8,
NoEncryption())
PrivateFormat.TraditionalOpenSSL
if self.key_type.startswith("rsa")
else PrivateFormat.PKCS8,
NoEncryption(),
)

@property
def issuer(self) -> Optional['Credentials']:
def issuer(self) -> Optional["Credentials"]:
return self._issuer

def set_store(self, store: 'CertStore'):
def set_store(self, store: "CertStore"):
self._store = store

def set_files(self, cert_file: str, pkey_file: Optional[str] = None,
combined_file: Optional[str] = None):
def set_files(
self,
cert_file: str,
pkey_file: Optional[str] = None,
combined_file: Optional[str] = None,
):
self._cert_file = cert_file
self._pkey_file = pkey_file
self._combined_file = combined_file
@ -189,28 +200,39 @@ class Credentials:
def combined_file(self) -> Optional[str]:
return self._combined_file

def get_first(self, name) -> Optional['Credentials']:
def get_first(self, name) -> Optional["Credentials"]:
creds = self._store.get_credentials_for_name(name) if self._store else []
return creds[0] if len(creds) else None

def get_credentials_for_name(self, name) -> List['Credentials']:
def get_credentials_for_name(self, name) -> List["Credentials"]:
return self._store.get_credentials_for_name(name) if self._store else []

def issue_certs(self, specs: List[CertificateSpec],
chain: Optional[List['Credentials']] = None) -> List['Credentials']:
def issue_certs(
self, specs: List[CertificateSpec], chain: Optional[List["Credentials"]] = None
) -> List["Credentials"]:
return [self.issue_cert(spec=spec, chain=chain) for spec in specs]

def issue_cert(self, spec: CertificateSpec,
chain: Optional[List['Credentials']] = None) -> 'Credentials':
def issue_cert(
self, spec: CertificateSpec, chain: Optional[List["Credentials"]] = None
) -> "Credentials":
key_type = spec.key_type if spec.key_type else self.key_type
creds = None
if self._store:
creds = self._store.load_credentials(
name=spec.name, key_type=key_type, single_file=spec.single_file,
issuer=self, check_valid=spec.check_valid)
name=spec.name,
key_type=key_type,
single_file=spec.single_file,
issuer=self,
check_valid=spec.check_valid,
)
if creds is None:
creds = TestCA.create_credentials(spec=spec, issuer=self, key_type=key_type,
valid_from=spec.valid_from, valid_to=spec.valid_to)
creds = TestCA.create_credentials(
spec=spec,
issuer=self,
key_type=key_type,
valid_from=spec.valid_from,
valid_to=spec.valid_to,
)
if self._store:
self._store.save(creds, single_file=spec.single_file)
if spec.type == "ca":
@ -227,7 +249,6 @@ class Credentials:

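The `pkey_pem` hunk is a good example of black splitting a conditional expression that no longer fits on one line: the ternary is broken before `if` and `else` instead of staying on one long line. Schematically, with the expression pulled out into an assignment (which needs its own parentheses):

# before: one over-long line
fmt = PrivateFormat.TraditionalOpenSSL if key_type.startswith("rsa") else PrivateFormat.PKCS8

# after black: the conditional expression is split at its keywords
fmt = (
    PrivateFormat.TraditionalOpenSSL
    if key_type.startswith("rsa")
    else PrivateFormat.PKCS8
)
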
class CertStore:

def __init__(self, fpath: str):
self._store_dir = fpath
if not os.path.exists(self._store_dir):
@ -238,9 +259,13 @@ class CertStore:
def path(self) -> str:
return self._store_dir

def save(self, creds: Credentials, name: Optional[str] = None,
chain: Optional[List[Credentials]] = None,
single_file: bool = False) -> None:
def save(
self,
creds: Credentials,
name: Optional[str] = None,
chain: Optional[List[Credentials]] = None,
single_file: bool = False,
) -> None:
name = name if name is not None else creds.name
cert_file = self.get_cert_file(name=name, key_type=creds.key_type)
pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type)
@ -274,7 +299,7 @@ class CertStore:
chain.append(creds)
if not with_root and len(chain) > 1:
chain = chain[:-1]
chain_file = os.path.join(self._store_dir, f'{name}-{infix}.pem')
chain_file = os.path.join(self._store_dir, f"{name}-{infix}.pem")
with open(chain_file, "wb") as fd:
for c in chain:
fd.write(c.cert_pem)
@ -289,14 +314,14 @@ class CertStore:

def get_cert_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem')
return os.path.join(self._store_dir, f"{name}{key_infix}.cert.pem")

def get_pkey_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem')
return os.path.join(self._store_dir, f"{name}{key_infix}.pkey.pem")

def get_combined_file(self, name: str, key_type=None) -> str:
return os.path.join(self._store_dir, f'{name}.pem')
return os.path.join(self._store_dir, f"{name}.pem")

def load_pem_cert(self, fpath: str) -> x509.Certificate:
with open(fpath) as fd:
@ -306,27 +331,36 @@ class CertStore:
with open(fpath) as fd:
return load_pem_private_key("".join(fd.readlines()).encode(), password=None)

def load_credentials(self, name: str, key_type=None,
single_file: bool = False,
issuer: Optional[Credentials] = None,
check_valid: bool = False):
def load_credentials(
self,
name: str,
key_type=None,
single_file: bool = False,
issuer: Optional[Credentials] = None,
check_valid: bool = False,
):
cert_file = self.get_cert_file(name=name, key_type=key_type)
pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type)
pkey_file = (
cert_file
if single_file
else self.get_pkey_file(name=name, key_type=key_type)
)
comb_file = self.get_combined_file(name=name, key_type=key_type)
if os.path.isfile(cert_file) and os.path.isfile(pkey_file):
cert = self.load_pem_cert(cert_file)
pkey = self.load_pem_pkey(pkey_file)
try:
now = datetime.now(tz=timezone.utc)
if check_valid and \
((cert.not_valid_after_utc < now) or
(cert.not_valid_before_utc > now)):
if check_valid and (
(cert.not_valid_after_utc < now)
or (cert.not_valid_before_utc > now)
):
return None
except AttributeError: # older python
now = datetime.now()
if check_valid and \
((cert.not_valid_after < now) or
(cert.not_valid_before > now)):
if check_valid and (
(cert.not_valid_after < now) or (cert.not_valid_before > now)
):
return None
creds = Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
creds.set_store(self)
@ -337,9 +371,10 @@ class CertStore:


class TestCA:

@classmethod
def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials:
def create_root(
cls, name: str, store_dir: str, key_type: str = "rsa2048"
) -> Credentials:
store = CertStore(fpath=store_dir)
creds = store.load_credentials(name="ca", key_type=key_type, issuer=None)
if creds is None:
@ -349,36 +384,61 @@ class TestCA:
return creds

@staticmethod
def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
def create_credentials(
|
||||
spec: CertificateSpec,
|
||||
issuer: Credentials,
|
||||
key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
"""
|
||||
Create a certificate signed by this CA for the given domains.
|
||||
|
||||
:returns: the certificate and private key PEM file paths
|
||||
"""
|
||||
if spec.domains and len(spec.domains):
|
||||
creds = TestCA._make_server_credentials(name=spec.name, domains=spec.domains,
|
||||
issuer=issuer, valid_from=valid_from,
|
||||
valid_to=valid_to, key_type=key_type)
|
||||
creds = TestCA._make_server_credentials(
|
||||
name=spec.name,
|
||||
domains=spec.domains,
|
||||
issuer=issuer,
|
||||
valid_from=valid_from,
|
||||
valid_to=valid_to,
|
||||
key_type=key_type,
|
||||
)
|
||||
elif spec.client:
|
||||
creds = TestCA._make_client_credentials(name=spec.name, issuer=issuer,
|
||||
email=spec.email, valid_from=valid_from,
|
||||
valid_to=valid_to, key_type=key_type)
|
||||
creds = TestCA._make_client_credentials(
|
||||
name=spec.name,
|
||||
issuer=issuer,
|
||||
email=spec.email,
|
||||
valid_from=valid_from,
|
||||
valid_to=valid_to,
|
||||
key_type=key_type,
|
||||
)
|
||||
elif spec.name:
|
||||
creds = TestCA._make_ca_credentials(name=spec.name, issuer=issuer,
|
||||
valid_from=valid_from, valid_to=valid_to,
|
||||
key_type=key_type)
|
||||
creds = TestCA._make_ca_credentials(
|
||||
name=spec.name,
|
||||
issuer=issuer,
|
||||
valid_from=valid_from,
|
||||
valid_to=valid_to,
|
||||
key_type=key_type,
|
||||
)
|
||||
else:
|
||||
raise Exception(f"unrecognized certificate specification: {spec}")
|
||||
return creds
|
||||
|
||||
@staticmethod
|
||||
def _make_x509_name(org_name: Optional[str] = None, common_name: Optional[str] = None, parent: x509.Name = None) -> x509.Name:
|
||||
def _make_x509_name(
|
||||
org_name: Optional[str] = None,
|
||||
common_name: Optional[str] = None,
|
||||
parent: x509.Name = None,
|
||||
) -> x509.Name:
|
||||
name_pieces = []
|
||||
if org_name:
|
||||
oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
|
||||
oid = (
|
||||
NameOID.ORGANIZATIONAL_UNIT_NAME
|
||||
if parent
|
||||
else NameOID.ORGANIZATION_NAME
|
||||
)
|
||||
name_pieces.append(x509.NameAttribute(oid, org_name))
|
||||
elif common_name:
|
||||
name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
|
||||
@ -388,11 +448,11 @@ class TestCA:
|
||||
|
||||
@staticmethod
|
||||
def _make_csr(
|
||||
subject: x509.Name,
|
||||
pkey: Any,
|
||||
issuer_subject: Optional[Credentials],
|
||||
valid_from_delta: Optional[timedelta] = None,
|
||||
valid_until_delta: Optional[timedelta] = None
|
||||
subject: x509.Name,
|
||||
pkey: Any,
|
||||
issuer_subject: Optional[Credentials],
|
||||
valid_from_delta: Optional[timedelta] = None,
|
||||
valid_until_delta: Optional[timedelta] = None,
|
||||
):
|
||||
pubkey = pkey.public_key()
|
||||
issuer_subject = issuer_subject if issuer_subject is not None else subject
|
||||
@ -420,28 +480,35 @@ class TestCA:
|
||||
|
||||
@staticmethod
|
||||
def _add_ca_usages(csr: Any) -> Any:
|
||||
return csr.add_extension(
|
||||
x509.BasicConstraints(ca=True, path_length=9),
|
||||
critical=True,
|
||||
).add_extension(
|
||||
x509.KeyUsage(
|
||||
digital_signature=True,
|
||||
content_commitment=False,
|
||||
key_encipherment=False,
|
||||
data_encipherment=False,
|
||||
key_agreement=False,
|
||||
key_cert_sign=True,
|
||||
crl_sign=True,
|
||||
encipher_only=False,
|
||||
decipher_only=False),
|
||||
critical=True
|
||||
).add_extension(
|
||||
x509.ExtendedKeyUsage([
|
||||
ExtendedKeyUsageOID.CLIENT_AUTH,
|
||||
ExtendedKeyUsageOID.SERVER_AUTH,
|
||||
ExtendedKeyUsageOID.CODE_SIGNING,
|
||||
]),
|
||||
critical=True
|
||||
return (
|
||||
csr.add_extension(
|
||||
x509.BasicConstraints(ca=True, path_length=9),
|
||||
critical=True,
|
||||
)
|
||||
.add_extension(
|
||||
x509.KeyUsage(
|
||||
digital_signature=True,
|
||||
content_commitment=False,
|
||||
key_encipherment=False,
|
||||
data_encipherment=False,
|
||||
key_agreement=False,
|
||||
key_cert_sign=True,
|
||||
crl_sign=True,
|
||||
encipher_only=False,
|
||||
decipher_only=False,
|
||||
),
|
||||
critical=True,
|
||||
)
|
||||
.add_extension(
|
||||
x509.ExtendedKeyUsage(
|
||||
[
|
||||
ExtendedKeyUsageOID.CLIENT_AUTH,
|
||||
ExtendedKeyUsageOID.SERVER_AUTH,
|
||||
ExtendedKeyUsageOID.CODE_SIGNING,
|
||||
]
|
||||
),
|
||||
critical=True,
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@ -454,33 +521,47 @@ class TestCA:
|
||||
except: # noqa: E722
|
||||
names.append(x509.DNSName(name))
|
||||
|
||||
return csr.add_extension(
|
||||
x509.BasicConstraints(ca=False, path_length=None),
|
||||
critical=True,
|
||||
).add_extension(
|
||||
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
|
||||
issuer.certificate.extensions.get_extension_for_class(
|
||||
x509.SubjectKeyIdentifier).value),
|
||||
critical=False
|
||||
).add_extension(
|
||||
x509.SubjectAlternativeName(names), critical=True,
|
||||
).add_extension(
|
||||
x509.ExtendedKeyUsage([
|
||||
ExtendedKeyUsageOID.SERVER_AUTH,
|
||||
]),
|
||||
critical=False
|
||||
return (
|
||||
csr.add_extension(
|
||||
x509.BasicConstraints(ca=False, path_length=None),
|
||||
critical=True,
|
||||
)
|
||||
.add_extension(
|
||||
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
|
||||
issuer.certificate.extensions.get_extension_for_class(
|
||||
x509.SubjectKeyIdentifier
|
||||
).value
|
||||
),
|
||||
critical=False,
|
||||
)
|
||||
.add_extension(
|
||||
x509.SubjectAlternativeName(names),
|
||||
critical=True,
|
||||
)
|
||||
.add_extension(
|
||||
x509.ExtendedKeyUsage(
|
||||
[
|
||||
ExtendedKeyUsageOID.SERVER_AUTH,
|
||||
]
|
||||
),
|
||||
critical=False,
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: Optional[str] = None) -> Any:
|
||||
def _add_client_usages(
|
||||
csr: Any, issuer: Credentials, rfc82name: Optional[str] = None
|
||||
) -> Any:
|
||||
cert = csr.add_extension(
|
||||
x509.BasicConstraints(ca=False, path_length=None),
|
||||
critical=True,
|
||||
).add_extension(
|
||||
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
|
||||
issuer.certificate.extensions.get_extension_for_class(
|
||||
x509.SubjectKeyIdentifier).value),
|
||||
critical=False
|
||||
x509.SubjectKeyIdentifier
|
||||
).value
|
||||
),
|
||||
critical=False,
|
||||
)
|
||||
if rfc82name:
|
||||
cert.add_extension(
|
||||
@ -488,19 +569,23 @@ class TestCA:
|
||||
critical=True,
|
||||
)
|
||||
cert.add_extension(
|
||||
x509.ExtendedKeyUsage([
|
||||
ExtendedKeyUsageOID.CLIENT_AUTH,
|
||||
]),
|
||||
critical=True
|
||||
x509.ExtendedKeyUsage(
|
||||
[
|
||||
ExtendedKeyUsageOID.CLIENT_AUTH,
|
||||
]
|
||||
),
|
||||
critical=True,
|
||||
)
|
||||
return cert
|
||||
|
||||
@staticmethod
|
||||
def _make_ca_credentials(name, key_type: Any,
|
||||
issuer: Optional[Credentials] = None,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
def _make_ca_credentials(
|
||||
name,
|
||||
key_type: Any,
|
||||
issuer: Optional[Credentials] = None,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
pkey = _private_key(key_type=key_type)
|
||||
if issuer is not None:
|
||||
issuer_subject = issuer.certificate.subject
|
||||
@ -508,47 +593,70 @@ class TestCA:
|
||||
else:
|
||||
issuer_subject = None
|
||||
issuer_key = pkey
|
||||
subject = TestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None)
|
||||
csr = TestCA._make_csr(subject=subject,
|
||||
issuer_subject=issuer_subject, pkey=pkey,
|
||||
valid_from_delta=valid_from, valid_until_delta=valid_to)
|
||||
subject = TestCA._make_x509_name(
|
||||
org_name=name, parent=issuer.subject if issuer else None
|
||||
)
|
||||
csr = TestCA._make_csr(
|
||||
subject=subject,
|
||||
issuer_subject=issuer_subject,
|
||||
pkey=pkey,
|
||||
valid_from_delta=valid_from,
|
||||
valid_until_delta=valid_to,
|
||||
)
|
||||
csr = TestCA._add_ca_usages(csr)
|
||||
cert = csr.sign(private_key=issuer_key,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend())
|
||||
cert = csr.sign(
|
||||
private_key=issuer_key, algorithm=hashes.SHA256(), backend=default_backend()
|
||||
)
|
||||
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
|
||||
|
||||
@staticmethod
|
||||
def _make_server_credentials(name: str, domains: List[str], issuer: Credentials,
|
||||
key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
def _make_server_credentials(
|
||||
name: str,
|
||||
domains: List[str],
|
||||
issuer: Credentials,
|
||||
key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
pkey = _private_key(key_type=key_type)
|
||||
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
|
||||
csr = TestCA._make_csr(subject=subject,
|
||||
issuer_subject=issuer.certificate.subject, pkey=pkey,
|
||||
valid_from_delta=valid_from, valid_until_delta=valid_to)
|
||||
csr = TestCA._make_csr(
|
||||
subject=subject,
|
||||
issuer_subject=issuer.certificate.subject,
|
||||
pkey=pkey,
|
||||
valid_from_delta=valid_from,
|
||||
valid_until_delta=valid_to,
|
||||
)
|
||||
csr = TestCA._add_leaf_usages(csr, domains=domains, issuer=issuer)
|
||||
cert = csr.sign(private_key=issuer.private_key,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend())
|
||||
cert = csr.sign(
|
||||
private_key=issuer.private_key,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend(),
|
||||
)
|
||||
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
|
||||
|
||||
@staticmethod
|
||||
def _make_client_credentials(name: str,
|
||||
issuer: Credentials, email: Optional[str],
|
||||
key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
def _make_client_credentials(
|
||||
name: str,
|
||||
issuer: Credentials,
|
||||
email: Optional[str],
|
||||
key_type: Any,
|
||||
valid_from: timedelta = timedelta(days=-1),
|
||||
valid_to: timedelta = timedelta(days=89),
|
||||
) -> Credentials:
|
||||
pkey = _private_key(key_type=key_type)
|
||||
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
|
||||
csr = TestCA._make_csr(subject=subject,
|
||||
issuer_subject=issuer.certificate.subject, pkey=pkey,
|
||||
valid_from_delta=valid_from, valid_until_delta=valid_to)
|
||||
csr = TestCA._make_csr(
|
||||
subject=subject,
|
||||
issuer_subject=issuer.certificate.subject,
|
||||
pkey=pkey,
|
||||
valid_from_delta=valid_from,
|
||||
valid_until_delta=valid_to,
|
||||
)
|
||||
csr = TestCA._add_client_usages(csr, issuer=issuer, rfc82name=email)
|
||||
cert = csr.sign(private_key=issuer.private_key,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend())
|
||||
cert = csr.sign(
|
||||
private_key=issuer.private_key,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend(),
|
||||
)
|
||||
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
|
||||
|
||||
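The hunks above are typical of the whole commit: black normalizes string literals to double quotes and, when a call or signature would run past its default 88-column limit, it puts each argument on its own line and appends a trailing comma so the layout stays stable on later runs. A minimal sketch of that rewrite (`make_creds` is a hypothetical stand-in, not a function from this diff):

def make_creds(name: str, key_type: str, valid_days: int, single_file: bool) -> dict:
    """Hypothetical helper, only here to illustrate black's rewrite."""
    return {"name": name, "type": key_type, "days": valid_days, "one": single_file}


# Before black: single quotes, arguments packed onto aligned continuation lines.
creds = make_creds(name='one.http.curl.se', key_type='rsa2048',
                   valid_days=89, single_file=False)

# After black: double quotes; the joined call would exceed 88 columns, so each
# argument moves to its own line and gains a "magic" trailing comma.
creds = make_creds(
    name="one.http.curl.se",
    key_type="rsa2048",
    valid_days=89,
    single_file=False,
)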
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
@@ -39,19 +39,23 @@ log = logging.getLogger(__name__)


class LocalClient:

    def __init__(self, name: str, env: Env, run_dir: Optional[str] = None,
                 timeout: Optional[float] = None,
                 run_env: Optional[Dict[str,str]] = None):
    def __init__(
        self,
        name: str,
        env: Env,
        run_dir: Optional[str] = None,
        timeout: Optional[float] = None,
        run_env: Optional[Dict[str, str]] = None,
    ):
        self.name = name
        self.path = os.path.join(env.build_dir, f'tests/http/clients/{name}')
        self.path = os.path.join(env.build_dir, f"tests/http/clients/{name}")
        self.env = env
        self._run_env = run_env
        self._timeout = timeout if timeout else env.test_timeout
        self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
        self._curl = os.environ["CURL"] if "CURL" in os.environ else env.curl
        self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, name)
        self._stdoutfile = f'{self._run_dir}/stdout'
        self._stderrfile = f'{self._run_dir}/stderr'
        self._stdoutfile = f"{self._run_dir}/stdout"
        self._stderrfile = f"{self._run_dir}/stderr"
        self._rmrf(self._run_dir)
        self._mkpath(self._run_dir)

@@ -67,7 +71,7 @@ class LocalClient:
        return os.path.exists(self.path)

    def download_file(self, i: int) -> str:
        return os.path.join(self._run_dir, f'download_{i}.data')
        return os.path.join(self._run_dir, f"download_{i}.data")

    def _rmf(self, path):
        if os.path.exists(path):
@@ -91,31 +95,44 @@ class LocalClient:
        run_env = None
        if self._run_env:
            run_env = self._run_env.copy()
            for key in ['CURL_DEBUG']:
            for key in ["CURL_DEBUG"]:
                if key in os.environ and key not in run_env:
                    run_env[key] = os.environ[key]
        try:
            with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
                p = subprocess.run(myargs, stderr=cerr, stdout=cout,
                                   cwd=self._run_dir, shell=False,
                                   input=None, env=run_env,
                                   timeout=self._timeout)
            with open(self._stdoutfile, "w") as cout, open(
                self._stderrfile, "w"
            ) as cerr:
                p = subprocess.run(
                    myargs,
                    stderr=cerr,
                    stdout=cout,
                    cwd=self._run_dir,
                    shell=False,
                    input=None,
                    env=run_env,
                    timeout=self._timeout,
                )
            exitcode = p.returncode
        except subprocess.TimeoutExpired:
            log.warning(f'Timeout after {self._timeout}s: {args}')
            log.warning(f"Timeout after {self._timeout}s: {args}")
            exitcode = -1
            exception = 'TimeoutExpired'
            exception = "TimeoutExpired"
        coutput = open(self._stdoutfile).readlines()
        cerrput = open(self._stderrfile).readlines()
        return ExecResult(args=myargs, exit_code=exitcode, exception=exception,
                          stdout=coutput, stderr=cerrput,
                          duration=datetime.now() - start)
        return ExecResult(
            args=myargs,
            exit_code=exitcode,
            exception=exception,
            stdout=coutput,
            stderr=cerrput,
            duration=datetime.now() - start,
        )

    def dump_logs(self):
        lines = []
        lines.append('>>--stdout ----------------------------------------------\n')
        lines.append(">>--stdout ----------------------------------------------\n")
        lines.extend(open(self._stdoutfile).readlines())
        lines.append('>>--stderr ----------------------------------------------\n')
        lines.append(">>--stderr ----------------------------------------------\n")
        lines.extend(open(self._stderrfile).readlines())
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)
        lines.append("<<-------------------------------------------------------\n")
        return "".join(lines)

File diff suppressed because it is too large
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
@@ -52,92 +52,93 @@ def init_config_from(conf_path):

TESTS_HTTPD_PATH = os.path.dirname(os.path.dirname(__file__))
TOP_PATH = os.path.join(os.getcwd(), os.path.pardir)
DEF_CONFIG = init_config_from(os.path.join(TOP_PATH, 'tests', 'http', 'config.ini'))
CURL = os.path.join(TOP_PATH, 'src', 'curl')
DEF_CONFIG = init_config_from(os.path.join(TOP_PATH, "tests", "http", "config.ini"))
CURL = os.path.join(TOP_PATH, "src", "curl")


class EnvConfig:

    def __init__(self):
        self.tests_dir = TESTS_HTTPD_PATH
        self.gen_dir = os.path.join(self.tests_dir, 'gen')
        self.gen_dir = os.path.join(self.tests_dir, "gen")
        self.project_dir = os.path.dirname(os.path.dirname(self.tests_dir))
        self.build_dir = TOP_PATH
        self.config = DEF_CONFIG
        # check curl and its features
        self.curl = CURL
        if 'CURL' in os.environ:
            self.curl = os.environ['CURL']
        if "CURL" in os.environ:
            self.curl = os.environ["CURL"]
        self.curl_props = {
            'version_string': '',
            'version': '',
            'os': '',
            'fullname': '',
            'features_string': '',
            'features': set(),
            'protocols_string': '',
            'protocols': set(),
            'libs': set(),
            'lib_versions': set(),
            "version_string": "",
            "version": "",
            "os": "",
            "fullname": "",
            "features_string": "",
            "features": set(),
            "protocols_string": "",
            "protocols": set(),
            "libs": set(),
            "lib_versions": set(),
        }
        self.curl_is_debug = False
        self.curl_protos = []
        p = subprocess.run(args=[self.curl, '-V'],
                           capture_output=True, text=True)
        p = subprocess.run(args=[self.curl, "-V"], capture_output=True, text=True)
        if p.returncode != 0:
            raise RuntimeError(f'{self.curl} -V failed with exit code: {p.returncode}')
        if p.stderr.startswith('WARNING:'):
            raise RuntimeError(f"{self.curl} -V failed with exit code: {p.returncode}")
        if p.stderr.startswith("WARNING:"):
            self.curl_is_debug = True
        for line in p.stdout.splitlines(keepends=False):
            if line.startswith('curl '):
                self.curl_props['version_string'] = line
                m = re.match(r'^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$', line)
            if line.startswith("curl "):
                self.curl_props["version_string"] = line
                m = re.match(r"^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$", line)
                if m:
                    self.curl_props['fullname'] = m.group(0)
                    self.curl_props['version'] = m.group('version')
                    self.curl_props['os'] = m.group('os')
                    self.curl_props['lib_versions'] = {
                        lib.lower() for lib in m.group('libs').split(' ')
                    self.curl_props["fullname"] = m.group(0)
                    self.curl_props["version"] = m.group("version")
                    self.curl_props["os"] = m.group("os")
                    self.curl_props["lib_versions"] = {
                        lib.lower() for lib in m.group("libs").split(" ")
                    }
                    self.curl_props['libs'] = {
                        re.sub(r'/[a-z0-9.-]*', '', lib) for lib in self.curl_props['lib_versions']
                    self.curl_props["libs"] = {
                        re.sub(r"/[a-z0-9.-]*", "", lib)
                        for lib in self.curl_props["lib_versions"]
                    }
            if line.startswith('Features: '):
                self.curl_props['features_string'] = line[10:]
                self.curl_props['features'] = {
                    feat.lower() for feat in line[10:].split(' ')
            if line.startswith("Features: "):
                self.curl_props["features_string"] = line[10:]
                self.curl_props["features"] = {
                    feat.lower() for feat in line[10:].split(" ")
                }
            if line.startswith('Protocols: '):
                self.curl_props['protocols_string'] = line[11:]
                self.curl_props['protocols'] = {
                    prot.lower() for prot in line[11:].split(' ')
            if line.startswith("Protocols: "):
                self.curl_props["protocols_string"] = line[11:]
                self.curl_props["protocols"] = {
                    prot.lower() for prot in line[11:].split(" ")
                }

        self.ports = alloc_ports(port_specs={
            'ftp': socket.SOCK_STREAM,
            'ftps': socket.SOCK_STREAM,
            'http': socket.SOCK_STREAM,
            'https': socket.SOCK_STREAM,
            'nghttpx_https': socket.SOCK_STREAM,
            'proxy': socket.SOCK_STREAM,
            'proxys': socket.SOCK_STREAM,
            'h2proxys': socket.SOCK_STREAM,
            'caddy': socket.SOCK_STREAM,
            'caddys': socket.SOCK_STREAM,
            'ws': socket.SOCK_STREAM,
        })
        self.httpd = self.config['httpd']['httpd']
        self.apxs = self.config['httpd']['apxs']
        self.ports = alloc_ports(
            port_specs={
                "ftp": socket.SOCK_STREAM,
                "ftps": socket.SOCK_STREAM,
                "http": socket.SOCK_STREAM,
                "https": socket.SOCK_STREAM,
                "nghttpx_https": socket.SOCK_STREAM,
                "proxy": socket.SOCK_STREAM,
                "proxys": socket.SOCK_STREAM,
                "h2proxys": socket.SOCK_STREAM,
                "caddy": socket.SOCK_STREAM,
                "caddys": socket.SOCK_STREAM,
                "ws": socket.SOCK_STREAM,
            }
        )
        self.httpd = self.config["httpd"]["httpd"]
        self.apxs = self.config["httpd"]["apxs"]
        if len(self.apxs) == 0:
            self.apxs = None
        self._httpd_version = None

        self.examples_pem = {
            'key': 'xxx',
            'cert': 'xxx',
            "key": "xxx",
            "cert": "xxx",
        }
        self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs')
        self.tld = 'http.curl.se'
        self.htdocs_dir = os.path.join(self.gen_dir, "htdocs")
        self.tld = "http.curl.se"
        self.domain1 = f"one.{self.tld}"
        self.domain1brotli = f"brotli.one.{self.tld}"
        self.domain2 = f"two.{self.tld}"
@@ -145,60 +146,82 @@ class EnvConfig:
        self.proxy_domain = f"proxy.{self.tld}"
        self.expired_domain = f"expired.{self.tld}"
        self.cert_specs = [
            CertificateSpec(domains=[self.domain1, self.domain1brotli, 'localhost', '127.0.0.1'], key_type='rsa2048'),
            CertificateSpec(domains=[self.domain2], key_type='rsa2048'),
            CertificateSpec(domains=[self.ftp_domain], key_type='rsa2048'),
            CertificateSpec(domains=[self.proxy_domain, '127.0.0.1'], key_type='rsa2048'),
            CertificateSpec(domains=[self.expired_domain], key_type='rsa2048',
                            valid_from=timedelta(days=-100), valid_to=timedelta(days=-10)),
            CertificateSpec(name="clientsX", sub_specs=[
                CertificateSpec(name="user1", client=True),
            ]),
            CertificateSpec(
                domains=[self.domain1, self.domain1brotli, "localhost", "127.0.0.1"],
                key_type="rsa2048",
            ),
            CertificateSpec(domains=[self.domain2], key_type="rsa2048"),
            CertificateSpec(domains=[self.ftp_domain], key_type="rsa2048"),
            CertificateSpec(
                domains=[self.proxy_domain, "127.0.0.1"], key_type="rsa2048"
            ),
            CertificateSpec(
                domains=[self.expired_domain],
                key_type="rsa2048",
                valid_from=timedelta(days=-100),
                valid_to=timedelta(days=-10),
            ),
            CertificateSpec(
                name="clientsX",
                sub_specs=[
                    CertificateSpec(name="user1", client=True),
                ],
            ),
        ]

        self.nghttpx = self.config['nghttpx']['nghttpx']
        self.nghttpx = self.config["nghttpx"]["nghttpx"]
        if len(self.nghttpx.strip()) == 0:
            self.nghttpx = None
        self._nghttpx_version = None
        self.nghttpx_with_h3 = False
        if self.nghttpx is not None:
            p = subprocess.run(args=[self.nghttpx, '-v'],
                               capture_output=True, text=True)
            p = subprocess.run(
                args=[self.nghttpx, "-v"], capture_output=True, text=True
            )
            if p.returncode != 0:
                # not a working nghttpx
                self.nghttpx = None
            else:
                self._nghttpx_version = re.sub(r'^nghttpx\s*', '', p.stdout.strip())
                self.nghttpx_with_h3 = re.match(r'.* nghttp3/.*', p.stdout.strip()) is not None
                log.debug(f'nghttpx -v: {p.stdout}')
                self._nghttpx_version = re.sub(r"^nghttpx\s*", "", p.stdout.strip())
                self.nghttpx_with_h3 = (
                    re.match(r".* nghttp3/.*", p.stdout.strip()) is not None
                )
                log.debug(f"nghttpx -v: {p.stdout}")

        self.caddy = self.config['caddy']['caddy']
        self.caddy = self.config["caddy"]["caddy"]
        self._caddy_version = None
        if len(self.caddy.strip()) == 0:
            self.caddy = None
        if self.caddy is not None:
            try:
                p = subprocess.run(args=[self.caddy, 'version'],
                                   capture_output=True, text=True)
                p = subprocess.run(
                    args=[self.caddy, "version"], capture_output=True, text=True
                )
                if p.returncode != 0:
                    # not a working caddy
                    self.caddy = None
                m = re.match(r'v?(\d+\.\d+\.\d+).*', p.stdout)
                m = re.match(r"v?(\d+\.\d+\.\d+).*", p.stdout)
                if m:
                    self._caddy_version = m.group(1)
                else:
                    raise RuntimeError(f'Unable to determine caddy version from: {p.stdout}')
                    raise RuntimeError(
                        f"Unable to determine caddy version from: {p.stdout}"
                    )
            # TODO: specify specific exceptions here
            except:  # noqa: E722
                self.caddy = None

        self.vsftpd = self.config['vsftpd']['vsftpd']
        self.vsftpd = self.config["vsftpd"]["vsftpd"]
        self._vsftpd_version = None
        if self.vsftpd is not None:
            try:
                with tempfile.TemporaryFile('w+') as tmp:
                    p = subprocess.run(args=[self.vsftpd, '-v'],
                                       capture_output=True, text=True, stdin=tmp)
                with tempfile.TemporaryFile("w+") as tmp:
                    p = subprocess.run(
                        args=[self.vsftpd, "-v"],
                        capture_output=True,
                        text=True,
                        stdin=tmp,
                    )
                    if p.returncode != 0:
                        # not a working vsftpd
                        self.vsftpd = None
@@ -211,36 +234,41 @@ class EnvConfig:
                    # any data there instead.
                    tmp.seek(0)
                    ver_text = tmp.read()
                    m = re.match(r'vsftpd: version (\d+\.\d+\.\d+)', ver_text)
                    m = re.match(r"vsftpd: version (\d+\.\d+\.\d+)", ver_text)
                    if m:
                        self._vsftpd_version = m.group(1)
                    elif len(p.stderr) == 0:
                        # vsftp does not use stdout or stderr for printing its version... -.-
                        self._vsftpd_version = 'unknown'
                        self._vsftpd_version = "unknown"
                    else:
                        raise Exception(f'Unable to determine VsFTPD version from: {p.stderr}')
                        raise Exception(
                            f"Unable to determine VsFTPD version from: {p.stderr}"
                        )
            except Exception:
                self.vsftpd = None

        self._tcpdump = shutil.which('tcpdump')
        self._tcpdump = shutil.which("tcpdump")

    @property
    def httpd_version(self):
        if self._httpd_version is None and self.apxs is not None:
            try:
                p = subprocess.run(args=[self.apxs, '-q', 'HTTPD_VERSION'],
                                   capture_output=True, text=True)
                p = subprocess.run(
                    args=[self.apxs, "-q", "HTTPD_VERSION"],
                    capture_output=True,
                    text=True,
                )
                if p.returncode != 0:
                    log.error(f'{self.apxs} failed to query HTTPD_VERSION: {p}')
                    log.error(f"{self.apxs} failed to query HTTPD_VERSION: {p}")
                else:
                    self._httpd_version = p.stdout.strip()
            except Exception:
                log.exception(f'{self.apxs} failed to run')
                log.exception(f"{self.apxs} failed to run")
        return self._httpd_version

    def versiontuple(self, v):
        v = re.sub(r'(\d+\.\d+(\.\d+)?)(-\S+)?', r'\1', v)
        return tuple(map(int, v.split('.')))
        v = re.sub(r"(\d+\.\d+(\.\d+)?)(-\S+)?", r"\1", v)
        return tuple(map(int, v.split(".")))

    def httpd_is_at_least(self, minv):
        if self.httpd_version is None:
@@ -255,15 +283,17 @@ class EnvConfig:
        return hv >= self.versiontuple(minv)

    def is_complete(self) -> bool:
        return os.path.isfile(self.httpd) and \
               self.apxs is not None and \
               os.path.isfile(self.apxs)
        return (
            os.path.isfile(self.httpd)
            and self.apxs is not None
            and os.path.isfile(self.apxs)
        )

    def get_incomplete_reason(self) -> Optional[str]:
        if self.httpd is None or len(self.httpd.strip()) == 0:
            return 'httpd not configured, see `--with-test-httpd=<path>`'
            return "httpd not configured, see `--with-test-httpd=<path>`"
        if not os.path.isfile(self.httpd):
            return f'httpd ({self.httpd}) not found'
            return f"httpd ({self.httpd}) not found"
        if self.apxs is None:
            return "command apxs not found (commonly provided in apache2-dev)"
        if not os.path.isfile(self.apxs):
@@ -288,7 +318,6 @@ class EnvConfig:


class Env:

    CONFIG = EnvConfig()

    @staticmethod
@@ -309,73 +338,74 @@ class Env:

    @staticmethod
    def have_ssl_curl() -> bool:
        return Env.curl_has_feature('ssl') or Env.curl_has_feature('multissl')
        return Env.curl_has_feature("ssl") or Env.curl_has_feature("multissl")

    @staticmethod
    def have_h2_curl() -> bool:
        return 'http2' in Env.CONFIG.curl_props['features']
        return "http2" in Env.CONFIG.curl_props["features"]

    @staticmethod
    def have_h3_curl() -> bool:
        return 'http3' in Env.CONFIG.curl_props['features']
        return "http3" in Env.CONFIG.curl_props["features"]

    @staticmethod
    def curl_uses_lib(libname: str) -> bool:
        return libname.lower() in Env.CONFIG.curl_props['libs']
        return libname.lower() in Env.CONFIG.curl_props["libs"]

    @staticmethod
    def curl_uses_ossl_quic() -> bool:
        if Env.have_h3_curl():
            return not Env.curl_uses_lib('ngtcp2') and Env.curl_uses_lib('nghttp3')
            return not Env.curl_uses_lib("ngtcp2") and Env.curl_uses_lib("nghttp3")
        return False

    @staticmethod
    def curl_version_string() -> str:
        return Env.CONFIG.curl_props['version_string']
        return Env.CONFIG.curl_props["version_string"]

    @staticmethod
    def curl_features_string() -> str:
        return Env.CONFIG.curl_props['features_string']
        return Env.CONFIG.curl_props["features_string"]

    @staticmethod
    def curl_has_feature(feature: str) -> bool:
        return feature.lower() in Env.CONFIG.curl_props['features']
        return feature.lower() in Env.CONFIG.curl_props["features"]

    @staticmethod
    def curl_protocols_string() -> str:
        return Env.CONFIG.curl_props['protocols_string']
        return Env.CONFIG.curl_props["protocols_string"]

    @staticmethod
    def curl_has_protocol(protocol: str) -> bool:
        return protocol.lower() in Env.CONFIG.curl_props['protocols']
        return protocol.lower() in Env.CONFIG.curl_props["protocols"]

    @staticmethod
    def curl_lib_version(libname: str) -> str:
        prefix = f'{libname.lower()}/'
        for lversion in Env.CONFIG.curl_props['lib_versions']:
        prefix = f"{libname.lower()}/"
        for lversion in Env.CONFIG.curl_props["lib_versions"]:
            if lversion.startswith(prefix):
                return lversion[len(prefix):]
        return 'unknown'
                return lversion[len(prefix) :]
        return "unknown"

    @staticmethod
    def curl_lib_version_at_least(libname: str, min_version) -> bool:
        lversion = Env.curl_lib_version(libname)
        if lversion != 'unknown':
            return Env.CONFIG.versiontuple(min_version) <= \
                Env.CONFIG.versiontuple(lversion)
        if lversion != "unknown":
            return Env.CONFIG.versiontuple(min_version) <= Env.CONFIG.versiontuple(
                lversion
            )
        return False

    @staticmethod
    def curl_os() -> str:
        return Env.CONFIG.curl_props['os']
        return Env.CONFIG.curl_props["os"]

    @staticmethod
    def curl_fullname() -> str:
        return Env.CONFIG.curl_props['fullname']
        return Env.CONFIG.curl_props["fullname"]

    @staticmethod
    def curl_version() -> str:
        return Env.CONFIG.curl_props['version']
        return Env.CONFIG.curl_props["version"]

    @staticmethod
    def curl_is_debug() -> bool:
@@ -422,17 +452,16 @@ class Env:
        return Env.CONFIG.tcpdmp

    def __init__(self, pytestconfig=None):
        self._verbose = pytestconfig.option.verbose \
            if pytestconfig is not None else 0
        self._verbose = pytestconfig.option.verbose if pytestconfig is not None else 0
        self._ca = None
        self._test_timeout = 300.0 if self._verbose > 1 else 60.0  # seconds

    def issue_certs(self):
        if self._ca is None:
            ca_dir = os.path.join(self.CONFIG.gen_dir, 'ca')
            self._ca = TestCA.create_root(name=self.CONFIG.tld,
                                          store_dir=ca_dir,
                                          key_type="rsa2048")
            ca_dir = os.path.join(self.CONFIG.gen_dir, "ca")
            self._ca = TestCA.create_root(
                name=self.CONFIG.tld, store_dir=ca_dir, key_type="rsa2048"
            )
        self._ca.issue_certs(self.CONFIG.cert_specs)

    def setup(self):
@@ -508,15 +537,15 @@ class Env:

    @property
    def http_port(self) -> int:
        return self.CONFIG.ports['http']
        return self.CONFIG.ports["http"]

    @property
    def https_port(self) -> int:
        return self.CONFIG.ports['https']
        return self.CONFIG.ports["https"]

    @property
    def nghttpx_https_port(self) -> int:
        return self.CONFIG.ports['nghttpx_https']
        return self.CONFIG.ports["nghttpx_https"]

    @property
    def h3_port(self) -> int:
@@ -524,27 +553,27 @@ class Env:

    @property
    def proxy_port(self) -> int:
        return self.CONFIG.ports['proxy']
        return self.CONFIG.ports["proxy"]

    @property
    def proxys_port(self) -> int:
        return self.CONFIG.ports['proxys']
        return self.CONFIG.ports["proxys"]

    @property
    def ftp_port(self) -> int:
        return self.CONFIG.ports['ftp']
        return self.CONFIG.ports["ftp"]

    @property
    def ftps_port(self) -> int:
        return self.CONFIG.ports['ftps']
        return self.CONFIG.ports["ftps"]

    @property
    def h2proxys_port(self) -> int:
        return self.CONFIG.ports['h2proxys']
        return self.CONFIG.ports["h2proxys"]

    def pts_port(self, proto: str = 'http/1.1') -> int:
    def pts_port(self, proto: str = "http/1.1") -> int:
        # proxy tunnel port
        return self.CONFIG.ports['h2proxys' if proto == 'h2' else 'proxys']
        return self.CONFIG.ports["h2proxys" if proto == "h2" else "proxys"]

    @property
    def caddy(self) -> str:
@@ -552,11 +581,11 @@ class Env:

    @property
    def caddy_https_port(self) -> int:
        return self.CONFIG.ports['caddys']
        return self.CONFIG.ports["caddys"]

    @property
    def caddy_http_port(self) -> int:
        return self.CONFIG.ports['caddy']
        return self.CONFIG.ports["caddy"]

    @property
    def vsftpd(self) -> str:
@@ -564,7 +593,7 @@ class Env:

    @property
    def ws_port(self) -> int:
        return self.CONFIG.ports['ws']
        return self.CONFIG.ports["ws"]

    @property
    def curl(self) -> str:
@@ -584,37 +613,44 @@ class Env:

    @property
    def slow_network(self) -> bool:
        return "CURL_DBG_SOCK_WBLOCK" in os.environ or \
               "CURL_DBG_SOCK_WPARTIAL" in os.environ
        return (
            "CURL_DBG_SOCK_WBLOCK" in os.environ
            or "CURL_DBG_SOCK_WPARTIAL" in os.environ
        )

    @property
    def ci_run(self) -> bool:
        return "CURL_CI" in os.environ

    def port_for(self, alpn_proto: Optional[str] = None):
        if alpn_proto is None or \
                alpn_proto in ['h2', 'http/1.1', 'http/1.0', 'http/0.9']:
        if alpn_proto is None or alpn_proto in [
            "h2",
            "http/1.1",
            "http/1.0",
            "http/0.9",
        ]:
            return self.https_port
        if alpn_proto in ['h3']:
        if alpn_proto in ["h3"]:
            return self.h3_port
        return self.http_port

    def authority_for(self, domain: str, alpn_proto: Optional[str] = None):
        return f'{domain}:{self.port_for(alpn_proto=alpn_proto)}'
        return f"{domain}:{self.port_for(alpn_proto=alpn_proto)}"

    def make_data_file(self, indir: str, fname: str, fsize: int,
                       line_length: int = 1024) -> str:
    def make_data_file(
        self, indir: str, fname: str, fsize: int, line_length: int = 1024
    ) -> str:
        if line_length < 11:
            raise RuntimeError('line_length less than 11 not supported')
            raise RuntimeError("line_length less than 11 not supported")
        fpath = os.path.join(indir, fname)
        s10 = "0123456789"
        s = round((line_length / 10) + 1) * s10
        s = s[0:line_length-11]
        with open(fpath, 'w') as fd:
        s = s[0 : line_length - 11]
        with open(fpath, "w") as fd:
            for i in range(int(fsize / line_length)):
                fd.write(f"{i:09d}-{s}\n")
            remain = int(fsize % line_length)
            if remain != 0:
                i = int(fsize / line_length) + 1
                fd.write(f"{i:09d}-{s}"[0:remain-1] + "\n")
                fd.write(f"{i:09d}-{s}"[0 : remain - 1] + "\n")
        return fpath

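One rewrite in the file above is easy to misread as a typo: `lversion[len(prefix):]` became `lversion[len(prefix) :]`. That spacing is deliberate. When a slice bound is a complex expression, black follows PEP 8 and treats the slice colon like a binary operator, spacing it symmetrically. A small runnable sketch (the version strings are made up for illustration):

prefix = "nghttp3/"
lversion = "nghttp3/1.1.0"

# Simple literal bounds keep the tight form under black:
name = lversion[0:7]

# A complex bound (here, a function call) gets symmetric spaces around the
# colon, so the space before ":" is black's output, not an accident:
version = lversion[len(prefix) :]

print(name, version)  # prints: nghttp3 1.1.0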
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
@@ -42,53 +42,71 @@ log = logging.getLogger(__name__)


class Httpd:

    MODULES = [
        'log_config', 'logio', 'unixd', 'version', 'watchdog',
        'authn_core', 'authn_file',
        'authz_user', 'authz_core', 'authz_host',
        'auth_basic', 'auth_digest',
        'alias', 'env', 'filter', 'headers', 'mime', 'setenvif',
        'socache_shmcb',
        'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
        'brotli',
        'mpm_event',
        "log_config",
        "logio",
        "unixd",
        "version",
        "watchdog",
        "authn_core",
        "authn_file",
        "authz_user",
        "authz_core",
        "authz_host",
        "auth_basic",
        "auth_digest",
        "alias",
        "env",
        "filter",
        "headers",
        "mime",
        "setenvif",
        "socache_shmcb",
        "rewrite",
        "http2",
        "ssl",
        "proxy",
        "proxy_http",
        "proxy_connect",
        "brotli",
        "mpm_event",
    ]
    COMMON_MODULES_DIRS = [
        '/usr/lib/apache2/modules',  # debian
        '/usr/libexec/apache2/',  # macos
        "/usr/lib/apache2/modules",  # debian
        "/usr/libexec/apache2/",  # macos
    ]

    MOD_CURLTEST = None

    def __init__(self, env: Env, proxy_auth: bool = False):
        self.env = env
        self._apache_dir = os.path.join(env.gen_dir, 'apache')
        self._run_dir = os.path.join(self._apache_dir, 'run')
        self._lock_dir = os.path.join(self._apache_dir, 'locks')
        self._docs_dir = os.path.join(self._apache_dir, 'docs')
        self._conf_dir = os.path.join(self._apache_dir, 'conf')
        self._conf_file = os.path.join(self._conf_dir, 'test.conf')
        self._logs_dir = os.path.join(self._apache_dir, 'logs')
        self._error_log = os.path.join(self._logs_dir, 'error_log')
        self._tmp_dir = os.path.join(self._apache_dir, 'tmp')
        self._basic_passwords = os.path.join(self._conf_dir, 'basic.passwords')
        self._digest_passwords = os.path.join(self._conf_dir, 'digest.passwords')
        self._apache_dir = os.path.join(env.gen_dir, "apache")
        self._run_dir = os.path.join(self._apache_dir, "run")
        self._lock_dir = os.path.join(self._apache_dir, "locks")
        self._docs_dir = os.path.join(self._apache_dir, "docs")
        self._conf_dir = os.path.join(self._apache_dir, "conf")
        self._conf_file = os.path.join(self._conf_dir, "test.conf")
        self._logs_dir = os.path.join(self._apache_dir, "logs")
        self._error_log = os.path.join(self._logs_dir, "error_log")
        self._tmp_dir = os.path.join(self._apache_dir, "tmp")
        self._basic_passwords = os.path.join(self._conf_dir, "basic.passwords")
        self._digest_passwords = os.path.join(self._conf_dir, "digest.passwords")
        self._mods_dir = None
        self._auth_digest = True
        self._proxy_auth_basic = proxy_auth
        self._extra_configs = {}
        self._loaded_extra_configs = None
        assert env.apxs
        p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'],
                           capture_output=True, text=True)
        p = subprocess.run(
            args=[env.apxs, "-q", "libexecdir"], capture_output=True, text=True
        )
        if p.returncode != 0:
            raise Exception(f'{env.apxs} failed to query libexecdir: {p}')
            raise Exception(f"{env.apxs} failed to query libexecdir: {p}")
        self._mods_dir = p.stdout.strip()
        if self._mods_dir is None:
            raise Exception('apache modules dir cannot be found')
            raise Exception("apache modules dir cannot be found")
        if not os.path.exists(self._mods_dir):
            raise Exception(f'apache modules dir does not exist: {self._mods_dir}')
            raise Exception(f"apache modules dir does not exist: {self._mods_dir}")
        self._process = None
        self._rmf(self._error_log)
        self._init_curltest()
@@ -115,50 +133,62 @@ class Httpd:
    def set_proxy_auth(self, active: bool):
        self._proxy_auth_basic = active

    def _run(self, args, intext=''):
    def _run(self, args, intext=""):
        env = os.environ.copy()
        env['APACHE_RUN_DIR'] = self._run_dir
        env['APACHE_RUN_USER'] = os.environ['USER']
        env['APACHE_LOCK_DIR'] = self._lock_dir
        env['APACHE_CONFDIR'] = self._apache_dir
        p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
                           cwd=self.env.gen_dir,
                           input=intext.encode() if intext else None,
                           env=env)
        env["APACHE_RUN_DIR"] = self._run_dir
        env["APACHE_RUN_USER"] = os.environ["USER"]
        env["APACHE_LOCK_DIR"] = self._lock_dir
        env["APACHE_CONFDIR"] = self._apache_dir
        p = subprocess.run(
            args,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            cwd=self.env.gen_dir,
            input=intext.encode() if intext else None,
            env=env,
        )
        start = datetime.now()
        return ExecResult(args=args, exit_code=p.returncode,
                          stdout=p.stdout.decode().splitlines(),
                          stderr=p.stderr.decode().splitlines(),
                          duration=datetime.now() - start)
        return ExecResult(
            args=args,
            exit_code=p.returncode,
            stdout=p.stdout.decode().splitlines(),
            stderr=p.stderr.decode().splitlines(),
            duration=datetime.now() - start,
        )

    def _cmd_httpd(self, cmd: str):
        args = [self.env.httpd,
                "-d", self._apache_dir,
                "-f", self._conf_file,
                "-k", cmd]
        args = [
            self.env.httpd,
            "-d",
            self._apache_dir,
            "-f",
            self._conf_file,
            "-k",
            cmd,
        ]
        return self._run(args=args)

    def start(self):
        if self._process:
            self.stop()
        self._write_config()
        with open(self._error_log, 'a') as fd:
            fd.write('start of server\n')
        with open(os.path.join(self._apache_dir, 'xxx'), 'a') as fd:
            fd.write('start of server\n')
        r = self._cmd_httpd('start')
        with open(self._error_log, "a") as fd:
            fd.write("start of server\n")
        with open(os.path.join(self._apache_dir, "xxx"), "a") as fd:
            fd.write("start of server\n")
        r = self._cmd_httpd("start")
        if r.exit_code != 0:
            log.error(f'failed to start httpd: {r}')
            log.error(f"failed to start httpd: {r}")
            return False
        self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
        return self.wait_live(timeout=timedelta(seconds=5))

    def stop(self):
        r = self._cmd_httpd('stop')
        r = self._cmd_httpd("stop")
        self._loaded_extra_configs = None
        if r.exit_code == 0:
            return self.wait_dead(timeout=timedelta(seconds=5))
        log.fatal(f'stopping httpd failed: {r}')
        log.fatal(f"stopping httpd failed: {r}")
        return r.exit_code == 0

    def restart(self):
@@ -170,7 +200,7 @@ class Httpd:
        r = self._cmd_httpd("graceful")
        self._loaded_extra_configs = None
        if r.exit_code != 0:
            log.error(f'failed to reload httpd: {r}')
            log.error(f"failed to reload httpd: {r}")
        self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
        return self.wait_live(timeout=timedelta(seconds=5))

@@ -183,22 +213,23 @@ class Httpd:
        curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
        try_until = datetime.now() + timeout
        while datetime.now() < try_until:
            r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/')
            r = curl.http_get(url=f"http://{self.env.domain1}:{self.env.http_port}/")
            if r.exit_code != 0:
                return True
            time.sleep(.1)
            time.sleep(0.1)
        log.debug(f"Server still responding after {timeout}")
        return False

    def wait_live(self, timeout: timedelta):
        curl = CurlClient(env=self.env, run_dir=self._tmp_dir,
                          timeout=timeout.total_seconds())
        curl = CurlClient(
            env=self.env, run_dir=self._tmp_dir, timeout=timeout.total_seconds()
        )
        try_until = datetime.now() + timeout
        while datetime.now() < try_until:
            r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/')
            r = curl.http_get(url=f"http://{self.env.domain1}:{self.env.http_port}/")
            if r.exit_code == 0:
                return True
            time.sleep(.1)
            time.sleep(0.1)
        log.debug(f"Server still not responding after {timeout}")
        return False

@@ -227,266 +258,306 @@ class Httpd:
        self._mkpath(self._conf_dir)
        self._mkpath(self._logs_dir)
        self._mkpath(self._tmp_dir)
        self._mkpath(os.path.join(self._docs_dir, 'two'))
        with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
        self._mkpath(os.path.join(self._docs_dir, "two"))
        with open(os.path.join(self._docs_dir, "data.json"), "w") as fd:
            data = {
                'server': f'{domain1}',
                "server": f"{domain1}",
            }
            fd.write(JSONEncoder().encode(data))
        with open(os.path.join(self._docs_dir, 'two/data.json'), 'w') as fd:
        with open(os.path.join(self._docs_dir, "two/data.json"), "w") as fd:
            data = {
                'server': f'{domain2}',
                "server": f"{domain2}",
            }
            fd.write(JSONEncoder().encode(data))
        if self._proxy_auth_basic:
            with open(self._basic_passwords, 'w') as fd:
                fd.write('proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n')
            with open(self._basic_passwords, "w") as fd:
                fd.write("proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n")
        if self._auth_digest:
            with open(self._digest_passwords, 'w') as fd:
                fd.write('test:restricted area:57123e269fd73d71ae0656594e938e2f\n')
            self._mkpath(os.path.join(self.docs_dir, 'restricted/digest'))
            with open(os.path.join(self.docs_dir, 'restricted/digest/data.json'), 'w') as fd:
            with open(self._digest_passwords, "w") as fd:
                fd.write("test:restricted area:57123e269fd73d71ae0656594e938e2f\n")
            self._mkpath(os.path.join(self.docs_dir, "restricted/digest"))
            with open(
                os.path.join(self.docs_dir, "restricted/digest/data.json"), "w"
            ) as fd:
                fd.write('{"area":"digest"}\n')
        with open(self._conf_file, 'w') as fd:
        with open(self._conf_file, "w") as fd:
            for m in self.MODULES:
                if os.path.exists(os.path.join(self._mods_dir, f'mod_{m}.so')):
                if os.path.exists(os.path.join(self._mods_dir, f"mod_{m}.so")):
                    fd.write(f'LoadModule {m}_module "{self._mods_dir}/mod_{m}.so"\n')
            if Httpd.MOD_CURLTEST is not None:
                fd.write(f'LoadModule curltest_module "{Httpd.MOD_CURLTEST}"\n')
            conf = [ # base server config
            conf = [  # base server config
                f'ServerRoot "{self._apache_dir}"',
                'DefaultRuntimeDir logs',
                'PidFile httpd.pid',
                f'ErrorLog {self._error_log}',
                f'LogLevel {self._get_log_level()}',
                'StartServers 4',
                'ReadBufferSize 16000',
                'H2MinWorkers 16',
                'H2MaxWorkers 256',
                f'Listen {self.env.http_port}',
                f'Listen {self.env.https_port}',
                f'Listen {self.env.proxy_port}',
                f'Listen {self.env.proxys_port}',
                "DefaultRuntimeDir logs",
                "PidFile httpd.pid",
                f"ErrorLog {self._error_log}",
                f"LogLevel {self._get_log_level()}",
                "StartServers 4",
                "ReadBufferSize 16000",
                "H2MinWorkers 16",
                "H2MaxWorkers 256",
                f"Listen {self.env.http_port}",
                f"Listen {self.env.https_port}",
                f"Listen {self.env.proxy_port}",
                f"Listen {self.env.proxys_port}",
                f'TypesConfig "{self._conf_dir}/mime.types"',
'SSLSessionCache "shmcb:ssl_gcache_data(32000)"',
]
if 'base' in self._extra_configs:
conf.extend(self._extra_configs['base'])
conf.extend([ # plain http host for domain1
f'<VirtualHost *:{self.env.http_port}>',
f' ServerName {domain1}',
' ServerAlias localhost',
f' DocumentRoot "{self._docs_dir}"',
' Protocols h2c http/1.1',
' H2Direct on',
])
if "base" in self._extra_configs:
conf.extend(self._extra_configs["base"])
conf.extend(
[ # plain http host for domain1
f"<VirtualHost *:{self.env.http_port}>",
f" ServerName {domain1}",
" ServerAlias localhost",
f' DocumentRoot "{self._docs_dir}"',
" Protocols h2c http/1.1",
" H2Direct on",
]
)
conf.extend(self._curltest_conf(domain1))
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for domain1, h1 + h2
f'<VirtualHost *:{self.env.https_port}>',
f' ServerName {domain1}',
' ServerAlias localhost',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds1.cert_file}',
f' SSLCertificateKeyFile {creds1.pkey_file}',
f' DocumentRoot "{self._docs_dir}"',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # https host for domain1, h1 + h2
f"<VirtualHost *:{self.env.https_port}>",
f" ServerName {domain1}",
" ServerAlias localhost",
" Protocols h2 http/1.1",
" SSLEngine on",
f" SSLCertificateFile {creds1.cert_file}",
f" SSLCertificateKeyFile {creds1.pkey_file}",
f' DocumentRoot "{self._docs_dir}"',
]
)
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
# Alternate to domain1 with BROTLI compression
conf.extend([ # https host for domain1, h1 + h2
f'<VirtualHost *:{self.env.https_port}>',
f' ServerName {domain1brotli}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds1.cert_file}',
f' SSLCertificateKeyFile {creds1.pkey_file}',
f' DocumentRoot "{self._docs_dir}"',
' SetOutputFilter BROTLI_COMPRESS',
])
conf.extend(
[ # https host for domain1, h1 + h2
f"<VirtualHost *:{self.env.https_port}>",
f" ServerName {domain1brotli}",
" Protocols h2 http/1.1",
" SSLEngine on",
f" SSLCertificateFile {creds1.cert_file}",
f" SSLCertificateKeyFile {creds1.pkey_file}",
f' DocumentRoot "{self._docs_dir}"',
" SetOutputFilter BROTLI_COMPRESS",
]
)
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # plain http host for domain2
f'<VirtualHost *:{self.env.http_port}>',
f' ServerName {domain2}',
' ServerAlias localhost',
f' DocumentRoot "{self._docs_dir}"',
' Protocols h2c http/1.1',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # plain http host for domain2
f"<VirtualHost *:{self.env.http_port}>",
f" ServerName {domain2}",
" ServerAlias localhost",
f' DocumentRoot "{self._docs_dir}"',
" Protocols h2c http/1.1",
]
)
conf.extend(self._curltest_conf(domain2))
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for domain2, no h2
f'<VirtualHost *:{self.env.https_port}>',
f' ServerName {domain2}',
' Protocols http/1.1',
' SSLEngine on',
f' SSLCertificateFile {creds2.cert_file}',
f' SSLCertificateKeyFile {creds2.pkey_file}',
f' DocumentRoot "{self._docs_dir}/two"',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # https host for domain2, no h2
f"<VirtualHost *:{self.env.https_port}>",
f" ServerName {domain2}",
" Protocols http/1.1",
" SSLEngine on",
f" SSLCertificateFile {creds2.cert_file}",
f" SSLCertificateKeyFile {creds2.pkey_file}",
f' DocumentRoot "{self._docs_dir}/two"',
]
)
conf.extend(self._curltest_conf(domain2))
if domain2 in self._extra_configs:
conf.extend(self._extra_configs[domain2])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https host for expired domain
f'<VirtualHost *:{self.env.https_port}>',
f' ServerName {exp_domain}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {exp_creds.cert_file}',
f' SSLCertificateKeyFile {exp_creds.pkey_file}',
f' DocumentRoot "{self._docs_dir}/expired"',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # https host for expired domain
f"<VirtualHost *:{self.env.https_port}>",
f" ServerName {exp_domain}",
" Protocols h2 http/1.1",
" SSLEngine on",
f" SSLCertificateFile {exp_creds.cert_file}",
f" SSLCertificateKeyFile {exp_creds.pkey_file}",
f' DocumentRoot "{self._docs_dir}/expired"',
]
)
conf.extend(self._curltest_conf(exp_domain))
if exp_domain in self._extra_configs:
conf.extend(self._extra_configs[exp_domain])
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # http forward proxy
f'<VirtualHost *:{self.env.proxy_port}>',
f' ServerName {proxy_domain}',
' Protocols h2c http/1.1',
' ProxyRequests On',
' H2ProxyRequests On',
' ProxyVia On',
f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # http forward proxy
f"<VirtualHost *:{self.env.proxy_port}>",
f" ServerName {proxy_domain}",
" Protocols h2c http/1.1",
" ProxyRequests On",
" H2ProxyRequests On",
" ProxyVia On",
f" AllowCONNECT {self.env.http_port} {self.env.https_port}",
]
)
conf.extend(self._get_proxy_conf())
conf.extend([
'</VirtualHost>',
'',
])
conf.extend([ # https forward proxy
f'<VirtualHost *:{self.env.proxys_port}>',
f' ServerName {proxy_domain}',
' Protocols h2 http/1.1',
' SSLEngine on',
f' SSLCertificateFile {proxy_creds.cert_file}',
f' SSLCertificateKeyFile {proxy_creds.pkey_file}',
' ProxyRequests On',
' H2ProxyRequests On',
' ProxyVia On',
f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)
conf.extend(
[ # https forward proxy
f"<VirtualHost *:{self.env.proxys_port}>",
f" ServerName {proxy_domain}",
" Protocols h2 http/1.1",
" SSLEngine on",
f" SSLCertificateFile {proxy_creds.cert_file}",
f" SSLCertificateKeyFile {proxy_creds.pkey_file}",
" ProxyRequests On",
" H2ProxyRequests On",
" ProxyVia On",
f" AllowCONNECT {self.env.http_port} {self.env.https_port}",
]
)
conf.extend(self._get_proxy_conf())
conf.extend([
'</VirtualHost>',
'',
])
conf.extend(
[
"</VirtualHost>",
"",
]
)

fd.write("\n".join(conf))
with open(os.path.join(self._conf_dir, 'mime.types'), 'w') as fd:
fd.write("\n".join([
'text/html html',
'application/json json',
''
]))
with open(os.path.join(self._conf_dir, "mime.types"), "w") as fd:
fd.write(
"\n".join(
["text/html html", "application/json json", ""]
)
)
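
Two of black's rules account for nearly all of the churn above: string quotes are normalized to double quotes, and calls that no longer fit on one line are exploded one element per line with a trailing comma. Strings that themselves contain double quotes, such as the `f' DocumentRoot "{self._docs_dir}"'` entries, are left single-quoted because rewriting them would force escapes. A minimal sketch for previewing such a rewrite in memory, assuming `black` is installed as a library:

```python
import black

src = """conf.extend([
    f'<VirtualHost *:{port}>',
    f' DocumentRoot "{docs}"',
])
"""
# format_str() applies the same style as the `black` CLI: the first
# string gains double quotes, the second keeps its single quotes.
print(black.format_str(src, mode=black.Mode()))
```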

def _get_proxy_conf(self):
if self._proxy_auth_basic:
return [
' <Proxy "*">',
' AuthType Basic',
" AuthType Basic",
' AuthName "Restricted Proxy"',
' AuthBasicProvider file',
" AuthBasicProvider file",
f' AuthUserFile "{self._basic_passwords}"',
' Require user proxy',
' </Proxy>',
" Require user proxy",
" </Proxy>",
]
else:
return [
' <Proxy "*">',
' Require ip 127.0.0.1',
' </Proxy>',
" Require ip 127.0.0.1",
" </Proxy>",
]
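
`_get_proxy_conf()` gates the forward proxy either by HTTP Basic auth (`Require user proxy`) or, by default, to requests from 127.0.0.1. A test of the authenticated path might look like the sketch below; the fixture names follow `conftest.py`, but the test body and the `proxy:proxy` credentials are illustrative assumptions about what `self._basic_passwords` contains:

```python
def test_proxy_basic_auth(env, httpd):
    curl = CurlClient(env=env, run_dir=os.path.join(env.gen_dir, "curl"))
    url = f"http://{env.domain1}:{env.http_port}/data.json"
    r = curl.http_get(
        url=url,
        extra_args=[
            "--proxy",
            f"http://{env.proxy_domain}:{env.proxy_port}/",
            "--proxy-user",
            "proxy:proxy",  # assumed test credentials
        ],
    )
    assert r.exit_code == 0
```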

def _get_log_level(self):
if self.env.verbose > 3:
return 'trace2'
return "trace2"
if self.env.verbose > 2:
return 'trace1'
return "trace1"
if self.env.verbose > 1:
return 'debug'
return 'info'
return "debug"
return "info"

def _curltest_conf(self, servername) -> List[str]:
lines = []
if Httpd.MOD_CURLTEST is not None:
lines.extend([
' Redirect 302 /data.json.302 /data.json',
' Redirect 301 /curltest/echo301 /curltest/echo',
' Redirect 302 /curltest/echo302 /curltest/echo',
' Redirect 303 /curltest/echo303 /curltest/echo',
' Redirect 307 /curltest/echo307 /curltest/echo',
' <Location /curltest/sslinfo>',
' SSLOptions StdEnvVars',
' SetHandler curltest-sslinfo',
' </Location>',
' <Location /curltest/echo>',
' SetHandler curltest-echo',
' </Location>',
' <Location /curltest/put>',
' SetHandler curltest-put',
' </Location>',
' <Location /curltest/tweak>',
' SetHandler curltest-tweak',
' </Location>',
' Redirect 302 /tweak /curltest/tweak',
' <Location /curltest/1_1>',
' SetHandler curltest-1_1-required',
' </Location>',
' <Location /curltest/shutdown_unclean>',
' SetHandler curltest-tweak',
' SetEnv force-response-1.0 1',
' </Location>',
' SetEnvIf Request_URI "/shutdown_unclean" ssl-unclean=1',
])
lines.extend(
[
" Redirect 302 /data.json.302 /data.json",
" Redirect 301 /curltest/echo301 /curltest/echo",
" Redirect 302 /curltest/echo302 /curltest/echo",
" Redirect 303 /curltest/echo303 /curltest/echo",
" Redirect 307 /curltest/echo307 /curltest/echo",
" <Location /curltest/sslinfo>",
" SSLOptions StdEnvVars",
" SetHandler curltest-sslinfo",
" </Location>",
" <Location /curltest/echo>",
" SetHandler curltest-echo",
" </Location>",
" <Location /curltest/put>",
" SetHandler curltest-put",
" </Location>",
" <Location /curltest/tweak>",
" SetHandler curltest-tweak",
" </Location>",
" Redirect 302 /tweak /curltest/tweak",
" <Location /curltest/1_1>",
" SetHandler curltest-1_1-required",
" </Location>",
" <Location /curltest/shutdown_unclean>",
" SetHandler curltest-tweak",
" SetEnv force-response-1.0 1",
" </Location>",
' SetEnvIf Request_URI "/shutdown_unclean" ssl-unclean=1',
]
)
if self._auth_digest:
lines.extend([
f' <Directory {self.docs_dir}/restricted/digest>',
' AuthType Digest',
' AuthName "restricted area"',
f' AuthDigestDomain "https://{servername}"',
' AuthBasicProvider file',
f' AuthUserFile "{self._digest_passwords}"',
' Require valid-user',
' </Directory>',

])
lines.extend(
[
f" <Directory {self.docs_dir}/restricted/digest>",
" AuthType Digest",
' AuthName "restricted area"',
f' AuthDigestDomain "https://{servername}"',
" AuthBasicProvider file",
f' AuthUserFile "{self._digest_passwords}"',
" Require valid-user",
" </Directory>",
]
)
return lines
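
The `Redirect` rules and `curltest-*` handlers configured here are the endpoints most cases in the suite exercise; `/curltest/echo` reflects the request back, and `/data.json.302` bounces to `/data.json`. A minimal test in the suite's style, with fixtures as defined in `conftest.py` and an illustrative assertion:

```python
def test_follow_302(env, httpd):
    curl = CurlClient(env=env, run_dir=os.path.join(env.gen_dir, "curl"))
    url = f"http://{env.domain1}:{env.http_port}/data.json.302"
    # plain curl "-L" follows the 302 configured above
    r = curl.http_get(url=url, extra_args=["-L"])
    assert r.exit_code == 0
```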

def _init_curltest(self):
if Httpd.MOD_CURLTEST is not None:
return
local_dir = os.path.dirname(inspect.getfile(Httpd))
p = subprocess.run([self.env.apxs, '-c', 'mod_curltest.c'],
capture_output=True,
cwd=os.path.join(local_dir, 'mod_curltest'))
p = subprocess.run(
[self.env.apxs, "-c", "mod_curltest.c"],
capture_output=True,
cwd=os.path.join(local_dir, "mod_curltest"),
)
rv = p.returncode
if rv != 0:
log.error(f"compiling mod_curltest failed: {p.stderr}")
raise Exception(f"compiling mod_curltest failed: {p.stderr}")
Httpd.MOD_CURLTEST = os.path.join(
local_dir, 'mod_curltest/.libs/mod_curltest.so')
local_dir, "mod_curltest/.libs/mod_curltest.so"
)

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -40,7 +40,6 @@ log = logging.getLogger(__name__)


class Nghttpx:

def __init__(self, env: Env, port: int, https_port: int, name: str):
self.env = env
self._name = name
@ -48,11 +47,11 @@ class Nghttpx:
self._https_port = https_port
self._cmd = env.nghttpx
self._run_dir = os.path.join(env.gen_dir, name)
self._pid_file = os.path.join(self._run_dir, 'nghttpx.pid')
self._conf_file = os.path.join(self._run_dir, 'nghttpx.conf')
self._error_log = os.path.join(self._run_dir, 'nghttpx.log')
self._stderr = os.path.join(self._run_dir, 'nghttpx.stderr')
self._tmp_dir = os.path.join(self._run_dir, 'tmp')
self._pid_file = os.path.join(self._run_dir, "nghttpx.pid")
self._conf_file = os.path.join(self._run_dir, "nghttpx.conf")
self._error_log = os.path.join(self._run_dir, "nghttpx.log")
self._stderr = os.path.join(self._run_dir, "nghttpx.stderr")
self._tmp_dir = os.path.join(self._run_dir, "tmp")
self._process: Optional[subprocess.Popen] = None
self._rmf(self._pid_file)
self._rmf(self._error_log)
@ -113,15 +112,17 @@ class Nghttpx:
return False
while datetime.now() < end_wait:
try:
log.debug(f'waiting for nghttpx({running.pid}) to exit.')
log.debug(f"waiting for nghttpx({running.pid}) to exit.")
running.wait(2)
log.debug(f'nghttpx({running.pid}) terminated -> {running.returncode}')
log.debug(
f"nghttpx({running.pid}) terminated -> {running.returncode}"
)
break
except subprocess.TimeoutExpired:
log.warning(f'nghttpx({running.pid}), not shut down yet.')
log.warning(f"nghttpx({running.pid}), not shut down yet.")
os.kill(running.pid, signal.SIGQUIT)
if datetime.now() >= end_wait:
log.error(f'nghttpx({running.pid}), terminate forcefully.')
log.error(f"nghttpx({running.pid}), terminate forcefully.")
os.kill(running.pid, signal.SIGKILL)
running.terminate()
running.wait(1)
@ -133,21 +134,34 @@ class Nghttpx:
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
check_url = f'https://{self.env.domain1}:{self._https_port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
check_url = f"https://{self.env.domain1}:{self._https_port}/"
r = curl.http_get(
url=check_url,
extra_args=[
"--trace",
"curl.trace",
"--trace-time",
"--connect-timeout",
"1",
],
)
else:
check_url = f'https://{self.env.domain1}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--http3-only', '--connect-timeout', '1'
])
check_url = f"https://{self.env.domain1}:{self._port}/"
r = curl.http_get(
url=check_url,
extra_args=[
"--trace",
"curl.trace",
"--trace-time",
"--http3-only",
"--connect-timeout",
"1",
],
)
if r.exit_code != 0:
return True
log.debug(f'waiting for nghttpx to stop responding: {r}')
time.sleep(.1)
log.debug(f"waiting for nghttpx to stop responding: {r}")
time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
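
This `wait_dead` loop, and the `wait_live` variant that follows, share one shape: probe with a short-lived curl call, sleep 0.1s, give up when a `datetime` deadline passes. A generic helper of that shape, hypothetical and shown only to make the pattern explicit, could serve all of these servers:

```python
import time
from datetime import datetime, timedelta


def wait_until(check, timeout: timedelta, interval: float = 0.1) -> bool:
    """Poll check() until it returns True or the deadline passes."""
    deadline = datetime.now() + timeout
    while datetime.now() < deadline:
        if check():
            return True
        time.sleep(interval)
    return False


# usage sketch: wait for the server to stop answering
# wait_until(lambda: curl.http_get(url=check_url).exit_code != 0, timeout)
```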

@ -156,21 +170,34 @@ class Nghttpx:
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
check_url = f'https://{self.env.domain1}:{self._https_port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
check_url = f"https://{self.env.domain1}:{self._https_port}/"
r = curl.http_get(
url=check_url,
extra_args=[
"--trace",
"curl.trace",
"--trace-time",
"--connect-timeout",
"1",
],
)
else:
check_url = f'https://{self.env.domain1}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--http3-only', '--trace', 'curl.trace', '--trace-time',
'--connect-timeout', '1'
])
check_url = f"https://{self.env.domain1}:{self._port}/"
r = curl.http_get(
url=check_url,
extra_args=[
"--http3-only",
"--trace",
"curl.trace",
"--trace-time",
"--connect-timeout",
"1",
],
)
if r.exit_code == 0:
return True
log.debug(f'waiting for nghttpx to become responsive: {r}')
time.sleep(.1)
log.debug(f"waiting for nghttpx to become responsive: {r}")
time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False

@ -183,18 +210,19 @@ class Nghttpx:
return os.makedirs(path)

def _write_config(self):
with open(self._conf_file, 'w') as fd:
fd.write('# nghttpx test config')
fd.write("\n".join([
'# do we need something here?'
]))
with open(self._conf_file, "w") as fd:
fd.write("# nghttpx test config")
fd.write("\n".join(["# do we need something here?"]))


class NghttpxQuic(Nghttpx):

def __init__(self, env: Env):
super().__init__(env=env, name='nghttpx-quic', port=env.h3_port,
https_port=env.nghttpx_https_port)
super().__init__(
env=env,
name="nghttpx-quic",
port=env.h3_port,
https_port=env.nghttpx_https_port,
)

def start(self, wait_live=True):
self._mkpath(self._tmp_dir)
@ -204,25 +232,25 @@ class NghttpxQuic(Nghttpx):
assert creds # convince pytype this isn't None
args = [
self._cmd,
f'--frontend=*,{self.env.h3_port};quic',
'--frontend-quic-early-data',
f'--frontend=*,{self.env.nghttpx_https_port};tls',
f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls',
f'--backend=127.0.0.1,{self.env.http_port}',
'--log-level=INFO',
f'--pid-file={self._pid_file}',
f'--errorlog-file={self._error_log}',
f'--conf={self._conf_file}',
f'--cacert={self.env.ca.cert_file}',
f"--frontend=*,{self.env.h3_port};quic",
"--frontend-quic-early-data",
f"--frontend=*,{self.env.nghttpx_https_port};tls",
f"--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls",
f"--backend=127.0.0.1,{self.env.http_port}",
"--log-level=INFO",
f"--pid-file={self._pid_file}",
f"--errorlog-file={self._error_log}",
f"--conf={self._conf_file}",
f"--cacert={self.env.ca.cert_file}",
creds.pkey_file,
creds.cert_file,
'--frontend-http3-window-size=1M',
'--frontend-http3-max-window-size=10M',
'--frontend-http3-connection-window-size=10M',
'--frontend-http3-max-connection-window-size=100M',
"--frontend-http3-window-size=1M",
"--frontend-http3-max-window-size=10M",
"--frontend-http3-connection-window-size=10M",
"--frontend-http3-max-connection-window-size=100M",
# f'--frontend-quic-debug-log',
]
ngerr = open(self._stderr, 'a')
ngerr = open(self._stderr, "a")
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
@ -230,10 +258,10 @@ class NghttpxQuic(Nghttpx):


class NghttpxFwd(Nghttpx):

def __init__(self, env: Env):
super().__init__(env=env, name='nghttpx-fwd', port=env.h2proxys_port,
https_port=0)
super().__init__(
env=env, name="nghttpx-fwd", port=env.h2proxys_port, https_port=0
)

def start(self, wait_live=True):
self._mkpath(self._tmp_dir)
@ -243,18 +271,18 @@ class NghttpxFwd(Nghttpx):
assert creds # convince pytype this isn't None
args = [
self._cmd,
'--http2-proxy',
f'--frontend=*,{self.env.h2proxys_port}',
f'--backend=127.0.0.1,{self.env.proxy_port}',
'--log-level=INFO',
f'--pid-file={self._pid_file}',
f'--errorlog-file={self._error_log}',
f'--conf={self._conf_file}',
f'--cacert={self.env.ca.cert_file}',
"--http2-proxy",
f"--frontend=*,{self.env.h2proxys_port}",
f"--backend=127.0.0.1,{self.env.proxy_port}",
"--log-level=INFO",
f"--pid-file={self._pid_file}",
f"--errorlog-file={self._error_log}",
f"--conf={self._conf_file}",
f"--cacert={self.env.ca.cert_file}",
creds.pkey_file,
creds.cert_file,
]
ngerr = open(self._stderr, 'a')
ngerr = open(self._stderr, "a")
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
@ -264,12 +292,12 @@ class NghttpxFwd(Nghttpx):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
check_url = f"https://{self.env.proxy_domain}:{self.env.h2proxys_port}/"
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
log.debug(f'waiting for nghttpx-fwd to stop responding: {r}')
time.sleep(.1)
log.debug(f"waiting for nghttpx-fwd to stop responding: {r}")
time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False

@ -277,13 +305,13 @@ class NghttpxFwd(Nghttpx):
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
r = curl.http_get(url=check_url, extra_args=[
'--trace', 'curl.trace', '--trace-time'
])
check_url = f"https://{self.env.proxy_domain}:{self.env.h2proxys_port}/"
r = curl.http_get(
url=check_url, extra_args=["--trace", "curl.trace", "--trace-time"]
)
if r.exit_code == 0:
return True
log.debug(f'waiting for nghttpx-fwd to become responsive: {r}')
time.sleep(.1)
log.debug(f"waiting for nghttpx-fwd to become responsive: {r}")
time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -37,7 +37,7 @@ def alloc_ports(port_specs: Dict[str, int]) -> Dict[str, int]:
for name, ptype in port_specs.items():
try:
s = socket.socket(type=ptype)
s.bind(('', 0))
s.bind(("", 0))
ports[name] = s.getsockname()[1]
socks.append(s)
except Exception as e:

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -39,25 +39,24 @@ log = logging.getLogger(__name__)


class VsFTPD:

def __init__(self, env: Env, with_ssl=False):
self.env = env
self._cmd = env.vsftpd
self._scheme = 'ftp'
self._scheme = "ftp"
self._with_ssl = with_ssl
if self._with_ssl:
self._port = self.env.ftps_port
name = 'vsftpds'
name = "vsftpds"
else:
self._port = self.env.ftp_port
name = 'vsftpd'
name = "vsftpd"
self._vsftpd_dir = os.path.join(env.gen_dir, name)
self._run_dir = os.path.join(self._vsftpd_dir, 'run')
self._docs_dir = os.path.join(self._vsftpd_dir, 'docs')
self._tmp_dir = os.path.join(self._vsftpd_dir, 'tmp')
self._conf_file = os.path.join(self._vsftpd_dir, 'test.conf')
self._pid_file = os.path.join(self._vsftpd_dir, 'vsftpd.pid')
self._error_log = os.path.join(self._vsftpd_dir, 'vsftpd.log')
self._run_dir = os.path.join(self._vsftpd_dir, "run")
self._docs_dir = os.path.join(self._vsftpd_dir, "docs")
self._tmp_dir = os.path.join(self._vsftpd_dir, "tmp")
self._conf_file = os.path.join(self._vsftpd_dir, "test.conf")
self._pid_file = os.path.join(self._vsftpd_dir, "vsftpd.pid")
self._error_log = os.path.join(self._vsftpd_dir, "vsftpd.log")
self._process = None

self.clear_logs()
@ -116,9 +115,9 @@ class VsFTPD:
self._write_config()
args = [
self._cmd,
f'{self._conf_file}',
f"{self._conf_file}",
]
procerr = open(self._error_log, 'a')
procerr = open(self._error_log, "a")
self._process = subprocess.Popen(args=args, stderr=procerr)
if self._process.returncode is not None:
return False
@ -128,12 +127,12 @@ class VsFTPD:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'{self._scheme}://{self.domain}:{self.port}/'
r = curl.ftp_get(urls=[check_url], extra_args=['-v'])
check_url = f"{self._scheme}://{self.domain}:{self.port}/"
r = curl.ftp_get(urls=[check_url], extra_args=["-v"])
if r.exit_code != 0:
return True
log.debug(f'waiting for vsftpd to stop responding: {r}')
time.sleep(.1)
log.debug(f"waiting for vsftpd to stop responding: {r}")
time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False

@ -141,14 +140,15 @@ class VsFTPD:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
check_url = f'{self._scheme}://{self.domain}:{self.port}/'
r = curl.ftp_get(urls=[check_url], extra_args=[
'--trace', 'curl-start.trace', '--trace-time'
])
check_url = f"{self._scheme}://{self.domain}:{self.port}/"
r = curl.ftp_get(
urls=[check_url],
extra_args=["--trace", "curl-start.trace", "--trace-time"],
)
if r.exit_code == 0:
return True
log.debug(f'waiting for vsftpd to become responsive: {r}')
time.sleep(.1)
log.debug(f"waiting for vsftpd to become responsive: {r}")
time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False

@ -164,34 +164,36 @@ class VsFTPD:
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
conf = [ # base server config
'listen=YES',
'run_as_launching_user=YES',
'#listen_address=127.0.0.1',
f'listen_port={self.port}',
'local_enable=NO',
'anonymous_enable=YES',
f'anon_root={self._docs_dir}',
'dirmessage_enable=YES',
'write_enable=YES',
'anon_upload_enable=YES',
'log_ftp_protocol=YES',
'xferlog_enable=YES',
'xferlog_std_format=NO',
f'vsftpd_log_file={self._error_log}',
'\n',
"listen=YES",
"run_as_launching_user=YES",
"#listen_address=127.0.0.1",
f"listen_port={self.port}",
"local_enable=NO",
"anonymous_enable=YES",
f"anon_root={self._docs_dir}",
"dirmessage_enable=YES",
"write_enable=YES",
"anon_upload_enable=YES",
"log_ftp_protocol=YES",
"xferlog_enable=YES",
"xferlog_std_format=NO",
f"vsftpd_log_file={self._error_log}",
"\n",
]
if self._with_ssl:
creds = self.env.get_credentials(self.domain)
assert creds # convince pytype this isn't None
conf.extend([
'ssl_enable=YES',
'debug_ssl=YES',
'allow_anon_ssl=YES',
f'rsa_cert_file={creds.cert_file}',
f'rsa_private_key_file={creds.pkey_file}',
# require_ssl_reuse=YES means ctrl and data connection need to use the same session
'require_ssl_reuse=NO',
])
conf.extend(
[
"ssl_enable=YES",
"debug_ssl=YES",
"allow_anon_ssl=YES",
f"rsa_cert_file={creds.cert_file}",
f"rsa_private_key_file={creds.pkey_file}",
# require_ssl_reuse=YES means ctrl and data connection need to use the same session
"require_ssl_reuse=NO",
]
)

with open(self._conf_file, 'w') as fd:
with open(self._conf_file, "w") as fd:
fd.write("\n".join(conf))

@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@ -46,11 +46,13 @@ async def run_server(port):


def main():
parser = argparse.ArgumentParser(prog='scorecard', description="""
parser = argparse.ArgumentParser(
prog="scorecard",
description="""
Run a websocket echo server.
""")
parser.add_argument("--port", type=int,
default=9876, help="port to listen on")
""",
)
parser.add_argument("--port", type=int, default=9876, help="port to listen on")
args = parser.parse_args()

logging.basicConfig(