vtls and h2 improvements

- eliminate the receive loop in vtls that tried to fill the buffer; it may
  lead to partial reads of data, which is counterproductive
- let http2 instead loop smarter to process pending network data without
  transfer switches

scorecard improvements:

- do not start caddy when only httpd is requested
- allow curl -v output to the stderr file via --curl-verbose

Closes #10891
parent 3da642c4f0, commit 8f50e393ab
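The change described in the message above is easiest to read as a question of which layer owns the read loop. Below is a small, self-contained C sketch of that idea only; every name in it (mock_tls_read, fill_to_full, read_once, h2_ingress) is a hypothetical stand-in, not a curl API, and the sizes are arbitrary. The old shape loops inside the TLS layer trying to fill the caller's buffer; the new shape returns whatever one underlying read produced and lets the HTTP/2 ingress side decide whether to read again.

#include <stdio.h>
#include <string.h>

#define WOULD_BLOCK -2  /* stand-in for a CURLE_AGAIN-style "no data now" signal */

/* pretend TLS backend: hands out one small "record" per call until the
   simulated socket has nothing left, then reports "would block" */
static int mock_tls_read(char *buf, size_t len, int *avail)
{
  size_t n = 5;
  if(*avail <= 0)
    return WOULD_BLOCK;
  if(n > len)
    n = len;
  if(n > (size_t)*avail)
    n = (size_t)*avail;
  memset(buf, 'x', n);
  *avail -= (int)n;
  return (int)n;
}

/* old shape: the TLS filter itself loops, trying to fill the whole buffer */
static int fill_to_full(char *buf, size_t len, int *avail)
{
  size_t off = 0;
  while(off < len) {
    int n = mock_tls_read(buf + off, len - off, avail);
    if(n == WOULD_BLOCK)
      break;
    off += (size_t)n;
  }
  return (int)off;
}

/* new shape: one underlying read per call, nothing more */
static int read_once(char *buf, size_t len, int *avail)
{
  return mock_tls_read(buf, len, avail);
}

/* the HTTP/2-like caller owns the loop and its own stop conditions */
static void h2_ingress(int *avail)
{
  char buf[16];
  size_t buffered = 0, enough = 12;
  while(buffered < enough) {
    int n = read_once(buf, sizeof(buf), avail);
    if(n == WOULD_BLOCK)
      break;
    buffered += (size_t)n;
    printf("ingress: +%d bytes, %zu buffered\n", n, buffered);
  }
}

int main(void)
{
  int avail = 12;
  char buf[16];
  printf("old: filled %d bytes in one call\n",
         fill_to_full(buf, sizeof(buf), &avail));
  avail = 12;
  h2_ingress(&avail);
  return 0;
}

In the patch itself, ssl_cf_recv keeps only a single Curl_ssl->recv_plain() call, while h2_progress_ingress loops on Curl_bufq_slurp() until its own conditions (connection and stream still open, input consumed, receive buffer not full) say stop; see the lib/http2.c and vtls hunks below.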
lib/bufq.c (17 changed lines)

@@ -76,12 +76,19 @@ static size_t chunk_read(struct buf_chunk *chunk,
  unsigned char *p = &chunk->x.data[chunk->r_offset];
  size_t n = chunk->w_offset - chunk->r_offset;
  DEBUGASSERT(chunk->w_offset >= chunk->r_offset);
  if(n) {
    n = CURLMIN(n, len);
    memcpy(buf, p, n);
    chunk->r_offset += n;
  if(!n) {
    return 0;
  }
  else if(n <= len) {
    memcpy(buf, p, n);
    chunk->r_offset = chunk->w_offset = 0;
    return n;
  }
  else {
    memcpy(buf, p, len);
    chunk->r_offset += len;
    return len;
  }
  return n;
}

static ssize_t chunk_slurpn(struct buf_chunk *chunk, size_t max_len,

lib/http2.c (11 changed lines)

@@ -1643,6 +1643,7 @@ static ssize_t stream_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
  ssize_t nread = -1;

  *err = CURLE_AGAIN;
  drained_transfer(cf, data);
  if(!Curl_bufq_is_empty(&stream->h2_recvbuf)) {
    nread = Curl_bufq_read(&stream->h2_recvbuf,
                           (unsigned char *)buf, len, err);

@@ -1682,7 +1683,6 @@ static CURLcode h2_progress_ingress(struct Curl_cfilter *cf,
  struct HTTP *stream = data->req.p.http;
  CURLcode result = CURLE_OK;
  ssize_t nread;
  bool keep_reading = TRUE;

  /* Process network input buffer fist */
  if(!Curl_bufq_is_empty(&ctx->inbufq)) {

@@ -1694,12 +1694,11 @@ static CURLcode h2_progress_ingress(struct Curl_cfilter *cf,

  /* Receive data from the "lower" filters, e.g. network until
   * it is time to stop or we have enough data for this stream */
  while(keep_reading &&
        !ctx->conn_closed && /* not closed the connection */
  while(!ctx->conn_closed && /* not closed the connection */
        !stream->closed && /* nor the stream */
        Curl_bufq_is_empty(&ctx->inbufq) && /* and we consumed our input */
        !Curl_bufq_is_full(&stream->h2_recvbuf) && /* enough? */
        Curl_bufq_len(&stream->h2_recvbuf) < data->set.buffer_size) {
        1 /* Curl_bufq_len(&stream->h2_recvbuf) < data->set.buffer_size */) {

    nread = Curl_bufq_slurp(&ctx->inbufq, nw_in_reader, cf, &result);
    DEBUGF(LOG_CF(data, cf, "read %zd bytes nw data -> %zd, %d",

@@ -1716,7 +1715,6 @@ static CURLcode h2_progress_ingress(struct Curl_cfilter *cf,
      break;
    }

    keep_reading = Curl_bufq_is_full(&ctx->inbufq);
    if(h2_process_pending_input(cf, data, &result))
      return result;
  }

@@ -1755,9 +1753,6 @@ static ssize_t cf_h2_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
      goto out;

    nread = stream_recv(cf, data, buf, len, err);
    if(Curl_bufq_is_empty(&stream->h2_recvbuf)) {
      drained_transfer(cf, data);
    }
  }

  if(nread > 0) {

lib/vtls/vtls.c

@@ -1578,45 +1578,20 @@ static ssize_t ssl_cf_recv(struct Curl_cfilter *cf,
                           CURLcode *err)
{
  struct cf_call_data save;
  ssize_t nread, n;
  ssize_t nread;

  CF_DATA_SAVE(save, cf, data);
  /* SSL backends like OpenSSL/wolfSSL prefer to give us 1 TLS record content
   * at a time when reading. But commonly, more data is available.
   * So we try to fill the buffer we are called with until we
   * are full or no more data is available. */
  *err = CURLE_OK;
  nread = 0;
  while(len) {
    n = Curl_ssl->recv_plain(cf, data, buf, len, err);
    if(n < 0) {
      if(*err != CURLE_AGAIN) {
        /* serious err, fail */
        nread = -1;
        goto out;
      }
      /* would block, return this to caller if we have read nothing so far,
       * otherwise return amount read without error. */
      if(nread == 0)
        nread = -1;
      else
        *err = CURLE_OK;
      goto out;
    }
    else if(n == 0) {
      /* eof */
      break;
    }
    else {
      DEBUGASSERT((size_t)n <= len);
      nread += (size_t)n;
      buf += (size_t)n;
      len -= (size_t)n;
    }
  nread = Curl_ssl->recv_plain(cf, data, buf, len, err);
  if(nread > 0) {
    DEBUGASSERT((size_t)nread <= len);
  }
out:
  CF_DATA_RESTORE(cf, save);
  return nread;
  else if(nread == 0) {
    /* eof */
    *err = CURLE_OK;
  }
  DEBUGF(LOG_CF(data, cf, "cf_recv(len=%zu) -> %zd, %d", len, nread, *err));
  CF_DATA_RESTORE(cf, save);
  return nread;
}

static int ssl_cf_get_select_socks(struct Curl_cfilter *cf,

tests/http/scorecard.py

@@ -28,10 +28,10 @@ import argparse
import json
import logging
import os
import re
import sys
from datetime import datetime
from statistics import mean
from typing import Dict, Any
from typing import Dict, Any, Optional, List

from testenv import Env, Httpd, Nghttpx, CurlClient, Caddy, ExecResult

@@ -45,12 +45,18 @@ class ScoreCardException(Exception):

class ScoreCard:

    def __init__(self):
        self.verbose = 0
        self.env = None
        self.httpd = None
        self.nghttpx = None
        self.caddy = None
    def __init__(self, env: Env,
                 httpd: Optional[Httpd],
                 nghttpx: Optional[Nghttpx],
                 caddy: Optional[Caddy],
                 verbose: int,
                 curl_verbose: int):
        self.verbose = verbose
        self.env = env
        self.httpd = httpd
        self.nghttpx = nghttpx
        self.caddy = caddy
        self._silent_curl = not curl_verbose

    def info(self, msg):
        if self.verbose > 0:

@@ -69,9 +75,10 @@ class ScoreCard:
        hs_samples = []
        errors = []
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=True)
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            url = f'https://{authority}/'
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True)
            r = curl.http_download(urls=[url], alpn_proto=proto,
                                   no_save=True)
            if r.exit_code == 0 and len(r.stats) == 1:
                c_samples.append(r.stats[0]['time_connect'])
                hs_samples.append(r.stats[0]['time_appconnect'])

@@ -97,7 +104,7 @@ class ScoreCard:
        hs_samples = []
        errors = []
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=True)
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            args = [
                '--http3-only' if proto == 'h3' else '--http2',
                f'--{ipv}', f'https://{authority}/'

@@ -108,8 +115,10 @@ class ScoreCard:
                hs_samples.append(r.stats[0]['time_appconnect'])
            else:
                errors.append(f'exit={r.exit_code}')
        props[authority][f'{ipv}-connect'] = mean(c_samples) if len(c_samples) else -1
        props[authority][f'{ipv}-handshake'] = mean(hs_samples) if len(hs_samples) else -1
        props[authority][f'{ipv}-connect'] = mean(c_samples) \
            if len(c_samples) else -1
        props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
            if len(hs_samples) else -1
        props[authority][f'{ipv}-errors'] = errors
        self.info('ok.\n')
        return props

@@ -142,7 +151,7 @@ class ScoreCard:
        errors = []
        self.info(f'single...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=True)
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False)
            err = self._check_downloads(r, count)

@@ -165,7 +174,7 @@ class ScoreCard:
        url = f'{url}?[0-{count - 1}]'
        self.info(f'serial...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=True)
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False)
            err = self._check_downloads(r, count)

@@ -188,10 +197,11 @@ class ScoreCard:
        url = f'{url}?[0-{count - 1}]'
        self.info(f'parallel...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=True)
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False,
                                   extra_args=['--parallel', '--parallel-max', str(count)])
                                   extra_args=['--parallel',
                                               '--parallel-max', str(count)])
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)

@@ -205,20 +215,21 @@ class ScoreCard:
            'errors': errors
        }

    def download_url(self, url: str, proto: str, count: int):
        self.info(f' {url}: ')
    def download_url(self, label: str, url: str, proto: str, count: int):
        self.info(f' {count}x{label}: ')
        props = {
            'single': self.transfer_single(url=url, proto=proto, count=10),
            'serial': self.transfer_serial(url=url, proto=proto, count=count),
            'parallel': self.transfer_parallel(url=url, proto=proto, count=count),
            'parallel': self.transfer_parallel(url=url, proto=proto,
                                               count=count),
        }
        self.info(f'ok.\n')
        return props

    def downloads(self, proto: str, test_httpd: bool = True,
                  test_caddy: bool = True) -> Dict[str, Any]:
    def downloads(self, proto: str, count: int,
                  fsizes: List[int]) -> Dict[str, Any]:
        scores = {}
        if test_httpd:
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'

@@ -228,43 +239,47 @@ class ScoreCard:
                via = 'httpd'
            descr = f'port {port}'
            self.info(f'{via} downloads\n')
            self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score1.data', fsize=1024*1024)
            url1 = f'https://{self.env.domain1}:{port}/score1.data'
            self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score10.data', fsize=10*1024*1024)
            url10 = f'https://{self.env.domain1}:{port}/score10.data'
            self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score100.data', fsize=100*1024*1024)
            url100 = f'https://{self.env.domain1}:{port}/score100.data'
            scores[via] = {
                'description': descr,
                '1MB': self.download_url(url=url1, proto=proto, count=50),
                '10MB': self.download_url(url=url10, proto=proto, count=50),
                '100MB': self.download_url(url=url100, proto=proto, count=50),
            }
        if test_caddy and self.caddy:
            for fsize in fsizes:
                label = f'{int(fsize / 1024)}KB' if fsize < 1024*1024 else \
                    f'{int(fsize / (1024 * 1024))}MB'
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy downloads\n')
            self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score1.data', fsize=1024 * 1024)
            url1 = f'https://{self.env.domain1}:{port}/score1.data'
            self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score10.data', fsize=10 * 1024 * 1024)
            url10 = f'https://{self.env.domain1}:{port}/score10.data'
            self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score100.data', fsize=100 * 1024 * 1024)
            url100 = f'https://{self.env.domain1}:{port}/score100.data'
            scores[via] = {
                'description': descr,
                '1MB': self.download_url(url=url1, proto=proto, count=50),
                '10MB': self.download_url(url=url10, proto=proto, count=50),
                '100MB': self.download_url(url=url100, proto=proto, count=50),
            }
            for fsize in fsizes:
                label = f'{int(fsize / 1024)}KB' if fsize < 1024*1024 else \
                    f'{int(fsize / (1024 * 1024))}MB'
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        return scores

    def do_requests(self, url: str, proto: str, count: int, max_parallel: int = 1):
    def do_requests(self, url: str, proto: str, count: int,
                    max_parallel: int = 1):
        sample_size = 1
        samples = []
        errors = []
        url = f'{url}?[0-{count - 1}]'
        extra_args = ['--parallel', '--parallel-max', str(max_parallel)] if max_parallel > 1 else []
        extra_args = ['--parallel', '--parallel-max', str(max_parallel)] \
            if max_parallel > 1 else []
        self.info(f'{max_parallel}...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env)

@@ -275,7 +290,7 @@ class ScoreCard:
            if err:
                errors.append(err)
            else:
                for s in r.stats:
                for _ in r.stats:
                    samples.append(count / r.duration.total_seconds())
        return {
            'count': count,

@@ -288,18 +303,21 @@ class ScoreCard:
        self.info(f' {url}: ')
        props = {
            'serial': self.do_requests(url=url, proto=proto, count=count),
            'par-6': self.do_requests(url=url, proto=proto, count=count, max_parallel=6),
            'par-25': self.do_requests(url=url, proto=proto, count=count, max_parallel=25),
            'par-50': self.do_requests(url=url, proto=proto, count=count, max_parallel=50),
            'par-100': self.do_requests(url=url, proto=proto, count=count, max_parallel=100),
            'par-6': self.do_requests(url=url, proto=proto, count=count,
                                      max_parallel=6),
            'par-25': self.do_requests(url=url, proto=proto, count=count,
                                       max_parallel=25),
            'par-50': self.do_requests(url=url, proto=proto, count=count,
                                       max_parallel=50),
            'par-100': self.do_requests(url=url, proto=proto, count=count,
                                        max_parallel=100),
        }
        self.info(f'ok.\n')
        return props

    def requests(self, proto: str, test_httpd: bool = True,
                 test_caddy: bool = True) -> Dict[str, Any]:
    def requests(self, proto: str) -> Dict[str, Any]:
        scores = {}
        if test_httpd:
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'

@@ -309,18 +327,20 @@ class ScoreCard:
                via = 'httpd'
            descr = f'port {port}'
            self.info(f'{via} requests\n')
            self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='reqs10.data', fsize=10*1024)
            self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                 fname='reqs10.data', fsize=10*1024)
            url1 = f'https://{self.env.domain1}:{port}/reqs10.data'
            scores[via] = {
                'description': descr,
                '10KB': self.requests_url(url=url1, proto=proto, count=10000),
            }
        if test_caddy and self.caddy:
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy requests\n')
            self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='req10.data', fsize=10 * 1024)
            self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                 fname='req10.data', fsize=10 * 1024)
            url1 = f'https://{self.env.domain1}:{port}/req10.data'
            scores[via] = {
                'description': descr,

@@ -330,10 +350,9 @@ class ScoreCard:

    def score_proto(self, proto: str,
                    handshakes: bool = True,
                    downloads: bool = True,
                    requests: bool = True,
                    test_httpd: bool = True,
                    test_caddy: bool = True):
                    downloads: Optional[List[int]] = None,
                    download_count: int = 50,
                    requests: bool = True):
        self.info(f"scoring {proto}\n")
        p = {}
        if proto == 'h3':

@@ -355,7 +374,8 @@ class ScoreCard:
        elif proto == 'h1' or proto == 'http/1.1':
            proto = 'http/1.1'
            p['name'] = proto
            p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper') else 'native'
            p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper')\
                else 'native'
        else:
            raise ScoreCardException(f"unknown protocol: {proto}")

@@ -364,20 +384,18 @@ class ScoreCard:
        p['version'] = Env.curl_lib_version(p['implementation'])

        score = {
            'curl': self.env.curl_version(),
            'curl': self.env.curl_fullname(),
            'os': self.env.curl_os(),
            'protocol': p,
        }
        if handshakes:
            score['handshakes'] = self.handshakes(proto=proto)
        if downloads:
        if downloads and len(downloads) > 0:
            score['downloads'] = self.downloads(proto=proto,
                                                test_httpd=test_httpd,
                                                test_caddy=test_caddy)
                                                count=download_count,
                                                fsizes=downloads)
        if requests:
            score['requests'] = self.requests(proto=proto,
                                              test_httpd=test_httpd,
                                              test_caddy=test_caddy)
            score['requests'] = self.requests(proto=proto)
        self.info("\n")
        return score

@@ -394,8 +412,7 @@ class ScoreCard:
        return f'{val:0.000f} r/s' if val >= 0 else '--'

    def print_score(self, score):
        print(f'{score["protocol"]["name"].upper()} in curl {score["curl"]} ({score["os"]}) via '
              f'{score["protocol"]["implementation"]}/{score["protocol"]["version"]} ')
        print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
        if 'handshakes' in score:
            print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
            print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '

@@ -404,12 +421,13 @@ class ScoreCard:
                print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
                      f'{self.fmt_ms(val["ipv6-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv6-handshake"]):>12} {"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
                      f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
                      f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
                      )
        if 'downloads' in score:
            print('Downloads')
            print(f' {"Server":<8} {"Size":>8} '
                  f'{"Single":>12} {"Serial":>12} {"Parallel":>12} {"Errors":<20}')
            print(f' {"Server":<8} {"Size":>8} {"Single":>12} {"Serial":>12}'
                  f' {"Parallel":>12} {"Errors":<20}')
            skeys = {}
            for dkey, dval in score["downloads"].items():
                for k in dval.keys():

@@ -450,105 +468,132 @@ class ScoreCard:
                  f'{self.fmt_reqs(sval["par-100"]["speed"]):>12} '
                  f' {"/".join(errors):<20}')

    def main(self):
        parser = argparse.ArgumentParser(prog='scorecard', description="""
            Run a range of tests to give a scorecard for a HTTP protocol
            'h3' or 'h2' implementation in curl.
            """)
        parser.add_argument("-v", "--verbose", action='count', default=1,
                            help="log more output on stderr")
        parser.add_argument("-j", "--json", action='store_true', default=False,
                            help="print json instead of text")
        parser.add_argument("-H", "--handshakes", action='store_true', default=False,
                            help="evaluate handshakes only")
        parser.add_argument("-d", "--downloads", action='store_true', default=False,
                            help="evaluate downloads only")
        parser.add_argument("-r", "--requests", action='store_true', default=False,
                            help="evaluate requests only")
        parser.add_argument("--httpd", action='store_true', default=False,
                            help="evaluate httpd server only")
        parser.add_argument("--caddy", action='store_true', default=False,
                            help="evaluate caddy server only")
        parser.add_argument("protocol", default='h2', nargs='?', help="Name of protocol to score")
        args = parser.parse_args()

        self.verbose = args.verbose
        if args.verbose > 0:
            console = logging.StreamHandler()
            console.setLevel(logging.INFO)
            console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
            logging.getLogger('').addHandler(console)
def parse_size(s):
    m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
    if m is None:
        raise Exception(f'unrecognized size: {s}')
    size = int(m.group(1))
    if m.group(2).lower() == 'kb':
        size *= 1024
    elif m.group(2).lower() == 'mb':
        size *= 1024 * 1024
    elif m.group(2).lower() == 'gb':
        size *= 1024 * 1024 * 1024
    return size

        protocol = args.protocol
        handshakes = True
        downloads = True
        requests = True
        test_httpd = protocol != 'h3'

def main():
    parser = argparse.ArgumentParser(prog='scorecard', description="""
        Run a range of tests to give a scorecard for a HTTP protocol
        'h3' or 'h2' implementation in curl.
        """)
    parser.add_argument("-v", "--verbose", action='count', default=1,
                        help="log more output on stderr")
    parser.add_argument("-j", "--json", action='store_true',
                        default=False, help="print json instead of text")
    parser.add_argument("-H", "--handshakes", action='store_true',
                        default=False, help="evaluate handshakes only")
    parser.add_argument("-d", "--downloads", action='store_true',
                        default=False, help="evaluate downloads only")
    parser.add_argument("--download", action='append', type=str,
                        default=None, help="evaluate download size")
    parser.add_argument("--download-count", action='store', type=int,
                        default=50, help="perform that many downloads")
    parser.add_argument("-r", "--requests", action='store_true',
                        default=False, help="evaluate requests only")
    parser.add_argument("--httpd", action='store_true', default=False,
                        help="evaluate httpd server only")
    parser.add_argument("--caddy", action='store_true', default=False,
                        help="evaluate caddy server only")
    parser.add_argument("--curl-verbose", action='store_true',
                        default=False, help="run curl with `-v`")
    parser.add_argument("protocol", default='h2', nargs='?',
                        help="Name of protocol to score")
    args = parser.parse_args()

    if args.verbose > 0:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        logging.getLogger('').addHandler(console)

    protocol = args.protocol
    handshakes = True
    downloads = [1024*1024, 10*1024*1024, 100*1024*1024]
    requests = True
    test_httpd = protocol != 'h3'
    test_caddy = True
    if args.handshakes:
        downloads = None
        requests = False
    if args.downloads:
        handshakes = False
        requests = False
    if args.download:
        downloads = sorted([parse_size(x) for x in args.download])
        handshakes = False
        requests = False
    if args.requests:
        handshakes = False
        downloads = None
    if args.caddy:
        test_caddy = True
        if args.handshakes:
            downloads = False
            requests = False
        if args.downloads:
            handshakes = False
            requests = False
        if args.requests:
            handshakes = False
            downloads = False
        if args.caddy:
            test_caddy = True
            test_httpd = False
        if args.httpd:
            test_caddy = False
            test_httpd = True
        test_httpd = False
    if args.httpd:
        test_caddy = False
        test_httpd = True

        rv = 0
        self.env = Env()
        self.env.setup()
        self.env.test_timeout = None
        self.httpd = None
        self.nghttpx = None
        self.caddy = None
        try:
            self.httpd = Httpd(env=self.env)
            assert self.httpd.exists(), f'httpd not found: {self.env.httpd}'
            self.httpd.clear_logs()
            assert self.httpd.start()
    rv = 0
    env = Env()
    env.setup()
    env.test_timeout = None
    httpd = None
    nghttpx = None
    caddy = None
    try:
        if test_httpd:
            httpd = Httpd(env=env)
            assert httpd.exists(), \
                f'httpd not found: {env.httpd}'
            httpd.clear_logs()
            assert httpd.start()
        if 'h3' == protocol:
                self.nghttpx = Nghttpx(env=self.env)
                self.nghttpx.clear_logs()
                assert self.nghttpx.start()
            if self.env.caddy:
                self.caddy = Caddy(env=self.env)
                self.caddy.clear_logs()
                assert self.caddy.start()
            nghttpx = Nghttpx(env=env)
            nghttpx.clear_logs()
            assert nghttpx.start()
        if test_caddy and env.caddy:
            caddy = Caddy(env=env)
            caddy.clear_logs()
            assert caddy.start()

            score = self.score_proto(proto=protocol, handshakes=handshakes,
                                     downloads=downloads,
                                     requests=requests,
                                     test_caddy=test_caddy,
                                     test_httpd=test_httpd)
            if args.json:
                print(json.JSONEncoder(indent=2).encode(score))
            else:
                self.print_score(score)
        card = ScoreCard(env=env, httpd=httpd, nghttpx=nghttpx, caddy=caddy,
                         verbose=args.verbose, curl_verbose=args.curl_verbose)
        score = card.score_proto(proto=protocol,
                                 handshakes=handshakes,
                                 downloads=downloads,
                                 download_count=args.download_count,
                                 requests=requests)
        if args.json:
            print(json.JSONEncoder(indent=2).encode(score))
        else:
            card.print_score(score)

        except ScoreCardException as ex:
            sys.stderr.write(f"ERROR: {str(ex)}\n")
            rv = 1
        except KeyboardInterrupt:
            log.warning("aborted")
            rv = 1
        finally:
            if self.caddy:
                self.caddy.stop()
                self.caddy = None
            if self.nghttpx:
                self.nghttpx.stop(wait_dead=False)
            if self.httpd:
                self.httpd.stop()
                self.httpd = None
            sys.exit(rv)
    except ScoreCardException as ex:
        sys.stderr.write(f"ERROR: {str(ex)}\n")
        rv = 1
    except KeyboardInterrupt:
        log.warning("aborted")
        rv = 1
    finally:
        if caddy:
            caddy.stop()
        if nghttpx:
            nghttpx.stop(wait_dead=False)
        if httpd:
            httpd.stop()
    sys.exit(rv)


if __name__ == "__main__":
    ScoreCard().main()
    main()

tests/http/testenv/env.py

@@ -68,6 +68,7 @@ class EnvConfig:
        self.curl_props = {
            'version': None,
            'os': None,
            'fullname': None,
            'features': [],
            'protocols': [],
            'libs': [],

@@ -82,6 +83,7 @@ class EnvConfig:
            if l.startswith('curl '):
                m = re.match(r'^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$', l)
                if m:
                    self.curl_props['fullname'] = m.group(0)
                    self.curl_props['version'] = m.group('version')
                    self.curl_props['os'] = m.group('os')
                    self.curl_props['lib_versions'] = [

@@ -251,7 +253,6 @@ class Env:
    def curl_has_protocol(protocol: str) -> bool:
        return protocol.lower() in Env.CONFIG.curl_props['protocols']


    @staticmethod
    def curl_lib_version(libname: str) -> str:
        prefix = f'{libname.lower()}/'

@@ -264,6 +265,10 @@ class Env:
    def curl_os() -> str:
        return Env.CONFIG.curl_props['os']

    @staticmethod
    def curl_fullname() -> str:
        return Env.CONFIG.curl_props['fullname']

    @staticmethod
    def curl_version() -> str:
        return Env.CONFIG.curl_props['version']