http: negotiation and room for alt-svc/https rr to navigate

Add a 'wanted' major HTTP version bitmask next to the 'allowed' bitmask
in HTTP version negotiation. This will try connections as specified in
'wanted', but enables Alt-Svc and HTTPS-RR to redirect to other major
HTTP versions, if those are 'allowed'.

Changes libcurl internal default to `CURL_HTTP_VERSION_NONE` and removes
the code in curl that sets `CURL_HTTP_VERSION_2TLS` if the command line
does not say anything else.

Closes #16117
This commit is contained in:
Stefan Eissing 2025-01-28 14:11:59 +01:00 committed by Daniel Stenberg
parent a1850ad7de
commit 279a4772ae
No known key found for this signature in database
GPG Key ID: 5CC908FDB71E12C2
11 changed files with 143 additions and 96 deletions

View File

@ -192,7 +192,7 @@ static void cf_hc_reset(struct Curl_cfilter *cf, struct Curl_easy *data)
ctx->state = CF_HC_INIT; ctx->state = CF_HC_INIT;
ctx->result = CURLE_OK; ctx->result = CURLE_OK;
ctx->hard_eyeballs_timeout_ms = data->set.happy_eyeballs_timeout; ctx->hard_eyeballs_timeout_ms = data->set.happy_eyeballs_timeout;
ctx->soft_eyeballs_timeout_ms = data->set.happy_eyeballs_timeout / 2; ctx->soft_eyeballs_timeout_ms = data->set.happy_eyeballs_timeout / 4;
} }
} }
@ -263,8 +263,8 @@ static bool time_to_start_next(struct Curl_cfilter *cf,
break; break;
} }
if(i == idx) { if(i == idx) {
CURL_TRC_CF(data, cf, "all previous ballers have failed, time to start " CURL_TRC_CF(data, cf, "all previous attempts failed, starting %s",
"baller %zu [%s]", idx, ctx->ballers[idx].name); ctx->ballers[idx].name);
return TRUE; return TRUE;
} }
elapsed_ms = Curl_timediff(now, ctx->started); elapsed_ms = Curl_timediff(now, ctx->started);
@ -315,7 +315,7 @@ static CURLcode cf_hc_connect(struct Curl_cfilter *cf,
cf_hc_baller_init(&ctx->ballers[0], cf, data, cf->conn->transport); cf_hc_baller_init(&ctx->ballers[0], cf, data, cf->conn->transport);
if(ctx->baller_count > 1) { if(ctx->baller_count > 1) {
Curl_expire(data, ctx->soft_eyeballs_timeout_ms, EXPIRE_ALPN_EYEBALLS); Curl_expire(data, ctx->soft_eyeballs_timeout_ms, EXPIRE_ALPN_EYEBALLS);
CURL_TRC_CF(data, cf, "set expire for starting next baller in %ums", CURL_TRC_CF(data, cf, "set next attempt to start in %ums",
ctx->soft_eyeballs_timeout_ms); ctx->soft_eyeballs_timeout_ms);
} }
ctx->state = CF_HC_CONNECT; ctx->state = CF_HC_CONNECT;
@ -351,7 +351,7 @@ static CURLcode cf_hc_connect(struct Curl_cfilter *cf,
if(failed_ballers == ctx->baller_count) { if(failed_ballers == ctx->baller_count) {
/* all have failed. we give up */ /* all have failed. we give up */
CURL_TRC_CF(data, cf, "connect, all failed"); CURL_TRC_CF(data, cf, "connect, all attempts failed");
for(i = 0; i < ctx->baller_count; i++) { for(i = 0; i < ctx->baller_count; i++) {
if(ctx->ballers[i].result) { if(ctx->ballers[i].result) {
result = ctx->ballers[i].result; result = ctx->ballers[i].result;
@ -450,7 +450,6 @@ static bool cf_hc_data_pending(struct Curl_cfilter *cf,
if(cf->connected) if(cf->connected)
return cf->next->cft->has_data_pending(cf->next, data); return cf->next->cft->has_data_pending(cf->next, data);
CURL_TRC_CF((struct Curl_easy *)data, cf, "data_pending");
for(i = 0; i < ctx->baller_count; i++) for(i = 0; i < ctx->baller_count; i++)
if(cf_hc_baller_data_pending(&ctx->ballers[i], data)) if(cf_hc_baller_data_pending(&ctx->ballers[i], data))
return TRUE; return TRUE;
@ -606,8 +605,6 @@ static CURLcode cf_hc_create(struct Curl_cfilter **pcf,
ctx->baller_count = alpn_count; ctx->baller_count = alpn_count;
result = Curl_cf_create(&cf, &Curl_cft_http_connect, ctx); result = Curl_cf_create(&cf, &Curl_cft_http_connect, ctx);
CURL_TRC_CF(data, cf, "created with %zu ALPNs -> %d",
ctx->baller_count, result);
if(result) if(result)
goto out; goto out;
ctx = NULL; ctx = NULL;
@ -637,6 +634,17 @@ out:
return result; return result;
} }
static bool cf_https_alpns_contain(enum alpnid id,
enum alpnid *list, size_t len)
{
size_t i;
for(i = 0; i < len; ++i) {
if(id == list[i])
return TRUE;
}
return FALSE;
}
CURLcode Curl_cf_https_setup(struct Curl_easy *data, CURLcode Curl_cf_https_setup(struct Curl_easy *data,
struct connectdata *conn, struct connectdata *conn,
int sockindex, int sockindex,
@ -645,41 +653,55 @@ CURLcode Curl_cf_https_setup(struct Curl_easy *data,
enum alpnid alpn_ids[2]; enum alpnid alpn_ids[2];
size_t alpn_count = 0; size_t alpn_count = 0;
CURLcode result = CURLE_OK; CURLcode result = CURLE_OK;
struct Curl_cfilter cf_fake, *cf = NULL;
(void)sockindex; (void)sockindex;
(void)remotehost; (void)remotehost;
/* we want to log for the filter before we create it, fake it. */
memset(&cf_fake, 0, sizeof(cf_fake));
cf_fake.cft = &Curl_cft_http_connect;
cf = &cf_fake;
if(conn->bits.tls_enable_alpn) { if(conn->bits.tls_enable_alpn) {
#ifdef USE_HTTPSRR #ifdef USE_HTTPSRR
/* Is there an HTTPSRR and if so, do its ALPNs apply here?
* We are here after having selected a connection to a host+port and
* can no longer change that. Any HTTPSRR advice for other hosts and ports
* we need to ignore. */
if(conn->dns_entry && conn->dns_entry->hinfo && if(conn->dns_entry && conn->dns_entry->hinfo &&
!conn->dns_entry->hinfo->no_def_alpn) { !conn->dns_entry->hinfo->no_def_alpn && /* ALPNs are defaults */
size_t i, j; (!conn->dns_entry->hinfo->target || /* for same host */
!conn->dns_entry->hinfo->target[0] ||
(conn->dns_entry->hinfo->target[0] == '.' &&
!conn->dns_entry->hinfo->target[1])) &&
(conn->dns_entry->hinfo->port < 0 || /* for same port */
conn->dns_entry->hinfo->port == conn->remote_port)) {
size_t i;
for(i = 0; i < CURL_ARRAYSIZE(conn->dns_entry->hinfo->alpns) && for(i = 0; i < CURL_ARRAYSIZE(conn->dns_entry->hinfo->alpns) &&
alpn_count < CURL_ARRAYSIZE(alpn_ids); ++i) { alpn_count < CURL_ARRAYSIZE(alpn_ids); ++i) {
bool present = FALSE;
enum alpnid alpn = conn->dns_entry->hinfo->alpns[i]; enum alpnid alpn = conn->dns_entry->hinfo->alpns[i];
for(j = 0; j < alpn_count; ++j) { if(cf_https_alpns_contain(alpn, alpn_ids, alpn_count))
if(alpn == alpn_ids[j]) {
present = TRUE;
break;
}
}
if(present)
continue; continue;
switch(alpn) { switch(alpn) {
case ALPN_h3: case ALPN_h3:
if(Curl_conn_may_http3(data, conn)) if(Curl_conn_may_http3(data, conn))
break; /* not possible */ break; /* not possible */
if(data->state.http_neg.allowed & CURL_HTTP_V3x) if(data->state.http_neg.allowed & CURL_HTTP_V3x) {
CURL_TRC_CF(data, cf, "adding h3 via HTTPS-RR");
alpn_ids[alpn_count++] = alpn; alpn_ids[alpn_count++] = alpn;
}
break; break;
case ALPN_h2: case ALPN_h2:
if(data->state.http_neg.allowed & CURL_HTTP_V2x) if(data->state.http_neg.allowed & CURL_HTTP_V2x) {
CURL_TRC_CF(data, cf, "adding h2 via HTTPS-RR");
alpn_ids[alpn_count++] = alpn; alpn_ids[alpn_count++] = alpn;
}
break; break;
case ALPN_h1: case ALPN_h1:
if(data->state.http_neg.allowed & CURL_HTTP_V1x) if(data->state.http_neg.allowed & CURL_HTTP_V1x) {
CURL_TRC_CF(data, cf, "adding h1 via HTTPS-RR");
alpn_ids[alpn_count++] = alpn; alpn_ids[alpn_count++] = alpn;
}
break; break;
default: /* ignore */ default: /* ignore */
break; break;
@ -688,18 +710,28 @@ CURLcode Curl_cf_https_setup(struct Curl_easy *data,
} }
#endif #endif
if(!alpn_count) { if((alpn_count < CURL_ARRAYSIZE(alpn_ids)) &&
if(data->state.http_neg.allowed & CURL_HTTP_V3x) { (data->state.http_neg.wanted & CURL_HTTP_V3x) &&
result = Curl_conn_may_http3(data, conn); !cf_https_alpns_contain(ALPN_h3, alpn_ids, alpn_count)) {
if(!result) result = Curl_conn_may_http3(data, conn);
alpn_ids[alpn_count++] = ALPN_h3; if(!result) {
else if(data->state.http_neg.allowed == CURL_HTTP_V3x) CURL_TRC_CF(data, cf, "adding wanted h3");
goto out; /* only h3 allowed, not possible, error out */ alpn_ids[alpn_count++] = ALPN_h3;
} }
if(data->state.http_neg.allowed & CURL_HTTP_V2x) else if(data->state.http_neg.wanted == CURL_HTTP_V3x)
alpn_ids[alpn_count++] = ALPN_h2; goto out; /* only h3 allowed, not possible, error out */
else if(data->state.http_neg.allowed & CURL_HTTP_V1x) }
alpn_ids[alpn_count++] = ALPN_h1; if((alpn_count < CURL_ARRAYSIZE(alpn_ids)) &&
(data->state.http_neg.wanted & CURL_HTTP_V2x) &&
!cf_https_alpns_contain(ALPN_h2, alpn_ids, alpn_count)) {
CURL_TRC_CF(data, cf, "adding wanted h2");
alpn_ids[alpn_count++] = ALPN_h2;
}
else if((alpn_count < CURL_ARRAYSIZE(alpn_ids)) &&
(data->state.http_neg.wanted & CURL_HTTP_V1x) &&
!cf_https_alpns_contain(ALPN_h1, alpn_ids, alpn_count)) {
CURL_TRC_CF(data, cf, "adding wanted h1");
alpn_ids[alpn_count++] = ALPN_h1;
} }
} }

View File

@ -193,31 +193,33 @@ void Curl_http_neg_init(struct Curl_easy *data, struct http_negotiation *neg)
neg->accept_09 = data->set.http09_allowed; neg->accept_09 = data->set.http09_allowed;
switch(data->set.httpwant) { switch(data->set.httpwant) {
case CURL_HTTP_VERSION_1_0: case CURL_HTTP_VERSION_1_0:
neg->allowed = (CURL_HTTP_V1x); neg->wanted = neg->allowed = (CURL_HTTP_V1x);
neg->only_10 = TRUE; neg->only_10 = TRUE;
break; break;
case CURL_HTTP_VERSION_1_1: case CURL_HTTP_VERSION_1_1:
neg->allowed = (CURL_HTTP_V1x); neg->wanted = neg->allowed = (CURL_HTTP_V1x);
break; break;
case CURL_HTTP_VERSION_2_0: case CURL_HTTP_VERSION_2_0:
neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x); neg->wanted = neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x);
neg->h2_upgrade = TRUE; neg->h2_upgrade = TRUE;
break; break;
case CURL_HTTP_VERSION_2TLS: case CURL_HTTP_VERSION_2TLS:
neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x); neg->wanted = neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x);
break; break;
case CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE: case CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE:
neg->allowed = (CURL_HTTP_V2x); neg->wanted = neg->allowed = (CURL_HTTP_V2x);
data->state.http_neg.h2_prior_knowledge = TRUE; data->state.http_neg.h2_prior_knowledge = TRUE;
break; break;
case CURL_HTTP_VERSION_3: case CURL_HTTP_VERSION_3:
neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x | CURL_HTTP_V3x); neg->wanted = (CURL_HTTP_V1x | CURL_HTTP_V2x | CURL_HTTP_V3x);
neg->allowed = neg->wanted;
break; break;
case CURL_HTTP_VERSION_3ONLY: case CURL_HTTP_VERSION_3ONLY:
neg->allowed = (CURL_HTTP_V3x); neg->wanted = neg->allowed = (CURL_HTTP_V3x);
break; break;
case CURL_HTTP_VERSION_NONE: case CURL_HTTP_VERSION_NONE:
default: default:
neg->wanted = (CURL_HTTP_V1x | CURL_HTTP_V2x);
neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x | CURL_HTTP_V3x); neg->allowed = (CURL_HTTP_V1x | CURL_HTTP_V2x | CURL_HTTP_V3x);
break; break;
} }
@ -229,7 +231,7 @@ CURLcode Curl_http_setup_conn(struct Curl_easy *data,
/* allocate the HTTP-specific struct for the Curl_easy, only to survive /* allocate the HTTP-specific struct for the Curl_easy, only to survive
during this request */ during this request */
connkeep(conn, "HTTP default"); connkeep(conn, "HTTP default");
if(data->state.http_neg.allowed == CURL_HTTP_V3x) { if(data->state.http_neg.wanted == CURL_HTTP_V3x) {
/* only HTTP/3, needs to work */ /* only HTTP/3, needs to work */
CURLcode result = Curl_conn_may_http3(data, conn); CURLcode result = Curl_conn_may_http3(data, conn);
if(result) if(result)
@ -573,6 +575,7 @@ CURLcode Curl_http_auth_act(struct Curl_easy *data)
(data->req.httpversion_sent > 11)) { (data->req.httpversion_sent > 11)) {
infof(data, "Forcing HTTP/1.1 for NTLM"); infof(data, "Forcing HTTP/1.1 for NTLM");
connclose(conn, "Force HTTP/1.1 connection"); connclose(conn, "Force HTTP/1.1 connection");
data->state.http_neg.wanted = CURL_HTTP_V1x;
data->state.http_neg.allowed = CURL_HTTP_V1x; data->state.http_neg.allowed = CURL_HTTP_V1x;
} }
} }
@ -2849,7 +2852,7 @@ CURLcode Curl_http(struct Curl_easy *data, bool *done)
} }
if(!Curl_conn_is_ssl(conn, FIRSTSOCKET) && (httpversion < 20) && if(!Curl_conn_is_ssl(conn, FIRSTSOCKET) && (httpversion < 20) &&
(data->state.http_neg.allowed & CURL_HTTP_V2x) && (data->state.http_neg.wanted & CURL_HTTP_V2x) &&
data->state.http_neg.h2_upgrade) { data->state.http_neg.h2_upgrade) {
/* append HTTP2 upgrade magic stuff to the HTTP request if it is not done /* append HTTP2 upgrade magic stuff to the HTTP request if it is not done
over SSL */ over SSL */

View File

@ -76,6 +76,7 @@ struct dynhds;
struct http_negotiation { struct http_negotiation {
unsigned char rcvd_min; /* minimum version seen in responses, 09, 10, 11 */ unsigned char rcvd_min; /* minimum version seen in responses, 09, 10, 11 */
http_majors wanted; /* wanted major versions when talking to server */
http_majors allowed; /* allowed major versions when talking to server */ http_majors allowed; /* allowed major versions when talking to server */
BIT(h2_upgrade); /* Do HTTP Upgrade from 1.1 to 2 */ BIT(h2_upgrade); /* Do HTTP Upgrade from 1.1 to 2 */
BIT(h2_prior_knowledge); /* Directly do HTTP/2 without ALPN/SSL */ BIT(h2_prior_knowledge); /* Directly do HTTP/2 without ALPN/SSL */

View File

@ -2795,7 +2795,7 @@ out:
bool Curl_http2_may_switch(struct Curl_easy *data) bool Curl_http2_may_switch(struct Curl_easy *data)
{ {
if(Curl_conn_http_version(data, data->conn) < 20 && if(Curl_conn_http_version(data, data->conn) < 20 &&
(data->state.http_neg.allowed & CURL_HTTP_V2x) && (data->state.http_neg.wanted & CURL_HTTP_V2x) &&
data->state.http_neg.h2_prior_knowledge) { data->state.http_neg.h2_prior_knowledge) {
#ifndef CURL_DISABLE_PROXY #ifndef CURL_DISABLE_PROXY
if(data->conn->bits.httpproxy && !data->conn->bits.tunnel_proxy) { if(data->conn->bits.httpproxy && !data->conn->bits.tunnel_proxy) {

View File

@ -1918,6 +1918,7 @@ static CURLMcode state_performing(struct Curl_easy *data,
if(!ret) { if(!ret) {
infof(data, "Downgrades to HTTP/1.1"); infof(data, "Downgrades to HTTP/1.1");
streamclose(data->conn, "Disconnect HTTP/2 for HTTP/1"); streamclose(data->conn, "Disconnect HTTP/2 for HTTP/1");
data->state.http_neg.wanted = CURL_HTTP_V1x;
data->state.http_neg.allowed = CURL_HTTP_V1x; data->state.http_neg.allowed = CURL_HTTP_V1x;
/* clear the error message bit too as we ignore the one we got */ /* clear the error message bit too as we ignore the one we got */
data->state.errorbuf = FALSE; data->state.errorbuf = FALSE;

View File

@ -600,11 +600,6 @@ static CURLcode setopt_long(struct Curl_easy *data, CURLoption option,
*/ */
switch(arg) { switch(arg) {
case CURL_HTTP_VERSION_NONE: case CURL_HTTP_VERSION_NONE:
#ifdef USE_HTTP2
/* This seems an undesirable quirk to force a behaviour on lower
* implementations that they should recognize independently? */
arg = CURL_HTTP_VERSION_2TLS;
#endif
/* accepted */ /* accepted */
break; break;
case CURL_HTTP_VERSION_1_0: case CURL_HTTP_VERSION_1_0:

View File

@ -473,11 +473,7 @@ CURLcode Curl_init_userdefined(struct Curl_easy *data)
set->maxage_conn = 118; set->maxage_conn = 118;
set->maxlifetime_conn = 0; set->maxlifetime_conn = 0;
set->http09_allowed = FALSE; set->http09_allowed = FALSE;
#ifdef USE_HTTP2 set->httpwant = CURL_HTTP_VERSION_NONE
set->httpwant = CURL_HTTP_VERSION_2TLS
#else
set->httpwant = CURL_HTTP_VERSION_1_1
#endif
; ;
#if defined(USE_HTTP2) || defined(USE_HTTP3) #if defined(USE_HTTP2) || defined(USE_HTTP3)
memset(&set->priority, 0, sizeof(set->priority)); memset(&set->priority, 0, sizeof(set->priority));
@ -1034,9 +1030,7 @@ static bool url_match_conn(struct connectdata *conn, void *userdata)
#ifndef CURL_DISABLE_HTTP #ifndef CURL_DISABLE_HTTP
/* If looking for HTTP and the HTTP versions allowed do not include /* If looking for HTTP and the HTTP versions allowed do not include
* the HTTP version of conn, continue looking. * the HTTP version of conn, continue looking. */
* CURL_HTTP_VERSION_2TLS is default which indicates no preference,
* so we take any existing connection. */
if((needle->handler->protocol & PROTO_FAMILY_HTTP)) { if((needle->handler->protocol & PROTO_FAMILY_HTTP)) {
switch(Curl_conn_http_version(data, conn)) { switch(Curl_conn_http_version(data, conn)) {
case 30: case 30:
@ -3074,44 +3068,47 @@ static CURLcode parse_connect_to_slist(struct Curl_easy *data,
enum alpnid srcalpnid = ALPN_none; enum alpnid srcalpnid = ALPN_none;
bool hit = FALSE; bool hit = FALSE;
struct altsvc *as = NULL; struct altsvc *as = NULL;
int allowed_versions = ALPN_none; int allowed_alpns = ALPN_none;
struct http_negotiation *neg = &data->state.http_neg;
if(data->state.http_neg.allowed & CURL_HTTP_V3x) DEBUGF(infof(data, "Alt-svc check wanted=%x, allowed=%x",
allowed_versions |= ALPN_h3; neg->wanted, neg->allowed));
if(data->state.http_neg.allowed & CURL_HTTP_V2x) if(neg->allowed & CURL_HTTP_V3x)
allowed_versions |= ALPN_h2; allowed_alpns |= ALPN_h3;
if(data->state.http_neg.allowed & CURL_HTTP_V1x) if(neg->allowed & CURL_HTTP_V2x)
allowed_versions |= ALPN_h1; allowed_alpns |= ALPN_h2;
allowed_versions &= (int)data->asi->flags; if(neg->allowed & CURL_HTTP_V1x)
allowed_alpns |= ALPN_h1;
allowed_alpns &= (int)data->asi->flags;
host = conn->host.rawalloc; host = conn->host.rawalloc;
DEBUGF(infof(data, "check Alt-Svc for host %s", host)); DEBUGF(infof(data, "check Alt-Svc for host %s", host));
#ifdef USE_HTTP3 #ifdef USE_HTTP3
if(!hit && (allowed_versions & ALPN_h3)) { if(!hit && (neg->wanted & CURL_HTTP_V3x)) {
srcalpnid = ALPN_h3; srcalpnid = ALPN_h3;
hit = Curl_altsvc_lookup(data->asi, hit = Curl_altsvc_lookup(data->asi,
ALPN_h3, host, conn->remote_port, /* from */ ALPN_h3, host, conn->remote_port, /* from */
&as /* to */, &as /* to */,
allowed_versions); allowed_alpns);
} }
#endif #endif
#ifdef USE_HTTP2 #ifdef USE_HTTP2
if(!hit && (allowed_versions & ALPN_h2) && if(!hit && (neg->wanted & CURL_HTTP_V2x) &&
!data->state.http_neg.h2_prior_knowledge) { !neg->h2_prior_knowledge) {
srcalpnid = ALPN_h2; srcalpnid = ALPN_h2;
hit = Curl_altsvc_lookup(data->asi, hit = Curl_altsvc_lookup(data->asi,
ALPN_h2, host, conn->remote_port, /* from */ ALPN_h2, host, conn->remote_port, /* from */
&as /* to */, &as /* to */,
allowed_versions); allowed_alpns);
} }
#endif #endif
if(!hit && (allowed_versions & ALPN_h1) && if(!hit && (neg->wanted & CURL_HTTP_V1x) &&
!data->state.http_neg.only_10) { !neg->only_10) {
srcalpnid = ALPN_h1; srcalpnid = ALPN_h1;
hit = Curl_altsvc_lookup(data->asi, hit = Curl_altsvc_lookup(data->asi,
ALPN_h1, host, conn->remote_port, /* from */ ALPN_h1, host, conn->remote_port, /* from */
&as /* to */, &as /* to */,
allowed_versions); allowed_alpns);
} }
if(hit) { if(hit) {
@ -3131,15 +3128,15 @@ static CURLcode parse_connect_to_slist(struct Curl_easy *data,
/* protocol version switch */ /* protocol version switch */
switch(as->dst.alpnid) { switch(as->dst.alpnid) {
case ALPN_h1: case ALPN_h1:
data->state.http_neg.allowed = CURL_HTTP_V1x; neg->wanted = neg->allowed = CURL_HTTP_V1x;
data->state.http_neg.only_10 = FALSE; neg->only_10 = FALSE;
break; break;
case ALPN_h2: case ALPN_h2:
data->state.http_neg.allowed = CURL_HTTP_V2x; neg->wanted = neg->allowed = CURL_HTTP_V2x;
break; break;
case ALPN_h3: case ALPN_h3:
conn->transport = TRNSPRT_QUIC; conn->transport = TRNSPRT_QUIC;
data->state.http_neg.allowed = CURL_HTTP_V3x; neg->wanted = neg->allowed = CURL_HTTP_V3x;
break; break;
default: /* should not be possible */ default: /* should not be possible */
break; break;

View File

@ -1541,7 +1541,7 @@ static CURLcode cf_ssl_create(struct Curl_cfilter **pcf,
DEBUGASSERT(data->conn); DEBUGASSERT(data->conn);
ctx = cf_ctx_new(data, alpn_get_spec(data->state.http_neg.allowed, ctx = cf_ctx_new(data, alpn_get_spec(data->state.http_neg.wanted,
conn->bits.tls_enable_alpn)); conn->bits.tls_enable_alpn));
if(!ctx) { if(!ctx) {
result = CURLE_OUT_OF_MEMORY; result = CURLE_OUT_OF_MEMORY;

View File

@ -1310,6 +1310,7 @@ static CURLcode ws_setup_conn(struct Curl_easy *data,
/* WebSockets is 1.1 only (for now) */ /* WebSockets is 1.1 only (for now) */
data->state.http_neg.accept_09 = FALSE; data->state.http_neg.accept_09 = FALSE;
data->state.http_neg.only_10 = FALSE; data->state.http_neg.only_10 = FALSE;
data->state.http_neg.wanted = CURL_HTTP_V1x;
data->state.http_neg.allowed = CURL_HTTP_V1x; data->state.http_neg.allowed = CURL_HTTP_V1x;
return Curl_http_setup_conn(data, conn); return Curl_http_setup_conn(data, conn);
} }

View File

@ -1087,8 +1087,6 @@ static CURLcode config2setopts(struct GlobalConfig *global,
if(config->httpversion) if(config->httpversion)
my_setopt_enum(curl, CURLOPT_HTTP_VERSION, config->httpversion); my_setopt_enum(curl, CURLOPT_HTTP_VERSION, config->httpversion);
else if(feature_http2)
my_setopt_enum(curl, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
/* curl 7.19.1 (the 301 version existed in 7.18.2), /* curl 7.19.1 (the 301 version existed in 7.18.2),
303 was added in 7.26.0 */ 303 was added in 7.26.0 */

View File

@ -76,30 +76,24 @@ class TestReuse:
assert r.total_connects == count assert r.total_connects == count
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_03_alt_svc_h2h3(self, env: Env, httpd, nghttpx): def test_12_03_as_follow_h2h3(self, env: Env, httpd, nghttpx):
# Without '--http*' an Alt-Svc redirection from h2 to h3 is allowed
httpd.clear_extra_configs() httpd.clear_extra_configs()
httpd.reload() httpd.reload()
count = 2 # write a alt-svc file that advises h3 instead of h2
# write a alt-svc file the advises h3 instead of h2
asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt') asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt')
ts = datetime.now() + timedelta(hours=24) self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}')
expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
with open(asfile, 'w') as fd:
fd.write(f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.https_port} "{expires}" 0 0')
log.info(f'altscv: {open(asfile).readlines()}')
curl = CurlClient(env=env) curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]' urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln], with_stats=True, extra_args=[ r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
'--alt-svc', f'{asfile}', '--http3', '--alt-svc', f'{asfile}',
]) ])
r.check_response(count=count, http_status=200) r.check_response(count=1, http_status=200)
# We expect the connection to be reused assert r.stats[0]['http_version'] == '3', f'{r.stats}'
assert r.total_connects == 1
for s in r.stats:
assert s['http_version'] == '3', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_04_alt_svc_h3h2(self, env: Env, httpd, nghttpx): def test_12_04_as_follow_h3h2(self, env: Env, httpd, nghttpx):
# With '--http3' an Alt-Svc redirection from h3 to h2 is allowed
httpd.clear_extra_configs() httpd.clear_extra_configs()
httpd.reload() httpd.reload()
count = 2 count = 2
@ -122,7 +116,8 @@ class TestReuse:
assert s['http_version'] == '2', f'{s}' assert s['http_version'] == '2', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_05_alt_svc_h3h1(self, env: Env, httpd, nghttpx): def test_12_05_as_follow_h3h1(self, env: Env, httpd, nghttpx):
# With '--http3' an Alt-Svc redirection from h3 to h1 is allowed
httpd.clear_extra_configs() httpd.clear_extra_configs()
httpd.reload() httpd.reload()
count = 2 count = 2
@ -145,7 +140,8 @@ class TestReuse:
assert s['http_version'] == '1.1', f'{s}' assert s['http_version'] == '1.1', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_06_alt_svc_h3h1_h3only(self, env: Env, httpd, nghttpx): def test_12_06_as_ignore_h3h1(self, env: Env, httpd, nghttpx):
# With '--http3-only' an Alt-Svc redirection from h3 to h1 is ignored
httpd.clear_extra_configs() httpd.clear_extra_configs()
httpd.reload() httpd.reload()
count = 2 count = 2
@ -166,3 +162,26 @@ class TestReuse:
assert r.total_connects == 1 assert r.total_connects == 1
for s in r.stats: for s in r.stats:
assert s['http_version'] == '3', f'{s}' assert s['http_version'] == '3', f'{s}'
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_07_as_ignore_h2h3(self, env: Env, httpd, nghttpx):
    # With '--http2' an Alt-Svc redirection from h2 to h3 is ignored,
    # since h3 is then not among the allowed HTTP versions
    httpd.clear_extra_configs()
    httpd.reload()
    # write an alt-svc file that advises h3 instead of h2;
    # use a test-specific file name so we never read state written by
    # test_12_03 (which uses 'alt-svc-12_03.txt')
    asfile = os.path.join(env.gen_dir, 'alt-svc-12_07.txt')
    self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}')
    curl = CurlClient(env=env)
    urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
    r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
        '--alt-svc', f'{asfile}', '--http2'
    ])
    r.check_response(count=1, http_status=200)
    # the transfer must stay on h2, ignoring the Alt-Svc h3 advice
    assert r.stats[0]['http_version'] == '2', f'{r.stats}'
def create_asfile(self, fpath, line):
    # Write `line` as a single Alt-Svc cache entry to `fpath`, appending
    # an expiry timestamp 24 hours in the future so curl treats the
    # entry as valid when it loads the file.
    ts = datetime.now() + timedelta(hours=24)
    expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
    with open(fpath, 'w') as fd:
        fd.write(f'{line} "{expires}" 0 0')
    # re-open via a context manager so the handle is closed; the
    # previous `open(fpath).readlines()` leaked the file object
    with open(fpath) as fd:
        log.info(f'alt-svc: {fd.readlines()}')