edenapi on x2pagentd (and everything using hg-http)

Summary: This should enable everything that uses Rust's HTTP client (hg-http) to go over x2pagentd. The biggest user is edenapi. It should work on all platforms (including Windows!).

Reviewed By: ahornby

Differential Revision: D31430275

fbshipit-source-id: f90a633eb3cc4e82447b1b76200499016dcb6b8e
Jan Mazur 2021-10-08 06:44:14 -07:00 committed by Facebook GitHub Bot
parent 6817ad53cf
commit 42232c29e4
7 changed files with 41 additions and 68 deletions
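Below is a simplified, self-contained sketch of the pattern this commit wires up: the agent's unix socket path (read from the auth_proxy.unix_socket_path config) is recorded once in a process-wide HTTP config, and every later request picks it up. The names mirror the diff, but the real HgHttpConfig carries more fields (TLS flags, client info, verbosity), and the socket path shown is a made-up example.

use once_cell::sync::OnceCell;

// Simplified stand-in for hg-http's HgHttpConfig; the real struct has
// additional fields (TLS verification flags, client info, verbosity, ...).
#[derive(Debug, Default)]
struct HttpConfig {
    unix_socket_path: Option<String>,
}

// Mirrors the idea of hg_http::set_global_config: record the proxy socket
// path once so every subsequent HTTP request can route through it.
static GLOBAL_CONFIG: OnceCell<HttpConfig> = OnceCell::new();

fn set_global_config(config: HttpConfig) {
    let _ = GLOBAL_CONFIG.set(config);
}

fn main() {
    // In hg this value comes from auth_proxy.unix_socket_path; the path
    // below is only an example.
    set_global_config(HttpConfig {
        unix_socket_path: Some("/var/run/x2pagentd.sock".to_string()),
    });
    println!("{:?}", GLOBAL_CONFIG.get());
}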

View File

@ -44,16 +44,6 @@ Create a new client repository. Enable LFS there.
> remotenames =
> EOF
Check if we can do LFS operations through HTTP proxy
$ PROXY_PORT=$(get_free_socket)
$ lfs_port=$(echo $lfs_uri | grep -Eo ":[0-9]+\/?" | grep -Eo "[0-9]+")
$ ncat -lkv localhost $PROXY_PORT -c "tee -a ${TESTTMP}/ncat_proxy.log | ncat localhost ${lfs_port} | tee -a ${TESTTMP}/ncat_proxy.log" 2>/dev/null &
$ ncat_pid=$!
$ cat >> .hg/hgrc <<EOF
> [auth_proxy]
> http_proxy=http://localhost:$PROXY_PORT
> EOF
Update in the client repo
$ hgmn pull -q
devel-warn: applied empty changegroup at* (glob)
@ -190,9 +180,3 @@ Create a new client repository, using getpack (with its own cachepath)
A lfs-largefile-renamed
lfs-largefile-for-rename
R lfs-largefile-for-rename
$ cat "${TESTTMP}/ncat_proxy.log" | grep -o "^HTTP/1.1 200 OK"
HTTP/1.1 200 OK
HTTP/1.1 200 OK
$ kill $ncat_pid

View File

@ -53,6 +53,7 @@ pub struct HgHttpConfig {
pub disable_tls_verification: bool,
pub convert_cert: bool,
pub client_info: Option<String>,
pub unix_socket_path: Option<String>,
}
/// Set a global configuration that will be applied to all HTTP requests in
@ -67,6 +68,7 @@ pub fn set_global_config(config: HgHttpConfig) {
.set_verify_tls_host(!config.disable_tls_verification)
.set_client_info(&config.client_info)
.set_verbose(config.verbose)
.set_auth_proxy_socket_path(config.unix_socket_path.clone())
.set_convert_cert(config.convert_cert);
});
}

View File

@ -14,6 +14,7 @@ use clidispatch::io::IO;
use clidispatch::{dispatch, errors};
use clientinfo::ClientInfo;
use configparser::config::ConfigSet;
use configparser::configmodel::ConfigExt;
use fail::FailScenario;
use hg_http::HgHttpConfig;
use once_cell::sync::Lazy;
@ -620,6 +621,9 @@ fn setup_http(config: &ConfigSet, global_opts: &HgGlobalOpts) {
.get_or("http", "convert-cert", || cfg!(windows))
.unwrap_or(cfg!(windows)),
client_info: ClientInfo::new(config).and_then(|i| i.into_json()).ok(),
unix_socket_path: config
.get_nonempty_opt("auth_proxy", "unix_socket_path")
.expect("Can't get auth_proxy.unix_socket_path config"),
};
hg_http::set_global_config(http_config);
}
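In the setup_http hunk above, get_nonempty_opt presumably yields None when auth_proxy.unix_socket_path is unset or empty, so the expect should only fire if the config value itself cannot be read; the absence of the setting just means no agent socket is configured. A tiny self-contained sketch of that assumed "empty means unset" behavior, using a plain map in place of hg's ConfigSet:

use std::collections::HashMap;

// Mimics the assumed get_nonempty_opt semantics: a missing or empty value
// comes back as None.
fn get_nonempty(config: &HashMap<&str, &str>, key: &str) -> Option<String> {
    config
        .get(key)
        .map(|v| v.trim())
        .filter(|v| !v.is_empty())
        .map(|v| v.to_string())
}

fn main() {
    let mut config = HashMap::new();
    config.insert("auth_proxy.unix_socket_path", ""); // empty: treated as unset
    assert_eq!(get_nonempty(&config, "auth_proxy.unix_socket_path"), None);

    // Made-up example path.
    config.insert("auth_proxy.unix_socket_path", "/var/run/x2pagentd.sock");
    assert_eq!(
        get_nonempty(&config, "auth_proxy.unix_socket_path").as_deref(),
        Some("/var/run/x2pagentd.sock")
    );
}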

View File

@ -17,6 +17,7 @@ curl-sys = "0.4"
env_logger = "0.7"
futures = { version = "0.3.13", features = ["async-await", "compat"] }
http = "0.2"
maplit = "1.0"
once_cell = "1.4"
openssl = "0.10.35"
parking_lot = "0.10.2"

View File

@ -5,7 +5,9 @@
* GNU General Public License version 2.
*/
use maplit::hashmap;
use std::borrow::Cow;
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::fs::File;
@ -163,7 +165,7 @@ pub struct RequestId(usize);
#[derive(Clone, Debug)]
pub struct Request {
ctx: RequestContext,
headers: Vec<(String, String)>,
headers: HashMap<String, String>,
cert: Option<PathBuf>,
key: Option<PathBuf>,
cainfo: Option<PathBuf>,
@ -240,7 +242,9 @@ impl Request {
ctx,
// Always set Expect so we can disable curl automatically expecting "100-continue".
// That would require two response reads, which breaks the http_client model.
headers: vec![("Expect".to_string(), "".to_string())],
headers: hashmap! {
"Expect".to_string() => "".to_string(),
},
cert: None,
key: None,
cainfo: None,
@ -393,10 +397,15 @@ impl Request {
/// Set a request header.
pub fn set_header(&mut self, name: impl ToString, value: impl ToString) -> &mut Self {
self.headers.push((name.to_string(), value.to_string()));
self.headers
.insert(name.to_string().to_lowercase(), value.to_string());
self
}
pub fn get_header_mut<'a>(&'a mut self, name: impl ToString) -> Option<&'a mut String> {
self.headers.get_mut(&name.to_string().to_lowercase())
}
/// Specify a client certificate for TLS mutual authentication.
///
/// This should be a path to a base64-encoded PEM file containing the
@ -629,10 +638,23 @@ impl Request {
.trigger_new_request(&mut self);
let body_size = self.ctx.body.as_ref().map(|body| body.len() as u64);
let url = self.ctx.url().clone();
let mut url = self.ctx.url().clone();
if self.auth_proxy_socket_path.is_some() {
url.set_scheme("http")
.expect("Failed setting url scheme to http");
self.set_header("x-x2pagentd-ws-over-h1", "1")
.set_verify_tls_cert(false)
.set_verify_tls_host(false)
.set_convert_cert(false);
if let Some(user_agent) = self.get_header_mut("user-agent") {
user_agent.push_str("+x2pagentd");
}
}
let handler = create_handler(self.ctx);
let mut easy = Easy2::new(handler);
easy.url(url.as_str())?;
easy.verbose(self.verbose)?;
easy.unix_socket_path(self.auth_proxy_socket_path)?;
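The hunk above routes the request over the agent's unix socket via curl's unix_socket_path, flips the URL scheme to plain http, and disables certificate checks, presumably because the agent terminates TLS on the far side of the socket. A standalone sketch of that curl usage follows; the socket path and URL are hypothetical examples, not values taken from this commit.

use curl::easy::{Easy2, Handler, WriteError};

// Collect the response body in memory.
struct Collector(Vec<u8>);

impl Handler for Collector {
    fn write(&mut self, data: &[u8]) -> Result<usize, WriteError> {
        self.0.extend_from_slice(data);
        Ok(data.len())
    }
}

fn main() -> Result<(), curl::Error> {
    let mut easy = Easy2::new(Collector(Vec::new()));
    // Hypothetical socket path; in hg it would come from the
    // auth_proxy.unix_socket_path config.
    easy.unix_socket_path(Some("/var/run/x2pagentd.sock"))?;
    // Plain http: the agent on the other end of the socket handles TLS,
    // which is why the diff flips the scheme and skips cert verification.
    easy.url("http://example.com/edenapi/capabilities")?;
    easy.get(true)?;
    easy.perform()?;
    println!("HTTP {}", easy.response_code()?);
    Ok(())
}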
@ -684,12 +706,12 @@ impl Request {
// as a regular header (without setting CURLOPT_ACCEPT_ENCODING) and decode the response
// manually. This allows us to ensure support for formats we care about (e.g., zstd).
self.headers
.push((header::ACCEPT_ENCODING.as_str().into(), encoding));
.insert(header::ACCEPT_ENCODING.as_str().into(), encoding);
}
// Add headers.
let mut headers = List::new();
for (name, value) in self.headers {
for (name, value) in self.headers.iter() {
let header = format!("{}: {}", name, value);
headers.append(&header)?;
}
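The header-storage changes above (a Vec of pairs replaced by a HashMap keyed on lowercased names, plus the new get_header_mut and the insert-based header loop) make lookups case-insensitive, collapse repeated set_header calls into an overwrite, and let the proxy path edit an existing header in place. A minimal, self-contained sketch of that behavior, with a made-up user-agent value:

use std::collections::HashMap;

// Minimal stand-in for the Request header storage after this change:
// names are lowercased on insert, so lookups are case-insensitive and
// setting the same header twice overwrites rather than duplicating.
#[derive(Default)]
struct Headers(HashMap<String, String>);

impl Headers {
    fn set(&mut self, name: &str, value: &str) {
        self.0.insert(name.to_lowercase(), value.to_string());
    }

    fn get_mut(&mut self, name: &str) -> Option<&mut String> {
        self.0.get_mut(&name.to_lowercase())
    }
}

fn main() {
    let mut headers = Headers::default();
    headers.set("User-Agent", "example-client/1.0"); // made-up value
    // The proxied code path can now tag the existing user agent in place,
    // as the diff does with "+x2pagentd".
    if let Some(ua) = headers.get_mut("user-agent") {
        ua.push_str("+x2pagentd");
    }
    assert_eq!(headers.0["user-agent"], "example-client/1.0+x2pagentd");
}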

View File

@ -12,7 +12,6 @@ auth = { path = "../auth" }
bincode = "1.3.3"
blake2 = "0.8"
byteorder = "1.3"
configmodel = { path = "../configmodel" }
configparser = { path = "../configparser" }
crossbeam = "0.8"
edenapi = { path = "../edenapi" }

View File

@ -24,7 +24,6 @@ use std::{
};
use anyhow::{anyhow, bail, ensure, format_err, Context, Result};
use configmodel::ConfigExt;
use futures::{
future::FutureExt,
stream::{iter, FuturesUnordered, StreamExt, TryStreamExt},
@ -114,8 +113,6 @@ struct HttpOptions {
backoff_times: Vec<f32>,
throttle_backoff_times: Vec<f32>,
request_timeout: Duration,
proxy_unix_socket: Option<String>,
auth_http_proxy: Option<String>,
use_client_certs: bool,
}
@ -1056,7 +1053,7 @@ impl LfsRemoteInner {
async fn send_with_retry(
client: &HttpClient,
method: Method,
mut url: Url,
url: Url,
add_extra: impl Fn(Request) -> Request,
check_status: impl Fn(StatusCode) -> Result<(), TransferError>,
http_options: &HttpOptions,
@ -1071,23 +1068,10 @@ impl LfsRemoteInner {
});
}
let mut user_agent_sufix = "";
if http_options.proxy_unix_socket.is_some() || http_options.auth_http_proxy.is_some() {
url.set_scheme("http").expect("Couldn't set url scheme");
user_agent_sufix = "+proxied";
}
let span = trace_span!("LfsRemoteInner::send_with_retry", url = %url);
let host_str = url.host_str().expect("No host in url").to_string();
if let Some(proxy_url) = &http_options.auth_http_proxy {
let proxy_url = Url::parse(&proxy_url).expect("Failed to parse http proxy url");
url.set_host(proxy_url.host_str())
.expect("Couldn't set url host");
url.set_port(proxy_url.port())
.expect("Couldn't set url port");
}
async move {
let mut backoff = http_options.backoff_times.iter().copied();
@ -1101,10 +1085,7 @@ impl LfsRemoteInner {
let mut req = Request::new(url.clone(), method)
.header("Accept", "application/vnd.git-lfs+json")
.header("Content-Type", "application/vnd.git-lfs+json")
.header(
"User-Agent",
format!("{}{}", &http_options.user_agent, user_agent_sufix),
)
.header("User-Agent", &http_options.user_agent)
.header("X-Attempt", attempt.to_string())
.header("X-Attempts-Left", backoff.len().to_string())
.header("Host", host_str.clone())
@ -1126,18 +1107,7 @@ impl LfsRemoteInner {
req.set_min_transfer_speed(mts);
}
if http_options.proxy_unix_socket.is_some()
|| http_options.auth_http_proxy.is_some()
{
if http_options.proxy_unix_socket.is_some() {
req.set_auth_proxy_socket_path(http_options.proxy_unix_socket.clone());
}
req.set_header("x-x2pagentd-ws-over-h1", "1");
req.set_verify_tls_cert(false)
.set_verify_tls_host(false)
.set_verbose(true)
.set_convert_cert(false);
} else if let Some(auth) = &http_options.auth {
if let Some(auth) = &http_options.auth {
if let Some(cert) = &auth.cert {
req.set_cert(cert);
}
@ -1521,7 +1491,7 @@ impl LfsRemote {
// A trailing '/' needs to be present so that `Url::join` doesn't remove the reponame
// present at the end of the config.
url.push('/');
let mut url = Url::parse(&url)?;
let url = Url::parse(&url)?;
let move_after_upload = config.get_or("lfs", "moveafterupload", || false)?;
let ignore_prefetch_errors = config.get_or("lfs", "ignore-prefetch-errors", || false)?;
@ -1541,17 +1511,10 @@ impl LfsRemote {
if !["http", "https"].contains(&url.scheme()) {
bail!("Unsupported url: {}", url);
}
let proxy_unix_socket =
config.get_nonempty_opt::<String>("auth_proxy", "unix_socket_path")?;
let auth_http_proxy = config.get_nonempty_opt::<String>("auth_proxy", "http_proxy")?;
let mut use_client_certs = config.get_or("lfs", "use-client-certs", || true)?;
let use_client_certs = config.get_or("lfs", "use-client-certs", || true)?;
let auth = if proxy_unix_socket.is_some() || auth_http_proxy.is_some() {
url.set_scheme("http").expect("Couldn't change url scheme");
use_client_certs = false;
None
} else if use_client_certs {
let auth = if use_client_certs {
AuthSection::from_config(config).best_match_for(&url)?
} else {
None
@ -1626,8 +1589,6 @@ impl LfsRemote {
backoff_times,
throttle_backoff_times,
request_timeout,
proxy_unix_socket,
auth_http_proxy,
use_client_certs,
},
}),