add the hostname of the generating server to URIs returned in response to the /batch endpoint

Summary:
This wiki page shows how basic interaction with the LFS server works (it is essentially the git-lfs protocol): https://www.internalfb.com/intern/wiki/Source_Control/Mononoke/Production/LFS_Server/#sending-requests-to-lfs

In short, the client asks: "Hey, where can I get objects (a, b, c, d)?"
The server responds with URLs from which the objects can be fetched.
Those URLs point back at an LFS server.
When the client eventually fetches the objects, we would like to know which server (and in particular which region) generated the URLs for it.
To do that, I'm adding the hostname of the server that responds to the /batch endpoint as an HTTP query parameter on the returned URLs. We'll be able to log it to Scuba in the /download and /upload endpoints.
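For illustration, here is a minimal standalone sketch of the mechanism, not the Mononoke code itself: the helper names and example hosts below are made up, and it assumes the http crate the server already depends on. The idea is that the /batch handler appends its own hostname as a query parameter to every URL it hands out, and the /download and /upload handlers can read that parameter back from the request URI and log it.

use http::Uri;

// Hypothetical helper for the /batch side: tag an outgoing URL with the
// hostname of the server that generated it, using '?' or '&' as appropriate.
fn append_server_hostname(url: &str, server_hostname: &str) -> Result<Uri, http::uri::InvalidUri> {
    let sep = if url.contains('?') { '&' } else { '?' };
    format!("{}{}server_hostname={}", url, sep, server_hostname).parse()
}

// Hypothetical helper for the /download and /upload side: read the parameter
// back out of the request URI so it can be logged (e.g. to Scuba).
fn server_hostname_from_query(uri: &Uri) -> Option<&str> {
    uri.query()?
        .split('&')
        .find_map(|pair| pair.strip_prefix("server_hostname="))
}

fn main() -> Result<(), http::uri::InvalidUri> {
    // Example URLs and hostnames are placeholders, not real Mononoke hosts.
    let plain = append_server_hostname(
        "http://lfs.example.com/repo123/download/abc123",
        "host1.region-a",
    )?;
    let routed = append_server_hostname(
        "http://lfs.example.com/repo123/download/abc123?routing=def456",
        "host1.region-a",
    )?;
    assert_eq!(server_hostname_from_query(&plain), Some("host1.region-a"));
    assert_eq!(server_hostname_from_query(&routed), Some("host1.region-a"));
    println!("{}\n{}", plain, routed);
    Ok(())
}

In the actual change below, the hostname is looked up once when the server context is created, cached in an Arc<String>, and threaded through UriBuilder; the parameter then shows up as the http_query field in the Scuba samples.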

Differential Revision: D46964214

fbshipit-source-id: aa6b3594391a66b9bd931fb393cf5d672fbac4d8
Jan Mazur 2023-06-30 06:03:09 -07:00 committed by Facebook GitHub Bot
parent 165cc08b36
commit c3cf284765
12 changed files with 103 additions and 72 deletions

View File

@ -27,6 +27,7 @@ futures-util = "0.3.7"
gotham = "0.7.1"
gotham_derive = "0.7.0"
gotham_ext = { version = "0.1.0", path = "../gotham_ext" }
hostname = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
http = "0.2"
hyper = { version = "0.14.26", features = ["client", "http1", "http2", "stream"] }
hyper-openssl = "0.9"

View File

@ -706,6 +706,8 @@ mod test {
use crate::lfs_server_context::ServerUris;
use crate::Repo;
const SERVER_HOSTNAME: &str = "mzr.re";
fn obj(oid: Sha256, size: u64) -> RequestObject {
RequestObject {
oid: oid.into(),
@ -877,8 +879,8 @@ mod test {
fn upload_uri(object: &RequestObject) -> Result<Uri, Error> {
let r = format!(
"http://foo.com/repo123/upload/{}/{}",
object.oid, object.size
"http://foo.com/repo123/upload/{}/{}?server_hostname={}",
object.oid, object.size, SERVER_HOSTNAME
)
.parse()?;
Ok(r)
@ -915,6 +917,7 @@ mod test {
repository: "repo123".to_string(),
server: Arc::new(server),
host: "foo.com".to_string(),
server_hostname: Arc::new(SERVER_HOSTNAME.to_string()),
};
let res = batch_upload_response_objects(

View File

@ -25,6 +25,7 @@ use gotham::state::FromState;
use gotham::state::State;
use gotham_derive::StateData;
use gotham_ext::body_ext::BodyExt;
use hostname::get_hostname;
use http::header::HeaderMap;
use http::uri::Authority;
use http::uri::Parts;
@ -70,6 +71,7 @@ struct LfsServerContextInner {
config_handle: ConfigHandle<ServerConfig>,
logger: Logger,
qps: Arc<Option<Qps>>,
server_hostname: Arc<String>,
}
#[derive(Clone, StateData)]
@ -93,6 +95,8 @@ impl LfsServerContext {
.map_err(Error::from)
.context(ErrorKind::HttpClientInitializationFailed)?;
let client = Client::builder().build(connector);
let server_hostname =
Arc::new(get_hostname().unwrap_or_else(|_| "UNKNOWN_HOSTNAME".to_string()));
let inner = LfsServerContextInner {
repositories,
@ -103,6 +107,7 @@ impl LfsServerContext {
config_handle,
logger,
qps: Arc::new(qps),
server_hostname,
};
Ok(LfsServerContext {
@ -118,7 +123,15 @@ impl LfsServerContext {
host: String,
method: LfsMethod,
) -> Result<RepositoryRequestContext, LfsServerContextErrorKind> {
let (repo, client, server, always_wait_for_upstream, max_upload_size, config) = {
let (
repo,
client,
server,
always_wait_for_upstream,
max_upload_size,
config,
server_hostname,
) = {
let inner = self.inner.lock().expect("poisoned lock");
match inner.repositories.get(&repository) {
@ -129,6 +142,7 @@ impl LfsServerContext {
inner.always_wait_for_upstream,
inner.max_upload_size,
inner.config_handle.get(),
inner.server_hostname.clone(),
),
None => {
return Err(LfsServerContextErrorKind::RepositoryDoesNotExist(
@ -150,6 +164,7 @@ impl LfsServerContext {
repository,
server,
host,
server_hostname,
},
client: HttpClient::Enabled(client),
config,
@ -398,6 +413,7 @@ pub struct UriBuilder {
pub repository: String,
pub server: Arc<ServerUris>,
pub host: String,
pub server_hostname: Arc<String>,
}
impl UriBuilder {
@ -412,15 +428,18 @@ impl UriBuilder {
pub fn upload_uri(&self, object: &RequestObject) -> Result<Uri, ErrorKind> {
self.pick_uri()?
.build(format_args!(
"{}/upload/{}/{}",
&self.repository, object.oid, object.size
"{}/upload/{}/{}?server_hostname={}",
&self.repository, object.oid, object.size, self.server_hostname,
))
.map_err(|e| ErrorKind::UriBuilderFailed("upload_uri", e))
}
pub fn download_uri(&self, content_id: &ContentId) -> Result<Uri, ErrorKind> {
self.pick_uri()?
.build(format_args!("{}/download/{}", &self.repository, content_id))
.build(format_args!(
"{}/download/{}?server_hostname={}",
&self.repository, content_id, self.server_hostname
))
.map_err(|e| ErrorKind::UriBuilderFailed("download_uri", e))
}
@ -431,8 +450,8 @@ impl UriBuilder {
) -> Result<Uri, ErrorKind> {
self.pick_uri()?
.build(format_args!(
"{}/download/{}?routing={}",
&self.repository, content_id, routing_key
"{}/download/{}?routing={}&server_hostname={}",
&self.repository, content_id, routing_key, self.server_hostname
))
.map_err(|e| ErrorKind::UriBuilderFailed("consistent_download_uri", e))
}
@ -533,6 +552,7 @@ mod test {
const ONES_HASH: &str = "1111111111111111111111111111111111111111111111111111111111111111";
const TWOS_HASH: &str = "2222222222222222222222222222222222222222222222222222222222222222";
const SIZE: u64 = 123;
const SERVER_HOSTNAME: &str = "mzr.re";
pub fn uri_builder(
self_uris: Vec<&str>,
@ -547,6 +567,7 @@ mod test {
repository: "repo123".to_string(),
server: Arc::new(server),
host,
server_hostname: Arc::new(SERVER_HOSTNAME.to_string()),
})
}
@ -636,6 +657,14 @@ mod test {
Sha256::from_str(TWOS_HASH)
}
fn build_url(base: &str, hash: &str, size: Option<u64>) -> String {
if let Some(size) = size {
format!("{base}/{hash}/{size}?server_hostname={SERVER_HOSTNAME}")
} else {
format!("{base}/{hash}?server_hostname={SERVER_HOSTNAME}")
}
}
#[test]
fn test_basic_upload_uri() -> Result<(), Error> {
let b = uri_builder(
@ -645,7 +674,7 @@ mod test {
)?;
assert_eq!(
b.upload_uri(&obj()?)?.to_string(),
format!("http://foo.com/repo123/upload/{}/{}", ONES_HASH, SIZE),
build_url("http://foo.com/repo123/upload", ONES_HASH, Some(SIZE)),
);
Ok(())
}
@ -659,7 +688,7 @@ mod test {
)?;
assert_eq!(
b.upload_uri(&obj()?)?.to_string(),
format!("http://foo.com/repo123/upload/{}/{}", ONES_HASH, SIZE),
build_url("http://foo.com/repo123/upload", ONES_HASH, Some(SIZE)),
);
Ok(())
}
@ -673,7 +702,7 @@ mod test {
)?;
assert_eq!(
b.upload_uri(&obj()?)?.to_string(),
format!("http://foo.com/bar/repo123/upload/{}/{}", ONES_HASH, SIZE),
build_url("http://foo.com/bar/repo123/upload", ONES_HASH, Some(SIZE)),
);
Ok(())
}
@ -687,7 +716,8 @@ mod test {
)?;
assert_eq!(
b.upload_uri(&obj()?)?.to_string(),
format!("http://foo.com/bar/repo123/upload/{}/{}", ONES_HASH, SIZE),
//format!("http://foo.com/bar/repo123/upload/{}/{}", ONES_HASH, SIZE),
build_url("http://foo.com/bar/repo123/upload", ONES_HASH, Some(SIZE)),
);
Ok(())
}
@ -701,7 +731,7 @@ mod test {
)?;
assert_eq!(
b.download_uri(&content_id()?)?.to_string(),
format!("http://foo.com/repo123/download/{}", ONES_HASH),
build_url("http://foo.com/repo123/download", ONES_HASH, None),
);
Ok(())
}
@ -715,7 +745,7 @@ mod test {
)?;
assert_eq!(
b.download_uri(&content_id()?)?.to_string(),
format!("http://foo.com/repo123/download/{}", ONES_HASH),
build_url("http://foo.com/repo123/download", ONES_HASH, None),
);
Ok(())
}
@ -729,7 +759,7 @@ mod test {
)?;
assert_eq!(
b.download_uri(&content_id()?)?.to_string(),
format!("http://foo.com/bar/repo123/download/{}", ONES_HASH),
build_url("http://foo.com/bar/repo123/download", ONES_HASH, None),
);
Ok(())
}
@ -743,7 +773,7 @@ mod test {
)?;
assert_eq!(
b.download_uri(&content_id()?)?.to_string(),
format!("http://foo.com/bar/repo123/download/{}", ONES_HASH),
build_url("http://foo.com/bar/repo123/download", ONES_HASH, None),
);
Ok(())
}
@ -759,8 +789,8 @@ mod test {
b.consistent_download_uri(&content_id()?, format!("{}", oid()?))?
.to_string(),
format!(
"http://foo.com/repo123/download/{}?routing={}",
ONES_HASH, TWOS_HASH
"http://foo.com/repo123/download/{}?routing={}&server_hostname={}",
ONES_HASH, TWOS_HASH, SERVER_HOSTNAME
),
);
Ok(())

View File

@ -45,7 +45,7 @@
# Make sure we get a normal download URL
$ curl -s --data-binary @batch.json "$LFS_URI/objects/batch" | jq ".objects[0].actions.download.href"
"http://$LOCALIP:*/repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d" (glob)
"http://$LOCALIP:*/repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=*" (glob)
# Update the config to enable consistent routing
$ sed -i 's/"enable_consistent_routing": false/"enable_consistent_routing": true/g' "$LIVE_CONFIG"
@ -55,7 +55,7 @@
# Make sure we get a normal download URL
$ curl -s --data-binary @batch.json "$LFS_URI/objects/batch" | jq ".objects[0].actions.download.href"
"http://$LOCALIP:*/repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746" (glob)
"http://$LOCALIP:*/repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746?server_hostname=*" (glob)
# Make sure we can read it back
$ hg --config extensions.lfs= debuglfsreceive ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746 2048 "$LFS_URI" | sha256sum
@ -63,5 +63,5 @@
# Verify that we used the consistent URL
$ tail -n 2 "$LFS_LOG"
IN > GET /repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746 -
OUT < GET /repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746 200 OK
IN > GET /repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746&server_hostname=* - (glob)
OUT < GET /repo1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?routing=ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746&server_hostname=* 200 OK (glob)

View File

@ -48,8 +48,8 @@
$ cat "$log_proxy"
IN > POST /lfs_repo/objects/batch -
OUT < POST /lfs_repo/objects/batch 200 OK
IN > GET /lfs_repo/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d -
OUT < GET /lfs_repo/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d 200 OK
IN > GET /lfs_repo/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* - (glob)
OUT < GET /lfs_repo/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* 200 OK (glob)
$ wait_for_json_record_count "$scuba_proxy" 2
$ jq -S .normal.batch_order < "$scuba_proxy"

View File

@ -45,8 +45,8 @@
$ cat "$log_upstream"
IN > POST /lfs1/objects/batch -
OUT < POST /lfs1/objects/batch 200 OK
IN > GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d -
OUT < GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d 200 OK
IN > GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* - (glob)
OUT < GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* 200 OK (glob)
$ cat "$log_proxy" >> "$log_proxy.saved"
$ cat "$log_upstream" >> "$log_upstream.saved"
$ truncate -s 0 "$log_proxy" "$log_upstream"
@ -62,8 +62,8 @@
$ cat "$log_upstream"
IN > POST /lfs1/objects/batch -
OUT < POST /lfs1/objects/batch 200 OK
IN > PUT /lfs1/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 -
OUT < PUT /lfs1/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 200 OK
IN > PUT /lfs1/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* - (glob)
OUT < PUT /lfs1/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* 200 OK (glob)
$ cat "$log_proxy" >> "$log_proxy.saved"
$ cat "$log_upstream" >> "$log_upstream.saved"
$ truncate -s 0 "$log_proxy" "$log_upstream"

View File

@ -28,8 +28,8 @@
$ cat "$log_upstream"
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > PUT /lfs_upstream/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 -
OUT < PUT /lfs_upstream/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 200 OK
IN > PUT /lfs_upstream/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* - (glob)
OUT < PUT /lfs_upstream/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* 200 OK (glob)
$ truncate -s 0 "$log_proxy" "$log_upstream"
@ -44,8 +44,8 @@
$ cat "$log_upstream"
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > GET /lfs_upstream/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d -
OUT < GET /lfs_upstream/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d 200 OK
IN > GET /lfs_upstream/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* - (glob)
OUT < GET /lfs_upstream/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* 200 OK (glob)
$ truncate -s 0 "$log_proxy" "$log_upstream"
@ -56,8 +56,8 @@
$ cat "$log_proxy"
IN > POST /lfs_proxy/objects/batch -
OUT < POST /lfs_proxy/objects/batch 200 OK
IN > PUT /lfs_proxy/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 -
OUT < PUT /lfs_proxy/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 200 OK
IN > PUT /lfs_proxy/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* - (glob)
OUT < PUT /lfs_proxy/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* 200 OK (glob)
$ cat "$log_upstream"
IN > POST /lfs_upstream/objects/batch -
@ -76,16 +76,16 @@
$ cat "$log_proxy"
IN > POST /lfs_proxy/objects/batch -
OUT < POST /lfs_proxy/objects/batch 200 OK
IN > PUT /lfs_proxy/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 -
OUT < PUT /lfs_proxy/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 200 OK
IN > PUT /lfs_proxy/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* - (glob)
OUT < PUT /lfs_proxy/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* 200 OK (glob)
$ cat "$log_upstream"
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > PUT /lfs_upstream/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 -
OUT < PUT /lfs_upstream/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048 200 OK
IN > PUT /lfs_upstream/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* - (glob)
OUT < PUT /lfs_upstream/upload/a1bcf2c963bec9588aaa30bd33ef07873792e3ec241453b0d21635d1c4bbae84/2048?server_hostname=* 200 OK (glob)
# Proper user agent should be sent to proxy.
$ wait_for_json_record_count "$SCUBA" 3
@ -104,15 +104,15 @@
$ cat "$log_proxy"
IN > POST /lfs_proxy/objects/batch -
OUT < POST /lfs_proxy/objects/batch 200 OK
IN > GET /lfs_proxy/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c -
OUT < GET /lfs_proxy/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c 200 OK
IN > GET /lfs_proxy/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c?server_hostname=* - (glob)
OUT < GET /lfs_proxy/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c?server_hostname=* 200 OK (glob)
$ cat "$log_upstream"
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > POST /lfs_upstream/objects/batch -
OUT < POST /lfs_upstream/objects/batch 200 OK
IN > GET /lfs_upstream/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c -
OUT < GET /lfs_upstream/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c 200 OK
IN > GET /lfs_upstream/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c?server_hostname=* - (glob)
OUT < GET /lfs_upstream/download/2e8e6e2dda2bb7b6458146a1c1bf301e4856293e1cc258ab789c63df2254693c?server_hostname=* 200 OK (glob)
$ truncate -s 0 "$log_proxy" "$log_upstream"

View File

@ -123,6 +123,7 @@
"http_host": "*", (glob)
"http_method": "PUT",
"http_path": "/lfs1/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048",
"http_query": "server_hostname=*", (glob)
"http_user_agent": "mercurial/* git/*", (glob)
"method": "upload",
"repository": "lfs1",
@ -233,6 +234,7 @@
"http_host": "*", (glob)
"http_method": "GET",
"http_path": "/lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d",
"http_query": "server_hostname=*", (glob)
"http_user_agent": "mercurial/* git/*", (glob)
"method": "download",
"repository": "lfs1",

View File

@ -30,12 +30,12 @@
$ cat "$lfs_log"
IN > POST /lfs1/objects/batch -
OUT < POST /lfs1/objects/batch 200 OK
IN > PUT /lfs1/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 -
OUT < PUT /lfs1/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048 200 OK
IN > PUT /lfs1/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* - (glob)
OUT < PUT /lfs1/upload/ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746/2048?server_hostname=* 200 OK (glob)
IN > POST /lfs1/objects/batch -
OUT < POST /lfs1/objects/batch 200 OK
IN > GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d -
OUT < GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d 200 OK
IN > GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* - (glob)
OUT < GET /lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=* 200 OK (glob)
IN > POST /lfs1/objects/batch -
OUT < POST /lfs1/objects/batch 200 OK
@ -93,5 +93,5 @@
{"message":"Host abcd is not allowlisted","request_id":"*"} (glob)
400 (no-eol)
$ curl -s -w "\n%{http_code}" "${lfs_uri}/objects/batch/" --data-binary "@request"
{"transfer":"basic","objects":[{"oid":"ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746","size":2048,"authenticated":false,"actions":{"download":{"href":"http://$LOCALIP:*/lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d"}}}]} (glob)
{"transfer":"basic","objects":[{"oid":"ab02c2a1923c8eb11cb3ddab70320746d71d32ad63f255698dc67c3295757746","size":2048,"authenticated":false,"actions":{"download":{"href":"http://$LOCALIP:*/lfs1/download/d28548bc21aabf04d143886d717d72375e3deecd0dafb3d110676b70a192cb5d?server_hostname=*"}}}]} (glob)
200 (no-eol)

View File

@ -95,8 +95,8 @@
$ cat "$LFS_LOG"
IN > POST /repo/objects/batch -
OUT < POST /repo/objects/batch 200 OK
IN > PUT /repo/upload/2a49733d725b4e6dfa94410d29da9e64803ff946339c54ecc471eccc951047fe/2000 -
OUT < PUT /repo/upload/2a49733d725b4e6dfa94410d29da9e64803ff946339c54ecc471eccc951047fe/2000 200 OK
IN > PUT /repo/upload/2a49733d725b4e6dfa94410d29da9e64803ff946339c54ecc471eccc951047fe/2000?server_hostname=* - (glob)
OUT < PUT /repo/upload/2a49733d725b4e6dfa94410d29da9e64803ff946339c54ecc471eccc951047fe/2000?server_hostname=* 200 OK (glob)
$ truncate -s 0 "$LFS_LOG"
# Create a new hg repository, and update to the new file
@ -129,8 +129,8 @@
$ cat "$LFS_LOG"
IN > POST /repo/objects/batch -
OUT < POST /repo/objects/batch 200 OK
IN > GET /repo/download/1267b7f944920cc2c6a5d48bcf0996735d3fe984b09d5d3bdbccb710c0b99635 -
OUT < GET /repo/download/1267b7f944920cc2c6a5d48bcf0996735d3fe984b09d5d3bdbccb710c0b99635 200 OK
IN > GET /repo/download/1267b7f944920cc2c6a5d48bcf0996735d3fe984b09d5d3bdbccb710c0b99635?server_hostname=* - (glob)
OUT < GET /repo/download/1267b7f944920cc2c6a5d48bcf0996735d3fe984b09d5d3bdbccb710c0b99635?server_hostname=* 200 OK (glob)
# Check that downloading file by its sha256 works
$ DOWNLOAD_URL="${lfs_uri}/download_sha256/2a49733d725b4e6dfa94410d29da9e64803ff946339c54ecc471eccc951047fe"

View File

@ -46,11 +46,11 @@ Update. Check for multiple requests
$ cat "$lfs_log"
IN > POST /repo/objects/batch -
OUT < POST /repo/objects/batch 200 OK
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 -
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 206 Partial Content
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 -
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 206 Partial Content
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 -
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 206 Partial Content
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 -
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9 206 Partial Content
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* - (glob)
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* 206 Partial Content (glob)
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* - (glob)
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* 206 Partial Content (glob)
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* - (glob)
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* 206 Partial Content (glob)
IN > GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* - (glob)
OUT < GET /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9?server_hostname=* 206 Partial Content (glob)

View File

@ -44,12 +44,10 @@ Update. Check for compression. It shouldn't be used.
76903e148255cbd5ba91d3f47fe04759afcffdf64104977fc83f688892ac0dfd large
$ wait_for_json_record_count "$TESTTMP/scuba.json" 2
$ jq .int.response_content_length < "$TESTTMP/scuba.json"
280|276 (re)
2097152
$ jq .int.response_bytes_sent < "$TESTTMP/scuba.json"
280|276 (re)
2097152
$ jq '(.int.response_content_length|tostring) + " " + .normal.http_path' < "$TESTTMP/scuba.json" | grep "download"
"2097152 /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9"
$ jq '(.int.response_bytes_sent|tostring) + " " + .normal.http_path' < "$TESTTMP/scuba.json" | grep "download"
"2097152 /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9"
$ jq .normal.response_content_encoding < "$TESTTMP/scuba.json"
null
null
@ -71,13 +69,10 @@ Update again. This time, we should have compression.
76903e148255cbd5ba91d3f47fe04759afcffdf64104977fc83f688892ac0dfd large
$ wait_for_json_record_count "$TESTTMP/scuba.json" 2
$ jq .int.response_content_length < "$TESTTMP/scuba.json"
280|276 (re)
null
$ jq .int.response_bytes_sent < "$TESTTMP/scuba.json"
280|276 (re)
202
$ jq '(.int.response_content_length|tostring) + " " + .normal.http_path' < "$TESTTMP/scuba.json" | grep "download"
"null /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9"
$ jq '(.int.response_bytes_sent|tostring) + " " + .normal.http_path' < "$TESTTMP/scuba.json" | grep "download"
"202 /repo/download/ba7c3ab5dd42a490fff73f34356f5f4aa76aaf0b67d14a416bcad80a0ee8d4c9"
$ jq .normal.response_content_encoding < "$TESTTMP/scuba.json"
null
"zstd"
$ truncate -s 0 "$TESTTMP/scuba.json"