remove BlobRepo::get_file_content_metadata
Summary:
- remove BlobRepo::get_file_content_metadata
- remove the BlobRepo methods associated with alias resolution

Reviewed By: krallin

Differential Revision: D19722457

fbshipit-source-id: d45e1cbb4ec769426e2ed7c6cc32173e4dbbac57
parent 7b30ea88fc
commit 6e7c2ced4b
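A minimal, self-contained sketch of the calling pattern this diff moves to: filestore::get_metadata yields an Option that each call site must turn into an error itself, instead of relying on the removed BlobRepo helper. All types and names below are toy stand-ins, not the real Mononoke APIs.

    // Toy stand-ins; the real types live in Mononoke's `filestore` and `mononoke_types` crates.
    #[derive(Debug, Clone, Copy)]
    struct ContentId(u64);

    #[derive(Debug)]
    struct ContentMetadata {
        total_size: u64,
        sha256: [u8; 32],
    }

    // Shape of the new call sites in this diff: the filestore returns Option<ContentMetadata>,
    // and the caller decides how a missing blob becomes an error.
    fn get_metadata(_key: ContentId) -> Option<ContentMetadata> {
        None // pretend the blob is absent
    }

    fn caller(id: ContentId) -> Result<ContentMetadata, String> {
        // Previously: repo.get_file_content_metadata(ctx, id) resolved straight to metadata.
        // Now the caller maps the missing case explicitly, as the hunks below do with
        // ok_or / ok_or_else.
        get_metadata(id).ok_or_else(|| format!("Entry {:?} not found", id))
    }

    fn main() {
        println!("{:?}", caller(ContentId(1)));
    }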
@@ -169,9 +169,15 @@ impl EntryWithSizeAndContentHash {
                 })
                 .left_future(),
             HgEntryId::File(_, nodeid) => repo
                 .clone()
                 .get_file_content_id(ctx.clone(), nodeid)
-                .and_then(move |content_id| repo.get_file_content_metadata(ctx, content_id))
+                .and_then(move |content_id| {
+                    filestore::get_metadata(repo.blobstore(), ctx, &content_id.into()).and_then(
+                        move |metadata| {
+                            metadata.ok_or_else(|| format_err!("Entry {} not found", content_id))
+                        },
+                    )
+                })
+                .boxify()
                 .map(|metadata| (metadata.total_size, metadata.sha1))
                 .map({
                     cloned!(r#type, name);
@@ -12,7 +12,7 @@ use crate::derive_hg_manifest::derive_hg_manifest;
 use crate::errors::*;
 use crate::repo_commit::*;
 use anyhow::{format_err, Context, Error};
-use blobstore::{Blobstore, Loadable, LoadableError};
+use blobstore::{Blobstore, Loadable};
 use bonsai_globalrev_mapping::{BonsaiGlobalrevMapping, BonsaisOrGlobalrevs};
 use bonsai_hg_mapping::{BonsaiHgMapping, BonsaiHgMappingEntry, BonsaiOrHgChangesetIds};
 use bookmarks::{
@@ -27,7 +27,7 @@ use cloned::cloned;
 use context::CoreContext;
 use failure_ext::{Compat, FutureFailureErrorExt, FutureFailureExt};
 use filenodes::{FilenodeInfo, Filenodes};
-use filestore::{self, Alias, FetchKey, FilestoreConfig, StoreRequest};
+use filestore::{self, FilestoreConfig, StoreRequest};
 use futures::future::{self, loop_fn, ok, Future, Loop};
 use futures::stream::{self, futures_unordered, FuturesUnordered, Stream};
 use futures::sync::oneshot;
@@ -40,17 +40,16 @@ use maplit::hashmap;
 use mercurial_types::{
     blobs::{
         fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore,
-        fetch_file_content_sha256_from_blobstore, fetch_file_contents,
-        fetch_file_metadata_from_blobstore, ChangesetMetadata, ContentBlobMeta, HgBlobChangeset,
-        HgBlobEntry, HgBlobEnvelope, HgChangesetContent, UploadHgFileContents, UploadHgFileEntry,
+        fetch_file_contents, ChangesetMetadata, ContentBlobMeta, HgBlobChangeset, HgBlobEntry,
+        HgBlobEnvelope, HgChangesetContent, UploadHgFileContents, UploadHgFileEntry,
         UploadHgNodeHash,
     },
     FileBytes, Globalrev, HgChangesetId, HgFileNodeId, HgManifestId, HgNodeHash, HgParents,
     RepoPath, Type,
 };
 use mononoke_types::{
-    hash::Sha256, BlobstoreValue, BonsaiChangeset, ChangesetId, ContentId, ContentMetadata,
-    FileChange, Generation, MPath, RepositoryId, Timestamp,
+    BlobstoreValue, BonsaiChangeset, ChangesetId, ContentId, ContentMetadata, FileChange,
+    Generation, MPath, RepositoryId, Timestamp,
 };
 use phases::{HeadsFetcher, Phases, SqlPhasesFactory};
 use repo_blobstore::{RepoBlobstore, RepoBlobstoreArgs};
@@ -229,52 +228,6 @@ impl BlobRepo {
         fetch_file_content_id_from_blobstore(ctx, &self.blobstore.boxed(), key).boxify()
     }
 
-    pub fn get_file_content_metadata(
-        &self,
-        ctx: CoreContext,
-        key: ContentId,
-    ) -> BoxFuture<ContentMetadata, Error> {
-        fetch_file_metadata_from_blobstore(ctx, &self.blobstore.boxed(), key).boxify()
-    }
-
-    pub fn get_file_content_id_by_sha256(
-        &self,
-        ctx: CoreContext,
-        key: Sha256,
-    ) -> BoxFuture<ContentId, Error> {
-        FetchKey::Aliased(Alias::Sha256(key))
-            .load(ctx, &self.blobstore)
-            .or_else(move |err| match err {
-                LoadableError::Error(err) => Err(err),
-                LoadableError::Missing(_) => Err(ErrorKind::ContentBlobByAliasMissing(key).into()),
-            })
-            .boxify()
-    }
-
-    pub fn get_file_sha256(
-        &self,
-        ctx: CoreContext,
-        content_id: ContentId,
-    ) -> BoxFuture<Sha256, Error> {
-        fetch_file_content_sha256_from_blobstore(ctx, &self.blobstore.boxed(), content_id).boxify()
-    }
-
-    pub fn get_file_content_by_alias(
-        &self,
-        ctx: CoreContext,
-        sha256: Sha256,
-    ) -> BoxStream<FileBytes, Error> {
-        filestore::fetch(
-            &self.blobstore,
-            ctx,
-            &FetchKey::Aliased(Alias::Sha256(sha256)),
-        )
-        .and_then(move |stream| stream.ok_or(ErrorKind::ContentBlobByAliasMissing(sha256).into()))
-        .flatten_stream()
-        .map(FileBytes)
-        .boxify()
-    }
-
     /// Get Mercurial heads, which we approximate as publishing Bonsai Bookmarks.
     pub fn get_heads_maybe_stale(
         &self,
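The removed helpers above folded alias resolution and missing-blob handling into BlobRepo. A rough stand-alone sketch of the lookup shape call sites now express directly (toy types and a synchronous load, not the real blobstore/filestore signatures):

    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct Sha256([u8; 32]);

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    enum FetchKey {
        Canonical(u64),
        Aliased(Sha256),
    }

    #[derive(Debug)]
    enum LoadError {
        Missing(FetchKey),
        Other(String),
    }

    struct Blobstore {
        by_alias: HashMap<Sha256, u64>,
    }

    impl Blobstore {
        // Analogous to the removed get_file_content_id_by_sha256: resolve the alias and
        // distinguish "missing" from other failures, as the .or_else over LoadableError did.
        fn load(&self, key: FetchKey) -> Result<u64, LoadError> {
            match key {
                FetchKey::Canonical(id) => Ok(id),
                FetchKey::Aliased(sha) => {
                    self.by_alias.get(&sha).copied().ok_or(LoadError::Missing(key))
                }
            }
        }
    }

    fn main() {
        let store = Blobstore { by_alias: HashMap::new() };
        println!("{:?}", store.load(FetchKey::Aliased(Sha256([0; 32]))));
    }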
@@ -26,7 +26,7 @@ use slog::{debug, info, Logger};
 use tokio::prelude::stream::iter_ok;
 
 use blobrepo::BlobRepo;
-use blobstore::Storable;
+use blobstore::{Loadable, Storable};
 use changesets::SqlChangesets;
 use cmdlib::{args, helpers::block_execute};
 use context::CoreContext;
@@ -185,8 +185,8 @@ impl AliasVerification {
         content_id: ContentId,
     ) -> impl Future<Item = (), Error = Error> {
         let av = self.clone();
-        self.blobrepo
-            .get_file_content_id_by_sha256(ctx.clone(), alias)
+        FetchKey::from(alias)
+            .load(ctx.clone(), self.blobrepo.blobstore())
             .then(move |result| match result {
                 Ok(content_id_from_blobstore) => av
                     .check_alias_blob(alias, content_id, content_id_from_blobstore)
@@ -183,11 +183,15 @@ fn check_one_file(
             }
         });
 
-    let sha256_check = repo
-        .get_file_sha256(ctx.clone(), file_info.id)
-        .and_then(move |sha256| {
-            repo.get_file_content_id_by_sha256(ctx, sha256)
-                .map(move |id| (sha256, id))
+    let key = filestore::FetchKey::from(file_info.id);
+    let sha256_check = filestore::get_metadata(repo.blobstore(), ctx.clone(), &key)
+        .and_then(move |meta| meta.ok_or(ErrorKind::ContentMissing(key).into()))
+        .boxify() // type is too large
+        .and_then(move |meta| {
+            filestore::FetchKey::from(meta.sha256)
+                .load(ctx, repo.blobstore())
+                .from_err()
+                .map(move |id| (meta.sha256, id))
         })
         .and_then(move |(sha256, new_id)| {
             if new_id != file_info.id {
@@ -8,6 +8,7 @@
 
 use crate::checks::FileInformation;
 use anyhow::Error;
+use filestore::FetchKey;
 use mercurial_types::HgChangesetId;
 use mononoke_types::{hash::Sha256, ChangesetId, ContentId};
 use thiserror::Error;
@@ -36,4 +37,6 @@ pub enum ErrorKind {
     ParentsMismatch(HgChangesetId),
     #[error("Requesting HG {0} fetched changeset {1}")]
     HgChangesetIdMismatch(HgChangesetId, HgChangesetId),
+    #[error("Missing content {0:?}")]
+    ContentMissing(FetchKey),
 }
@@ -20,6 +20,24 @@ pub enum FetchKey {
     Aliased(Alias),
 }
 
+impl From<ContentId> for FetchKey {
+    fn from(content_id: ContentId) -> Self {
+        FetchKey::Canonical(content_id)
+    }
+}
+
+impl From<Alias> for FetchKey {
+    fn from(alias: Alias) -> Self {
+        FetchKey::Aliased(alias)
+    }
+}
+
+impl From<hash::Sha256> for FetchKey {
+    fn from(hash: hash::Sha256) -> Self {
+        FetchKey::Aliased(Alias::Sha256(hash))
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum Alias {
     Sha1(hash::Sha1),
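The From conversions above are small enough to reproduce almost verbatim. A self-contained toy version, with stand-in ContentId/Sha256 types replacing mononoke_types, showing why call sites elsewhere in this diff can write `&id.into()` or `FetchKey::from(alias)`:

    // Toy stand-ins for the real mononoke_types; only the From impls mirror the diff.
    #[derive(Debug, Clone, Copy)]
    struct ContentId(u64);

    #[derive(Debug, Clone, Copy)]
    struct Sha256([u8; 4]);

    #[derive(Debug, Clone, Copy)]
    enum Alias {
        Sha256(Sha256),
    }

    #[derive(Debug, Clone, Copy)]
    enum FetchKey {
        Canonical(ContentId),
        Aliased(Alias),
    }

    impl From<ContentId> for FetchKey {
        fn from(content_id: ContentId) -> Self {
            FetchKey::Canonical(content_id)
        }
    }

    impl From<Alias> for FetchKey {
        fn from(alias: Alias) -> Self {
            FetchKey::Aliased(alias)
        }
    }

    impl From<Sha256> for FetchKey {
        fn from(hash: Sha256) -> Self {
            FetchKey::Aliased(Alias::Sha256(hash))
        }
    }

    fn main() {
        let id = ContentId(42);
        // This is what lets call sites write `&id.into()` or `FetchKey::from(alias)`.
        let key: FetchKey = id.into();
        println!("{:?}", key);
    }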
@@ -31,7 +31,7 @@ use futures::{
 };
 use futures_ext::{BoxFuture, FutureExt, StreamExt};
 use itertools::Itertools;
-use mononoke_types::{hash::Sha256, ContentId, ContentMetadata};
+use mononoke_types::{hash::Sha256, ContentId};
 use std::{
     collections::HashMap,
     io::Write,
@@ -143,25 +143,6 @@ pub fn fetch_file_content_id_from_blobstore(
         .map({ |envelope| envelope.content_id() })
 }
 
-pub fn fetch_file_metadata_from_blobstore(
-    ctx: CoreContext,
-    blobstore: &Arc<dyn Blobstore>,
-    content_id: ContentId,
-) -> impl Future<Item = ContentMetadata, Error = Error> {
-    filestore::get_metadata(blobstore, ctx, &FetchKey::Canonical(content_id))
-        .and_then(move |aliases| aliases.ok_or(ErrorKind::ContentBlobMissing(content_id).into()))
-        .context("While fetching content metadata")
-        .from_err()
-}
-
-pub fn fetch_file_content_sha256_from_blobstore(
-    ctx: CoreContext,
-    blobstore: &Arc<dyn Blobstore>,
-    content_id: ContentId,
-) -> impl Future<Item = Sha256, Error = Error> {
-    fetch_file_metadata_from_blobstore(ctx, blobstore, content_id).map(|metadata| metadata.sha256)
-}
-
 impl Loadable for HgFileNodeId {
     type Value = HgFileEnvelope;
 
@@ -16,9 +16,8 @@ pub use errors::ErrorKind;
 
 pub mod file;
 pub use file::{
-    fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore,
-    fetch_file_content_sha256_from_blobstore, fetch_file_contents,
-    fetch_file_metadata_from_blobstore, File, HgBlobEntry, LFSContent, META_MARKER, META_SZ,
+    fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore, fetch_file_contents,
+    File, HgBlobEntry, LFSContent, META_MARKER, META_SZ,
 };
 
 mod manifest;
@@ -21,6 +21,7 @@ use blobstore::Loadable;
 use bytes::Bytes;
 use cloned::cloned;
 use context::CoreContext;
+use filestore::FetchKey;
 use futures::{Future, IntoFuture, Stream};
 use futures_ext::{select_all, BoxFuture, FutureExt};
 use mercurial_types::{
@@ -45,6 +46,8 @@ pub enum ErrorKind {
 
     #[error("Invalid blob kind returned: {kind:?}")]
     InvalidKind { kind: RemotefilelogBlobKind },
+    #[error("Missing content: {0:?}")]
+    MissingContent(FetchKey),
 }
 
 #[derive(Debug)]
@@ -345,8 +348,11 @@ fn prepare_blob(
     } else {
         // For LFS blobs, we'll create the LFS pointer. Note that there is no hg-style
         // metadata encoded for LFS blobs (it's in the LFS pointer instead).
+        let key = FetchKey::from(envelope.content_id());
         let blob_fut = (
-            repo.get_file_sha256(ctx, envelope.content_id()),
+            filestore::get_metadata(repo.blobstore(), ctx, &key).and_then(move |meta| {
+                Ok(meta.ok_or(ErrorKind::MissingContent(key))?.sha256)
+            }),
             File::extract_copied_from(envelope.metadata()).into_future(),
         )
             .into_future()
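The LFS branch above replaces repo.get_file_sha256 with a metadata fetch plus an explicit missing-content error. A tiny sketch of that Option-to-Result projection, using toy types rather than the real filestore metadata:

    #[derive(Debug)]
    struct Meta {
        sha256: [u8; 32],
    }

    #[derive(Debug)]
    enum ErrorKind {
        MissingContent(u64),
    }

    fn sha256_of(meta: Option<Meta>, key: u64) -> Result<[u8; 32], ErrorKind> {
        // Mirrors `Ok(meta.ok_or(ErrorKind::MissingContent(key))?.sha256)` from the hunk above.
        Ok(meta.ok_or(ErrorKind::MissingContent(key))?.sha256)
    }

    fn main() {
        println!("{:?}", sha256_of(None, 7));
    }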
@@ -21,6 +21,8 @@ use heapsize::HeapSizeOf;
 use quickcheck::{Arbitrary, Gen};
 
 use blobrepo::BlobRepo;
+use blobstore::Loadable;
+use filestore::FetchKey;
 use mercurial_bundles::changegroup::CgDeltaChunk;
 use mercurial_types::{
     blobs::{
@@ -162,7 +164,9 @@ fn generate_lfs_meta_data(
         .into_future()
         .and_then(move |lfs_content| {
             (
-                repo.get_file_content_id_by_sha256(ctx, lfs_content.oid()),
+                FetchKey::from(lfs_content.oid())
+                    .load(ctx, repo.blobstore())
+                    .from_err(),
                 Ok(lfs_content.copy_from()),
                 Ok(lfs_content.size()),
             )
@@ -113,15 +113,15 @@ Verify that if we fail to upload LFS blobs first, the push fails
   remote: Error:
   remote:   While resolving Changegroup
   remote: Root cause:
-  remote:   ContentBlobByAliasMissing(
-  remote:       Sha256(4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974),
+  remote:   Missing(
+  remote:       "alias.sha256.4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974",
   remote:   )
   remote: Caused by:
   remote:   While uploading File Blobs
   remote: Caused by:
   remote:   While decoding delta cache for file id ff714056cdbb88eef0578934980d740a05be8384, path f
   remote: Caused by:
-  remote:   Content blob missing for id: 4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974
+  remote:   Blob is missing: alias.sha256.4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974
   abort: stream ended unexpectedly (got 0 bytes, expected 4)
   [255]
 
@@ -18,7 +18,7 @@ use context::CoreContext;
 use derived_data::BonsaiDerived;
 use derived_data_filenodes::{FilenodesOnlyPublic, FilenodesOnlyPublicMapping};
 use failure_ext::chain::ChainExt;
-use filestore::{self, Alias, FetchKey};
+use filestore::{self, Alias};
 use futures::{future, Future as OldFuture, Stream as OldStream};
 use futures_ext::{
     bounded_traversal::bounded_traversal_stream, spawn_future, BoxFuture, BoxStream, FutureExt,
@@ -197,16 +197,11 @@ fn file_content_metadata_step(
     enable_derive: bool,
 ) -> BoxFuture<StepOutput, Error> {
     let loader = if enable_derive {
-        repo.get_file_content_metadata(ctx, id)
-            .map(|d| Some(Some(d)))
+        filestore::get_metadata(repo.blobstore(), ctx, &id.into())
+            .map(Some)
             .left_future()
     } else {
-        filestore::get_metadata_readonly(
-            &repo.get_blobstore().boxed(),
-            ctx,
-            &FetchKey::Canonical(id),
-        )
-        .right_future()
+        filestore::get_metadata_readonly(repo.blobstore(), ctx, &id.into()).right_future()
     };
 
     loader
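For readers unfamiliar with the left_future()/right_future() calls above (from futures_ext, used to give the two branches a single type), here is a rough analogy written against the futures 0.3 crate. The crate choice and names are assumptions for illustration only; the walker itself is on futures 0.1.

    use futures::future::{self, Either, FutureExt};

    async fn load_metadata(enable_derive: bool) -> Option<u64> {
        // The two branches have different concrete future types; Either unifies them,
        // which is what left_future()/right_future() achieve in the walker code above.
        let loader = if enable_derive {
            Either::Left(future::ready(Some(1u64)))
        } else {
            Either::Right(async { None::<u64> }.boxed())
        };
        loader.await
    }

    fn main() {
        let result = futures::executor::block_on(load_metadata(true));
        println!("{:?}", result);
    }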