remove BlobRepo::get_file_content_metadata

Summary:
- remove BlobRepo::get_file_content_metadata
- and the methods associated with alias resolution

Reviewed By: krallin

Differential Revision: D19722457

fbshipit-source-id: d45e1cbb4ec769426e2ed7c6cc32173e4dbbac57
Pavel Aslanov 2020-02-04 10:23:00 -08:00 committed by Facebook Github Bot
parent 7b30ea88fc
commit 6e7c2ced4b
12 changed files with 69 additions and 100 deletions
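
For downstream callers, the replacement pattern is to call filestore directly instead of the removed BlobRepo wrappers. The sketch below is assembled from the hunks in this commit; the variable names (ctx, repo, content_id) and the format_err miss handling are illustrative assumptions, not part of the diff:

    // Before (removed in this commit): BlobRepo convenience wrapper.
    let meta = repo.get_file_content_metadata(ctx.clone(), content_id);

    // After: go through filestore. get_metadata yields Option<ContentMetadata>,
    // so each call site now chooses its own behavior on a missing blob.
    let meta = filestore::get_metadata(repo.blobstore(), ctx, &content_id.into())
        .and_then(move |meta| {
            meta.ok_or_else(|| format_err!("Content {} not found", content_id))
        });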


@@ -169,9 +169,15 @@ impl EntryWithSizeAndContentHash {
                 })
                 .left_future(),
             HgEntryId::File(_, nodeid) => repo
+                .clone()
                 .get_file_content_id(ctx.clone(), nodeid)
-                .and_then(move |content_id| repo.get_file_content_metadata(ctx, content_id))
+                .and_then(move |content_id| {
+                    filestore::get_metadata(repo.blobstore(), ctx, &content_id.into()).and_then(
+                        move |metadata| {
+                            metadata.ok_or_else(|| format_err!("Entry {} not found", content_id))
+                        },
+                    )
+                })
+                .boxify()
                 .map(|metadata| (metadata.total_size, metadata.sha1))
                 .map({
                     cloned!(r#type, name);


@@ -12,7 +12,7 @@ use crate::derive_hg_manifest::derive_hg_manifest;
 use crate::errors::*;
 use crate::repo_commit::*;
 use anyhow::{format_err, Context, Error};
-use blobstore::{Blobstore, Loadable, LoadableError};
+use blobstore::{Blobstore, Loadable};
 use bonsai_globalrev_mapping::{BonsaiGlobalrevMapping, BonsaisOrGlobalrevs};
 use bonsai_hg_mapping::{BonsaiHgMapping, BonsaiHgMappingEntry, BonsaiOrHgChangesetIds};
 use bookmarks::{
@@ -27,7 +27,7 @@ use cloned::cloned;
 use context::CoreContext;
 use failure_ext::{Compat, FutureFailureErrorExt, FutureFailureExt};
 use filenodes::{FilenodeInfo, Filenodes};
-use filestore::{self, Alias, FetchKey, FilestoreConfig, StoreRequest};
+use filestore::{self, FilestoreConfig, StoreRequest};
 use futures::future::{self, loop_fn, ok, Future, Loop};
 use futures::stream::{self, futures_unordered, FuturesUnordered, Stream};
 use futures::sync::oneshot;
@@ -40,17 +40,16 @@ use maplit::hashmap;
 use mercurial_types::{
     blobs::{
         fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore,
-        fetch_file_content_sha256_from_blobstore, fetch_file_contents,
-        fetch_file_metadata_from_blobstore, ChangesetMetadata, ContentBlobMeta, HgBlobChangeset,
-        HgBlobEntry, HgBlobEnvelope, HgChangesetContent, UploadHgFileContents, UploadHgFileEntry,
+        fetch_file_contents, ChangesetMetadata, ContentBlobMeta, HgBlobChangeset, HgBlobEntry,
+        HgBlobEnvelope, HgChangesetContent, UploadHgFileContents, UploadHgFileEntry,
         UploadHgNodeHash,
     },
     FileBytes, Globalrev, HgChangesetId, HgFileNodeId, HgManifestId, HgNodeHash, HgParents,
     RepoPath, Type,
 };
 use mononoke_types::{
-    hash::Sha256, BlobstoreValue, BonsaiChangeset, ChangesetId, ContentId, ContentMetadata,
-    FileChange, Generation, MPath, RepositoryId, Timestamp,
+    BlobstoreValue, BonsaiChangeset, ChangesetId, ContentId, ContentMetadata, FileChange,
+    Generation, MPath, RepositoryId, Timestamp,
 };
 use phases::{HeadsFetcher, Phases, SqlPhasesFactory};
 use repo_blobstore::{RepoBlobstore, RepoBlobstoreArgs};
@@ -229,52 +228,6 @@ impl BlobRepo {
         fetch_file_content_id_from_blobstore(ctx, &self.blobstore.boxed(), key).boxify()
     }
 
-    pub fn get_file_content_metadata(
-        &self,
-        ctx: CoreContext,
-        key: ContentId,
-    ) -> BoxFuture<ContentMetadata, Error> {
-        fetch_file_metadata_from_blobstore(ctx, &self.blobstore.boxed(), key).boxify()
-    }
-
-    pub fn get_file_content_id_by_sha256(
-        &self,
-        ctx: CoreContext,
-        key: Sha256,
-    ) -> BoxFuture<ContentId, Error> {
-        FetchKey::Aliased(Alias::Sha256(key))
-            .load(ctx, &self.blobstore)
-            .or_else(move |err| match err {
-                LoadableError::Error(err) => Err(err),
-                LoadableError::Missing(_) => Err(ErrorKind::ContentBlobByAliasMissing(key).into()),
-            })
-            .boxify()
-    }
-
-    pub fn get_file_sha256(
-        &self,
-        ctx: CoreContext,
-        content_id: ContentId,
-    ) -> BoxFuture<Sha256, Error> {
-        fetch_file_content_sha256_from_blobstore(ctx, &self.blobstore.boxed(), content_id).boxify()
-    }
-
-    pub fn get_file_content_by_alias(
-        &self,
-        ctx: CoreContext,
-        sha256: Sha256,
-    ) -> BoxStream<FileBytes, Error> {
-        filestore::fetch(
-            &self.blobstore,
-            ctx,
-            &FetchKey::Aliased(Alias::Sha256(sha256)),
-        )
-        .and_then(move |stream| stream.ok_or(ErrorKind::ContentBlobByAliasMissing(sha256).into()))
-        .flatten_stream()
-        .map(FileBytes)
-        .boxify()
-    }
-
     /// Get Mercurial heads, which we approximate as publishing Bonsai Bookmarks.
     pub fn get_heads_maybe_stale(
         &self,


@@ -26,7 +26,7 @@ use slog::{debug, info, Logger};
 use tokio::prelude::stream::iter_ok;
 
 use blobrepo::BlobRepo;
-use blobstore::Storable;
+use blobstore::{Loadable, Storable};
 use changesets::SqlChangesets;
 use cmdlib::{args, helpers::block_execute};
 use context::CoreContext;
@@ -185,8 +185,8 @@ impl AliasVerification {
         content_id: ContentId,
     ) -> impl Future<Item = (), Error = Error> {
         let av = self.clone();
-        self.blobrepo
-            .get_file_content_id_by_sha256(ctx.clone(), alias)
+        FetchKey::from(alias)
+            .load(ctx.clone(), self.blobrepo.blobstore())
             .then(move |result| match result {
                 Ok(content_id_from_blobstore) => av
                     .check_alias_blob(alias, content_id, content_id_from_blobstore)


@@ -183,11 +183,15 @@ fn check_one_file(
         }
     });
 
-    let sha256_check = repo
-        .get_file_sha256(ctx.clone(), file_info.id)
-        .and_then(move |sha256| {
-            repo.get_file_content_id_by_sha256(ctx, sha256)
-                .map(move |id| (sha256, id))
+    let key = filestore::FetchKey::from(file_info.id);
+    let sha256_check = filestore::get_metadata(repo.blobstore(), ctx.clone(), &key)
+        .and_then(move |meta| meta.ok_or(ErrorKind::ContentMissing(key).into()))
+        .boxify() // type is too large
+        .and_then(move |meta| {
+            filestore::FetchKey::from(meta.sha256)
+                .load(ctx, repo.blobstore())
+                .from_err()
+                .map(move |id| (meta.sha256, id))
         })
         .and_then(move |(sha256, new_id)| {
             if new_id != file_info.id {


@@ -8,6 +8,7 @@
 use crate::checks::FileInformation;
 use anyhow::Error;
+use filestore::FetchKey;
 use mercurial_types::HgChangesetId;
 use mononoke_types::{hash::Sha256, ChangesetId, ContentId};
 use thiserror::Error;
@@ -36,4 +37,6 @@ pub enum ErrorKind {
     ParentsMismatch(HgChangesetId),
     #[error("Requesting HG {0} fetched changeset {1}")]
     HgChangesetIdMismatch(HgChangesetId, HgChangesetId),
+    #[error("Missing content {0:?}")]
+    ContentMissing(FetchKey),
 }


@@ -20,6 +20,24 @@ pub enum FetchKey {
     Aliased(Alias),
 }
 
+impl From<ContentId> for FetchKey {
+    fn from(content_id: ContentId) -> Self {
+        FetchKey::Canonical(content_id)
+    }
+}
+
+impl From<Alias> for FetchKey {
+    fn from(alias: Alias) -> Self {
+        FetchKey::Aliased(alias)
+    }
+}
+
+impl From<hash::Sha256> for FetchKey {
+    fn from(hash: hash::Sha256) -> Self {
+        FetchKey::Aliased(Alias::Sha256(hash))
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum Alias {
     Sha1(hash::Sha1),
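
These From impls let a call site build a FetchKey from whichever identifier it already holds and resolve it through blobstore::Loadable, which is the pattern the remaining hunks migrate to. A minimal usage sketch, assuming ctx and repo are in scope and using the old-futures combinators seen throughout this diff:

    use blobstore::Loadable;
    use filestore::FetchKey;

    // Resolve a Sha256 alias to its canonical ContentId. From wraps the hash
    // as FetchKey::Aliased(Alias::Sha256(..)); load fetches the mapping from
    // the blobstore and fails with LoadableError::Missing if it is absent.
    let content_id = FetchKey::from(sha256)
        .load(ctx.clone(), repo.blobstore())
        .from_err();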


@@ -31,7 +31,7 @@ use futures::{
 };
 use futures_ext::{BoxFuture, FutureExt, StreamExt};
 use itertools::Itertools;
-use mononoke_types::{hash::Sha256, ContentId, ContentMetadata};
+use mononoke_types::{hash::Sha256, ContentId};
 use std::{
     collections::HashMap,
     io::Write,
@@ -143,25 +143,6 @@ pub fn fetch_file_content_id_from_blobstore(
         .map({ |envelope| envelope.content_id() })
 }
 
-pub fn fetch_file_metadata_from_blobstore(
-    ctx: CoreContext,
-    blobstore: &Arc<dyn Blobstore>,
-    content_id: ContentId,
-) -> impl Future<Item = ContentMetadata, Error = Error> {
-    filestore::get_metadata(blobstore, ctx, &FetchKey::Canonical(content_id))
-        .and_then(move |aliases| aliases.ok_or(ErrorKind::ContentBlobMissing(content_id).into()))
-        .context("While fetching content metadata")
-        .from_err()
-}
-
-pub fn fetch_file_content_sha256_from_blobstore(
-    ctx: CoreContext,
-    blobstore: &Arc<dyn Blobstore>,
-    content_id: ContentId,
-) -> impl Future<Item = Sha256, Error = Error> {
-    fetch_file_metadata_from_blobstore(ctx, blobstore, content_id).map(|metadata| metadata.sha256)
-}
-
 impl Loadable for HgFileNodeId {
     type Value = HgFileEnvelope;


@@ -16,9 +16,8 @@ pub use errors::ErrorKind;
 pub mod file;
 pub use file::{
-    fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore,
-    fetch_file_content_sha256_from_blobstore, fetch_file_contents,
-    fetch_file_metadata_from_blobstore, File, HgBlobEntry, LFSContent, META_MARKER, META_SZ,
+    fetch_file_content_from_blobstore, fetch_file_content_id_from_blobstore, fetch_file_contents,
+    File, HgBlobEntry, LFSContent, META_MARKER, META_SZ,
 };
 
 mod manifest;


@@ -21,6 +21,7 @@ use blobstore::Loadable;
 use bytes::Bytes;
 use cloned::cloned;
 use context::CoreContext;
+use filestore::FetchKey;
 use futures::{Future, IntoFuture, Stream};
 use futures_ext::{select_all, BoxFuture, FutureExt};
 use mercurial_types::{
@@ -45,6 +46,8 @@ pub enum ErrorKind {
     #[error("Invalid blob kind returned: {kind:?}")]
     InvalidKind { kind: RemotefilelogBlobKind },
+    #[error("Missing content: {0:?}")]
+    MissingContent(FetchKey),
 }
 
 #[derive(Debug)]
@@ -345,8 +348,11 @@ fn prepare_blob(
     } else {
         // For LFS blobs, we'll create the LFS pointer. Note that there is no hg-style
         // metadata encoded for LFS blobs (it's in the LFS pointer instead).
+        let key = FetchKey::from(envelope.content_id());
         let blob_fut = (
-            repo.get_file_sha256(ctx, envelope.content_id()),
+            filestore::get_metadata(repo.blobstore(), ctx, &key).and_then(move |meta| {
+                Ok(meta.ok_or(ErrorKind::MissingContent(key))?.sha256)
+            }),
             File::extract_copied_from(envelope.metadata()).into_future(),
         )
         .into_future()


@@ -21,6 +21,8 @@ use heapsize::HeapSizeOf;
 use quickcheck::{Arbitrary, Gen};
 
 use blobrepo::BlobRepo;
+use blobstore::Loadable;
+use filestore::FetchKey;
 use mercurial_bundles::changegroup::CgDeltaChunk;
 use mercurial_types::{
     blobs::{
@@ -162,7 +164,9 @@ fn generate_lfs_meta_data(
         .into_future()
         .and_then(move |lfs_content| {
             (
-                repo.get_file_content_id_by_sha256(ctx, lfs_content.oid()),
+                FetchKey::from(lfs_content.oid())
+                    .load(ctx, repo.blobstore())
+                    .from_err(),
                 Ok(lfs_content.copy_from()),
                 Ok(lfs_content.size()),
             )


@@ -113,15 +113,15 @@ Verify that if we fail to upload LFS blobs first, the push fails
   remote: Error:
   remote: While resolving Changegroup
   remote: Root cause:
-  remote: ContentBlobByAliasMissing(
-  remote:     Sha256(4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974),
+  remote: Missing(
+  remote:     "alias.sha256.4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974",
   remote: )
   remote: Caused by:
   remote: While uploading File Blobs
   remote: Caused by:
   remote: While decoding delta cache for file id ff714056cdbb88eef0578934980d740a05be8384, path f
   remote: Caused by:
-  remote: Content blob missing for id: 4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974
+  remote: Blob is missing: alias.sha256.4200cad32a33c257258c559e80d19eedb89df109377863c6c16cf8416918b974
   abort: stream ended unexpectedly (got 0 bytes, expected 4)
   [255]


@@ -18,7 +18,7 @@ use context::CoreContext;
 use derived_data::BonsaiDerived;
 use derived_data_filenodes::{FilenodesOnlyPublic, FilenodesOnlyPublicMapping};
 use failure_ext::chain::ChainExt;
-use filestore::{self, Alias, FetchKey};
+use filestore::{self, Alias};
 use futures::{future, Future as OldFuture, Stream as OldStream};
 use futures_ext::{
     bounded_traversal::bounded_traversal_stream, spawn_future, BoxFuture, BoxStream, FutureExt,
@@ -197,16 +197,11 @@ fn file_content_metadata_step(
     enable_derive: bool,
 ) -> BoxFuture<StepOutput, Error> {
     let loader = if enable_derive {
-        repo.get_file_content_metadata(ctx, id)
-            .map(|d| Some(Some(d)))
+        filestore::get_metadata(repo.blobstore(), ctx, &id.into())
+            .map(Some)
             .left_future()
     } else {
-        filestore::get_metadata_readonly(
-            &repo.get_blobstore().boxed(),
-            ctx,
-            &FetchKey::Canonical(id),
-        )
-        .right_future()
+        filestore::get_metadata_readonly(repo.blobstore(), ctx, &id.into()).right_future()
     };
 
     loader