remove unneeded BlobRepo::*{store|fetch} methods

Summary: `BlobRepo::*{store|fetch}` methods are subsumed by `Storable|Loadable`

Reviewed By: krallin

Differential Revision: D17133306

fbshipit-source-id: 6379aea6335d57f0d90a9669ba5ef0300f82a399
This commit is contained in:
Pavel Aslanov 2019-08-30 13:25:06 -07:00 committed by Facebook Github Bot
parent 3301714eb9
commit f43b19c431
6 changed files with 44 additions and 126 deletions

View File

@ -6,6 +6,7 @@
//! Utilities to generate reasonably looking stack of changesets
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobstore::Storable;
use context::CoreContext;
use failure::{err_msg, Error};
use futures::{future, stream, Future, Stream};
@ -113,16 +114,13 @@ impl GenManifest {
}
Some(content) => {
let content = FileContents::new_bytes(content);
store_changes.push(repo.unittest_store(ctx.clone(), content.clone()));
let size = content.size();
let blob = content.into_blob();
let id = *blob.id();
store_changes.push(blob.store(ctx.clone(), &repo.get_blobstore()));
file_changes.insert(
path,
Some(FileChange::new(
*content.into_blob().id(),
FileType::Regular,
size as u64,
None,
)),
Some(FileChange::new(id, FileType::Regular, size as u64, None)),
);
}
}

View File

@ -10,7 +10,7 @@ use crate::derive_hg_manifest::derive_hg_manifest;
use crate::errors::*;
use crate::filenode_lookup::{lookup_filenode_id, store_filenode_id, FileNodeIdPointer};
use crate::repo_commit::*;
use blobstore::{Blobstore, Loadable, LoadableError, Storable};
use blobstore::{Blobstore, Loadable, LoadableError};
use bonsai_hg_mapping::{BonsaiHgMapping, BonsaiHgMappingEntry, BonsaiOrHgChangesetIds};
use bookmarks::{
self, Bookmark, BookmarkName, BookmarkPrefix, BookmarkUpdateReason, Bookmarks, Freshness,
@ -224,48 +224,6 @@ impl BlobRepo {
)
}
/// Private helper: load a value from this repo's blobstore by its `Loadable` id,
/// converting the `LoadableError` into the crate-wide `Error` via `from_err`.
/// Subsumed by calling `Loadable::load` directly; removed in this diff.
fn fetch<Id, V>(&self, ctx: CoreContext, id: Id) -> impl Future<Item = V, Error = Error> + Send
where
Id: Loadable<Value = V> + ::std::fmt::Debug + Send,
V: Send,
{
id.load(ctx, &self.blobstore).from_err()
}
// This is supposed to be used only from unit tests: a public wrapper over the
// private `fetch` helper so test code outside this module can load blobs by id.
pub fn unittest_fetch<Id, V>(
&self,
ctx: CoreContext,
id: Id,
) -> impl Future<Item = V, Error = Error> + Send
where
Id: Loadable<Value = V> + ::std::fmt::Debug + Send,
V: Send,
{
// Pure delegation — no extra behavior beyond `fetch`.
self.fetch(ctx, id)
}
/// Private helper: convert a `BlobstoreValue` into its blob form and store it in
/// this repo's blobstore, yielding the resulting key.
/// Subsumed by `value.into_blob().store(...)` at call sites; removed in this diff.
fn store<K, V>(&self, ctx: CoreContext, value: V) -> impl Future<Item = K, Error = Error> + Send
where
V: BlobstoreValue<Key = K>,
Blob<K>: Storable<Key = K>,
{
value.into_blob().store(ctx, &self.blobstore)
}
// This is supposed to be used only from unit tests: a public wrapper over the
// private `store` helper so test code outside this module can write blobs.
pub fn unittest_store<K, V>(
&self,
ctx: CoreContext,
value: V,
) -> impl Future<Item = K, Error = Error> + Send
where
V: BlobstoreValue<Key = K>,
Blob<K>: Storable<Key = K>,
{
// Pure delegation — no extra behavior beyond `store`.
self.store(ctx, value)
}
pub fn get_file_content(
&self,
ctx: CoreContext,
@ -781,7 +739,7 @@ impl BlobRepo {
bonsai_cs_id: ChangesetId,
) -> BoxFuture<BonsaiChangeset, Error> {
STATS::get_bonsai_changeset.add_value(1);
self.fetch(ctx, bonsai_cs_id).boxify()
bonsai_cs_id.load(ctx, &self.blobstore).from_err().boxify()
}
// TODO(stash): make it accept ChangesetId
@ -1549,8 +1507,9 @@ impl BlobRepo {
visited,
)))
.left_future(),
None => repo
.fetch(ctx.clone(), bcs_id)
None => bcs_id
.load(ctx.clone(), &repo.get_blobstore())
.from_err()
.map(move |bcs| {
commits_to_generate.push(bcs.clone());
queue.extend(bcs.parents().filter(|p| visited.insert(*p)));

View File

@ -14,7 +14,7 @@ use blobrepo::{
compute_changed_files, BlobRepo, ContentBlobMeta, UploadHgFileContents, UploadHgFileEntry,
UploadHgNodeHash,
};
use blobstore::{Loadable, Storable};
use blobstore::Storable;
use cloned::cloned;
use context::CoreContext;
use failure_ext::Error;
@ -30,16 +30,13 @@ use mercurial_types::{
use mercurial_types_mocks::nodehash::ONES_FNID;
use mononoke_types::bonsai_changeset::BonsaiChangesetMut;
use mononoke_types::{
blob::BlobstoreValue, Blob, BonsaiChangeset, ChangesetId, ContentId, DateTime, FileChange,
FileContents,
blob::BlobstoreValue, BonsaiChangeset, ChangesetId, DateTime, FileChange, FileContents,
};
use quickcheck::{quickcheck, Arbitrary, Gen, TestResult, Testable};
use rand::{distributions::Normal, SeedableRng};
use rand_xorshift::XorShiftRng;
use std::{
collections::{BTreeMap, HashMap, HashSet},
iter::FromIterator,
marker::PhantomData,
sync::Arc,
};
use tests_utils::{create_commit, store_files};
@ -578,53 +575,6 @@ test_both_repotypes!(
check_linknode_creation_eager
);
/// quickcheck harness pairing a repo with a key type `K` for store/fetch
/// round-trip testing. `PhantomData<K>` records the key type without storing
/// a `K` value. Removed in this diff along with the unittest_store/fetch API.
struct StoreFetchTestable<K> {
repo: BlobRepo,
_key: PhantomData<K>,
}
impl<K> StoreFetchTestable<K> {
/// Build a testable from a borrowed repo; clones the repo handle.
fn new(repo: &BlobRepo) -> Self {
StoreFetchTestable {
repo: repo.clone(),
_key: PhantomData,
}
}
}
/// quickcheck `Testable` impl: for an arbitrary value of `V`, store it via
/// `unittest_store`, fetch it back by the returned key via `unittest_fetch`,
/// and pass iff the fetched value equals the original (round-trip property).
impl<K, V> Testable for StoreFetchTestable<K>
where
K: Loadable<Value = V> + ::std::fmt::Debug + Send,
V: BlobstoreValue<Key = K> + PartialEq + Arbitrary + Send,
Blob<K>: Storable<Key = K>,
{
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
let ctx = CoreContext::test_mock();
// Generate a random instance of the value type under test.
let value = <V as Arbitrary>::arbitrary(g);
// Keep a copy to compare against after the round trip.
let value_cloned = value.clone();
let store_fetch_future = self
.repo
.unittest_store(ctx.clone(), value)
.and_then({
cloned!(ctx, self.repo);
move |key| repo.unittest_fetch(ctx, key)
})
.map(move |value_fetched| TestResult::from_bool(value_fetched == value_cloned));
// Storing/fetching a valid mononoke type is expected to succeed;
// a storage error here is a test failure, hence the expect.
run_future(store_fetch_future).expect("valid mononoke type")
}
}
/// Run the store/fetch round-trip property for two concrete key types:
/// `ChangesetId` and `ContentId`.
fn store_fetch_mononoke_types(repo: BlobRepo) {
quickcheck(StoreFetchTestable::<ChangesetId>::new(&repo));
quickcheck(StoreFetchTestable::<ContentId>::new(&repo));
}
// Instantiate the property test for both repo flavors (lazy and eager blobstores).
test_both_repotypes!(
store_fetch_mononoke_types,
store_fetch_mononoke_types_lazy,
store_fetch_mononoke_types_eager
);
#[test]
fn test_compute_changed_files_no_parents() {
async_unit::tokio_unit_test(|| {
@ -736,7 +686,9 @@ fn make_file_change(
) -> impl Future<Item = FileChange, Error = Error> + Send {
let content = content.as_ref();
let content_size = content.len() as u64;
repo.unittest_store(ctx, FileContents::new_bytes(content.as_ref()))
FileContents::new_bytes(content.as_ref())
.into_blob()
.store(ctx, &repo.get_blobstore())
.map(move |content_id| FileChange::new(content_id, FileType::Regular, content_size, None))
}

View File

@ -301,6 +301,7 @@ mod tests {
use crate::test_utils::{get_bonsai_changeset, iterate_all_entries};
use blobrepo::save_bonsai_changesets;
use blobrepo_factory::new_memblob_empty;
use blobstore::Storable;
use bytes::Bytes;
use derived_data_unodes::get_file_changes;
use failure_ext::Result;
@ -309,7 +310,8 @@ mod tests {
use maplit::btreemap;
use mercurial_types::{blobs::BlobManifest, Changeset, HgFileNodeId, HgManifestId};
use mononoke_types::{
BonsaiChangeset, BonsaiChangesetMut, DateTime, FileChange, FileContents, RepoPath,
BlobstoreValue, BonsaiChangeset, BonsaiChangesetMut, DateTime, FileChange, FileContents,
RepoPath,
};
use std::collections::{HashSet, VecDeque};
use tokio::runtime::Runtime;
@ -713,9 +715,9 @@ mod tests {
match content {
Some((content, file_type)) => {
let size = content.len();
let content = FileContents::Bytes(Bytes::from(content));
let content = FileContents::Bytes(Bytes::from(content)).into_blob();
let content_id = runtime
.block_on(repo.unittest_store(ctx.clone(), content))
.block_on(content.store(ctx.clone(), &repo.get_blobstore()))
.unwrap();
let file_change = FileChange::new(content_id, file_type, size as u64, None);

View File

@ -6,10 +6,8 @@
#![deny(warnings)]
use std::collections::BTreeMap;
use std::str::FromStr;
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobstore::Storable;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use bytes::Bytes;
use context::CoreContext;
@ -19,8 +17,11 @@ use futures_ext::{BoxFuture, FutureExt};
use maplit::btreemap;
use mercurial_types::{HgChangesetId, MPath};
use mononoke_types::{
BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime, FileChange, FileContents, FileType,
BlobstoreValue, BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime, FileChange,
FileContents, FileType,
};
use std::collections::BTreeMap;
use std::str::FromStr;
fn store_files(
ctx: CoreContext,
@ -35,7 +36,11 @@ fn store_files(
Some(content) => {
let size = content.len();
let content = FileContents::new_bytes(Bytes::from(content));
let content_id = repo.unittest_store(ctx.clone(), content).wait().unwrap();
let content_id = content
.into_blob()
.store(ctx.clone(), &repo.get_blobstore())
.wait()
.unwrap();
let file_change = FileChange::new(content_id, FileType::Regular, size as u64, None);
res.insert(path, Some(file_change));

View File

@ -6,22 +6,16 @@
#![deny(warnings)]
extern crate blobrepo;
extern crate bytes;
extern crate context;
extern crate futures;
#[macro_use]
extern crate maplit;
extern crate mononoke_types;
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobstore::Storable;
use bytes::Bytes;
use context::CoreContext;
use futures::future::Future;
use maplit::btreemap;
use mononoke_types::{
BonsaiChangesetMut, ChangesetId, DateTime, FileChange, FileContents, FileType, MPath,
BlobstoreValue, BonsaiChangesetMut, ChangesetId, DateTime, FileChange, FileContents, FileType,
MPath,
};
use std::collections::BTreeMap;
pub fn store_files(
@ -37,7 +31,11 @@ pub fn store_files(
Some(content) => {
let size = content.len();
let content = FileContents::new_bytes(Bytes::from(content));
let content_id = repo.unittest_store(ctx.clone(), content).wait().unwrap();
let content_id = content
.into_blob()
.store(ctx.clone(), &repo.get_blobstore())
.wait()
.unwrap();
let file_change = FileChange::new(content_id, FileType::Regular, size as u64, None);
res.insert(path, Some(file_change));
@ -60,7 +58,11 @@ pub fn store_rename(
let path = MPath::new(path).unwrap();
let size = content.len();
let content = FileContents::new_bytes(Bytes::from(content));
let content_id = repo.unittest_store(ctx, content).wait().unwrap();
let content_id = content
.into_blob()
.store(ctx, &repo.get_blobstore())
.wait()
.unwrap();
let file_change = FileChange::new(content_id, FileType::Regular, size as u64, Some(copy_src));
(path, Some(file_change))