Replace BlobRepoHg::get_hg_from_bonsai_changeset with DeriveHgChangeset::derive_hg_changeset

Summary:
Replace all uses of `BlobRepoHg::get_hg_from_bonsai_changeset` with
`DeriveHgChangeset::derive_hg_changeset`. The two methods are equivalent
(the removed method was a thin wrapper around the new one), except that
`derive_hg_changeset` does not depend on `BlobRepo` and takes the
`CoreContext` by reference rather than by value.
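
For illustration, a minimal before/after sketch of a typical call site (the
helper name `hg_id_for` is hypothetical; the `use` paths follow the crates
touched in the hunks below):

```rust
use anyhow::Error;
use blobrepo::BlobRepo;
use context::CoreContext;
// The trait that now provides the derivation method at call sites.
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::ChangesetId;

// Hypothetical helper showing the migration of a single call site.
async fn hg_id_for(
    ctx: &CoreContext,
    repo: &BlobRepo,
    bcs_id: ChangesetId,
) -> Result<HgChangesetId, Error> {
    // Before (removed in this diff; required `use blobrepo_hg::BlobRepoHg`
    // and an owned context):
    //     repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id).await
    //
    // After: the method takes `&CoreContext`, so no clone is needed.
    repo.derive_hg_changeset(ctx, bcs_id).await
}
```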

Reviewed By: mitrandir77

Differential Revision: D34971451

fbshipit-source-id: 4c346e9e6f102759a54c9d0cebc6c609b0843581
Mark Juggurnauth-Thomas, 2022-03-23 07:06:23 -07:00 (committed by Facebook GitHub Bot)
parent ab3be4a673
commit 4226f9b3cc
87 changed files with 211 additions and 417 deletions


@ -81,7 +81,6 @@ anyhow = "1.0.51"
ascii = "1.0"
async-compression = { version = "0.3.8", features = ["all-implementations", "brotli", "bzip2", "deflate", "gzip", "zlib", "zstd"] }
blobrepo = { version = "0.1.0", path = "blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "blobrepo/blobrepo_hg" }
blobrepo_utils = { version = "0.1.0", path = "blobrepo_utils" }
blobstore = { version = "0.1.0", path = "blobstore" }
blobstore_factory = { version = "0.1.0", path = "blobstore/factory" }


@ -60,7 +60,6 @@ unodes = { version = "0.1.0", path = "../derived_data/unodes" }
[dev-dependencies]
async-trait = "0.1.52"
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../tests/fixtures" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }


@ -120,12 +120,6 @@ pub trait BlobRepoHg {
path: RepoPath,
limit: Option<u64>,
) -> Result<FilenodeRangeResult<Vec<FilenodeInfo>>, Error>;
async fn get_hg_from_bonsai_changeset(
&self,
ctx: CoreContext,
bcs_id: ChangesetId,
) -> Result<HgChangesetId, Error>;
}
define_stats! {
@ -208,7 +202,7 @@ impl BlobRepoHg for BlobRepo {
.map(Ok),
)
.map_ok(|csid| {
self.get_hg_from_bonsai_changeset(ctx.clone(), csid)
self.derive_hg_changeset(&ctx, csid)
.map_ok(move |hgcsid| (hgcsid, csid))
})
.try_buffer_unordered(100)
@ -234,7 +228,7 @@ impl BlobRepoHg for BlobRepo {
let repo = self.clone();
move |cs| {
cloned!(ctx, repo);
async move { repo.get_hg_from_bonsai_changeset(ctx, cs).await }
async move { repo.derive_hg_changeset(&ctx, cs).await }
}
})
.try_buffer_unordered(100)
@ -280,7 +274,7 @@ impl BlobRepoHg for BlobRepo {
.ok_or(ErrorKind::BonsaiNotFound(csid))?
.parents
.into_iter()
.map(|parent| self.get_hg_from_bonsai_changeset(ctx.clone(), parent));
.map(|parent| self.derive_hg_changeset(&ctx, parent));
Ok(future::try_join_all(parents).await?)
}
@ -295,7 +289,7 @@ impl BlobRepoHg for BlobRepo {
match cs_opt {
None => Ok(None),
Some(cs) => {
let hg_csid = self.get_hg_from_bonsai_changeset(ctx, cs).await?;
let hg_csid = self.derive_hg_changeset(&ctx, cs).await?;
Ok(Some(hg_csid))
}
}
@ -392,14 +386,6 @@ impl BlobRepoHg for BlobRepo {
.get_all_filenodes_maybe_stale(&ctx, &path, limit)
.await
}
async fn get_hg_from_bonsai_changeset(
&self,
ctx: CoreContext,
bcs_id: ChangesetId,
) -> Result<HgChangesetId, Error> {
self.derive_hg_changeset(&ctx, bcs_id).await
}
}
pub fn to_hg_bookmark_stream<BookmarkType>(


@ -16,10 +16,7 @@ use anyhow::Error;
use assert_matches::assert_matches;
use blobrepo::BlobRepo;
use blobrepo_errors::ErrorKind;
use blobrepo_hg::{
repo_commit::{compute_changed_files, UploadEntries},
BlobRepoHg,
};
use blobrepo_hg::repo_commit::{compute_changed_files, UploadEntries};
use blobstore::{Loadable, Storable};
use bytes::Bytes;
use cloned::cloned;
@ -28,7 +25,7 @@ use fbinit::FacebookInit;
use fixtures::{create_bonsai_changeset, many_files_dirs, merge_uneven};
use futures::future::{BoxFuture, FutureExt, TryFutureExt};
use memblob::Memblob;
use mercurial_derived_data::get_manifest_from_bonsai;
use mercurial_derived_data::{get_manifest_from_bonsai, DeriveHgChangeset};
use mercurial_types::{
blobs::{
ContentBlobMeta, File, HgBlobChangeset, HgBlobManifest, UploadHgFileContents,
@ -783,10 +780,7 @@ async fn test_hg_commit_generation_simple(fb: FacebookInit) {
blobrepo::save_bonsai_changesets(vec![bcs], ctx.clone(), &repo)
.await
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await.unwrap();
assert_eq!(
hg_cs_id,
@ -826,10 +820,7 @@ async fn test_hg_commit_generation_stack(fb: FacebookInit) {
.await
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx, top_of_stack)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, top_of_stack).await.unwrap();
assert_eq!(
hg_cs_id,
HgChangesetId::new(string_to_nodehash(
@ -852,10 +843,7 @@ async fn test_hg_commit_generation_one_after_another(fb: FacebookInit) {
.await
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), first_bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, first_bcs_id).await.unwrap();
assert_eq!(
hg_cs_id,
HgChangesetId::new(string_to_nodehash(
@ -863,10 +851,7 @@ async fn test_hg_commit_generation_one_after_another(fb: FacebookInit) {
))
);
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx, second_bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, second_bcs_id).await.unwrap();
assert_eq!(
hg_cs_id,
HgChangesetId::new(string_to_nodehash(
@ -884,10 +869,7 @@ async fn test_hg_commit_generation_diamond(fb: FacebookInit) {
.await
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), last_bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, last_bcs_id).await.unwrap();
assert_eq!(
hg_cs_id,
HgChangesetId::new(string_to_nodehash(
@ -907,10 +889,7 @@ async fn test_hg_commit_generation_many_diamond(fb: FacebookInit) {
.unwrap()
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx, bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await.unwrap();
assert_eq!(
hg_cs_id,
HgChangesetId::new(string_to_nodehash(
@ -951,7 +930,7 @@ async fn test_hg_commit_generation_uneven_branch(fb: FacebookInit) {
.unwrap();
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), merge.get_changeset_id())
.derive_hg_changeset(&ctx, merge.get_changeset_id())
.await
.unwrap();
assert_eq!(
@ -994,7 +973,7 @@ async fn save_reproducibility_under_load(fb: FacebookInit) -> Result<(), Error>
std::iter::repeat(16).take(50),
)
.await?;
let hgcsid = repo.get_hg_from_bonsai_changeset(ctx, csid).await?;
let hgcsid = repo.derive_hg_changeset(&ctx, csid).await?;
assert_eq!(hgcsid, "e9b73f926c993c5232139d4eefa6f77fa8c41279".parse()?);
@ -1165,10 +1144,7 @@ impl TestHelper {
}
async fn lookup_changeset(&self, cs_id: ChangesetId) -> Result<HgBlobChangeset, Error> {
let hg_cs_id = self
.repo
.get_hg_from_bonsai_changeset(self.ctx.clone(), cs_id)
.await?;
let hg_cs_id = self.repo.derive_hg_changeset(&self.ctx, cs_id).await?;
let hg_cs = hg_cs_id.load(&self.ctx, self.repo.blobstore()).await?;
@ -1266,9 +1242,7 @@ mod octopus_merges {
.commit()
.await?;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), commit)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, commit).await?;
let hg_cs = hg_cs_id.load(&ctx, repo.blobstore()).await?;


@ -11,7 +11,6 @@ license = "GPLv2+"
anyhow = "1.0.51"
async-trait = "0.1.52"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
bonsai_git_mapping = { version = "0.1.0", path = "../../bonsai_git_mapping" }
bonsai_globalrev_mapping = { version = "0.1.0", path = "../../bonsai_globalrev_mapping" }


@ -24,6 +24,7 @@ filenodes = { version = "0.1.0", path = "../filenodes" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures_stats = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
manifest = { version = "0.1.0", path = "../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }
microwave = { version = "0.1.0", path = "../microwave" }


@ -24,6 +24,7 @@ use futures::{
};
use futures_stats::TimedFutureExt;
use manifest::{Entry, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgFileNodeId, RepoPath};
use metaconfig_types::CacheWarmupParams;
use microwave::{self, SnapshotLocation};
@ -196,9 +197,7 @@ async fn do_cache_warmup(
CacheWarmupTarget::Changeset(bcs_id) => bcs_id,
};
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let blobstore_warmup = task::spawn({
cloned!(ctx, repo);


@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../blobstore" }
blobstore_factory = { version = "0.1.0", path = "../blobstore/factory" }
bonsai_hg_mapping = { version = "0.1.0", path = "../bonsai_hg_mapping" }
@ -30,6 +29,7 @@ futures_ext = { package = "futures_01_ext", version = "0.1.0", git = "https://gi
libc = "0.2.98"
maybe-owned = "0.3.4"
megarepo_config = { version = "0.1.0", path = "../megarepo_api/megarepo_config" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_parser = { version = "0.1.0", path = "../metaconfig/parser" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }


@ -25,11 +25,11 @@ use tokio::{
use crate::args::MononokeMatches;
use crate::monitoring;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bonsai_hg_mapping::BonsaiHgMappingRef;
use bookmarks::{BookmarkName, BookmarksRef};
use context::CoreContext;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgManifestId};
use mononoke_types::ChangesetId;
use repo_identity::RepoIdentityRef;
@ -142,11 +142,11 @@ pub async fn get_root_manifest_id(
repo: BlobRepo,
hash_or_bookmark: impl ToString,
) -> Result<HgManifestId, Error> {
let cs_id = csid_resolve(&ctx, &repo, hash_or_bookmark).await?;
let cs_id = csid_resolve(ctx, &repo, hash_or_bookmark).await?;
Ok(repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.derive_hg_changeset(ctx, cs_id)
.await?
.load(&ctx, repo.blobstore())
.load(ctx, repo.blobstore())
.await?
.manifestid())
}


@ -13,6 +13,7 @@ use cloned::cloned;
use context::CoreContext;
use futures::TryStreamExt;
use humantime::parse_duration;
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_types::Timestamp;
use repo_blobstore::RepoBlobstoreRef;
use serde_json::{json, to_string_pretty};
@ -263,9 +264,7 @@ async fn handle_log(args: &ArgMatches<'_>, ctx: CoreContext, repo: BlobRepo) ->
async move {
match cs_id {
Some(cs_id) => {
let cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let cs = repo.derive_hg_changeset(&ctx, cs_id).await?;
Ok((entry_id, Some(cs), rs, ts))
}
None => Ok((entry_id, None, rs, ts)),
@ -329,9 +328,7 @@ async fn handle_list(args: &ArgMatches<'_>, ctx: CoreContext, repo: BlobRepo) ->
move |(bookmark, bonsai_cs_id)| {
cloned!(ctx, repo);
async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bonsai_cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, bonsai_cs_id).await?;
println!("{}\t{}\t{}", bookmark.into_name(), bonsai_cs_id, hg_cs_id);
Ok(())
}


@ -7,7 +7,6 @@
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use clap_old::{App, ArgMatches, SubCommand};
use cmdlib::{
@ -17,6 +16,7 @@ use cmdlib::{
use context::CoreContext;
use fbinit::FacebookInit;
use manifest::{Entry, Manifest, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgFileNodeId, HgManifestId, MPath};
use mononoke_types::FileType;
use slog::Logger;
@ -119,9 +119,7 @@ async fn fetch_entry(
let mpath = MPath::new(path)?;
let bcs_id = helpers::csid_resolve(&ctx, repo.clone(), rev.to_string()).await?;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.blobstore()).await?;
let ret = hg_cs


@ -7,7 +7,6 @@
use anyhow::{anyhow, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobrepo_override::DangerousOverride;
use blobstore::Loadable;
use cacheblob::{dummy::DummyLease, LeaseOps};
@ -31,6 +30,7 @@ use futures::{
};
use futures_stats::TimedFutureExt;
use manifest::ManifestOps;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_derived_data::MappedHgChangesetId;
use mononoke_types::{ChangesetId, ContentId, FileType, MPath};
use skeleton_manifest::RootSkeletonManifestId;
@ -603,9 +603,7 @@ async fn list_hg_manifest(
repo: &BlobRepo,
cs_id: ChangesetId,
) -> Result<(ManifestType, HashMap<MPath, ManifestData>), Error> {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, cs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.blobstore()).await?;
let mfid = hg_cs.manifestid();


@ -20,6 +20,7 @@ use filenodes::FilenodeInfo;
use futures::{future::try_join_all, TryStreamExt};
use futures_stats::TimedFutureExt;
use manifest::{Entry, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgFileEnvelope, HgFileNodeId, MPath};
use mononoke_types::RepoPath;
use slog::{debug, info, Logger};
@ -151,9 +152,7 @@ async fn handle_filenodes_at_revision(
log_envelope: bool,
) -> Result<(), Error> {
let cs_id = helpers::csid_resolve(&ctx, &blobrepo, revision.to_string()).await?;
let cs_id = blobrepo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let cs_id = blobrepo.derive_hg_changeset(&ctx, cs_id).await?;
let filenode_ids = get_file_nodes(
ctx.clone(),
ctx.logger().clone(),


@ -11,9 +11,9 @@ use std::str::FromStr;
use anyhow::{anyhow, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use cmdlib::args::{self, MononokeMatches};
use context::CoreContext;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::{hash::GitSha1, ChangesetId};
use slog::Logger;
@ -107,9 +107,7 @@ async fn convert_from_bonsai(
to: &str,
) -> Result<String, Error> {
if to == "hg" {
let hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg = repo.derive_hg_changeset(ctx, cs_id).await?;
Ok(format!("{}", hg))
} else if to == "git" {
let maybegit = repo


@ -7,7 +7,6 @@
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use clap_old::{App, ArgMatches, SubCommand};
use cmdlib::args::{self, MononokeMatches};
@ -15,6 +14,7 @@ use context::CoreContext;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, TryStreamExt};
use manifest::{bonsai_diff, BonsaiDiffFileChange};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgManifestId, MPath};
use revset::RangeNodeStream;
use serde_derive::Serialize;
@ -95,7 +95,7 @@ pub async fn subcommand_hg_changeset<'a>(
let css: Vec<_> =
RangeNodeStream::new(ctx.clone(), repo.get_changeset_fetcher(), start_cs, stop_cs)
.compat()
.map_ok(|cs| repo.get_hg_from_bonsai_changeset(ctx.clone(), cs))
.map_ok(|cs| repo.derive_hg_changeset(&ctx, cs))
.try_buffer_unordered(100)
.map_ok(|cs| cs.to_hex().to_string())
.try_collect()


@ -7,7 +7,6 @@
use anyhow::{format_err, Context, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, BookmarkUpdateReason, Freshness};
use clap_old::{App, Arg, ArgMatches, SubCommand};
@ -18,6 +17,7 @@ use fbinit::FacebookInit;
use futures::stream::StreamExt;
use futures::{compat::Future01CompatExt, future};
use mercurial_bundle_replay_data::BundleReplayData;
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_hg_sync_job_helper_lib::save_bundle_to_file;
use mononoke_types::{BonsaiChangeset, ChangesetId, RepositoryId};
use mutable_counters::{MutableCounters, SqlMutableCounters};
@ -333,10 +333,7 @@ async fn show(
let bundle_id = entry.id;
let hg_cs_id = match entry.to_changeset_id {
Some(bcs_id) => repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?
.to_string(),
Some(bcs_id) => repo.derive_hg_changeset(ctx, bcs_id).await?.to_string(),
None => "DELETED".to_string(),
};


@ -8,7 +8,6 @@
use crate::common::get_file_nodes;
use anyhow::{anyhow, format_err, Context, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::{Loadable, Storable};
use clap_old::{App, Arg, ArgGroup, ArgMatches, SubCommand};
use cloned::cloned;
@ -23,6 +22,7 @@ use futures::{
stream::{StreamExt, TryStreamExt},
};
use manifest::ManifestOps;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{blobs::HgBlobChangeset, HgChangesetId, MPath};
use mononoke_types::{
blob::BlobstoreValue, typed_hash::MononokeId, ContentId, RedactionKeyList, Timestamp,
@ -276,9 +276,7 @@ async fn get_ctx_blobrepo_cs_id<'a>(
let ctx = CoreContext::new_with_logger(fb, logger);
let cs_id = helpers::csid_resolve(&ctx, blobrepo.clone(), rev.to_string()).await?;
let hg_cs_id = blobrepo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = blobrepo.derive_hg_changeset(&ctx, cs_id).await?;
Ok((ctx, blobrepo, hg_cs_id))
}
@ -523,9 +521,7 @@ async fn check_if_content_is_reachable_from_bookmark(
"Checking if redacted content exist in '{}' bookmark...", main_bookmark
);
let csid = helpers::csid_resolve(&ctx, blobrepo, main_bookmark).await?;
let hg_cs_id = blobrepo
.get_hg_from_bonsai_changeset(ctx.clone(), csid)
.await?;
let hg_cs_id = blobrepo.derive_hg_changeset(ctx, csid).await?;
let hg_cs = hg_cs_id.load(ctx, blobrepo.blobstore()).await?;


@ -23,6 +23,7 @@ use derived_data::BonsaiDerived;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, future, StreamExt, TryStreamExt};
use manifest::{get_implicit_deletes, PathOrPrefix};
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_types::{ChangesetId, DeletedManifestId, MPath};
use revset::AncestorsNodeStream;
use slog::{debug, Logger};
@ -186,9 +187,7 @@ async fn get_file_changes(
};
let parent_manifests_fut = async {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, cs_id).await?;
let parents = repo.get_changeset_parents(ctx.clone(), hg_cs_id).await?;
let parents_futs = parents.into_iter().map(|csid| {
cloned!(ctx, repo);


@ -9,7 +9,6 @@ use crate::error::SubcommandError;
use anyhow::{bail, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use clap_old::{App, Arg, ArgMatches, SubCommand};
use cmdlib::{
@ -21,6 +20,7 @@ use derived_data::BonsaiDerived;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, StreamExt, TryStreamExt};
use manifest::{Entry, ManifestOps, PathOrPrefix};
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_types::{ChangesetId, MPath};
use revset::AncestorsNodeStream;
@ -154,7 +154,7 @@ async fn subcommand_verify(
async fn single_verify(ctx: &CoreContext, repo: &BlobRepo, csid: ChangesetId) -> Result<(), Error> {
let hg_paths = async move {
let hg_csid = repo.get_hg_from_bonsai_changeset(ctx.clone(), csid).await?;
let hg_csid = repo.derive_hg_changeset(ctx, csid).await?;
println!("CHANGESET: hg_csid:{:?} csid:{:?}", hg_csid, csid);
let hg_changeset = hg_csid.load(ctx, repo.blobstore()).await?;
let paths = hg_changeset


@ -11,7 +11,6 @@ mod config;
use anyhow::{format_err, Error, Result};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobrepo_utils::{BonsaiMFVerify, BonsaiMFVerifyResult};
use blobstore::Loadable;
use clap::{Parser, Subcommand};
@ -29,7 +28,7 @@ use futures_old::{
Future, Stream,
};
use lock_ext::LockExt;
use mercurial_derived_data::get_manifest_from_bonsai;
use mercurial_derived_data::{get_manifest_from_bonsai, DeriveHgChangeset};
use mercurial_types::HgChangesetId;
use mononoke_app::{args::RepoArgs, MononokeAppBuilder};
use revset::AncestorsNodeStream;
@ -317,7 +316,7 @@ fn subcommmand_hg_manifest_verify(
.map(|res| async move {
match res {
Ok(csid) => {
let cs_id = repo.get_hg_from_bonsai_changeset(ctx.clone(), csid).await?;
let cs_id = repo.derive_hg_changeset(ctx, csid).await?;
let bonsai_fut = csid.load(ctx, repo.blobstore()).map_err(Error::from);
let parents_fut = async move {


@ -26,6 +26,7 @@ use futures::stream::{self, Stream, StreamExt, TryStreamExt};
use futures::{future::FutureExt, try_join};
use futures_ext::{BoxStream, StreamExt as OldStreamExt};
use manifest::{Diff, Entry, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{FileBytes, HgChangesetId, HgFileNodeId, HgManifestId};
use mononoke_types::{FileType, RepositoryId};
use scuba_ext::MononokeScubaSampleBuilder;
@ -313,9 +314,7 @@ pub async fn generate_statistics_from_file<P: AsRef<Path>>(
.map({
move |changeset| async move {
let ChangesetEntry { repo_id, cs_id, .. } = changeset;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, cs_id).await?;
let cs_timestamp =
get_changeset_timestamp_from_changeset(ctx, repo, &hg_cs_id).await?;
// the error type annotation in principle should be inferred,
@ -648,10 +647,7 @@ mod tests {
)
.await;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await.unwrap();
let stats = get_statistics_from_changeset(ctx, repo, blobstore, &hg_cs_id)
.await
@ -701,10 +697,7 @@ mod tests {
)
.await;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await
.unwrap();
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await.unwrap();
let manifest = get_manifest_from_changeset(ctx, repo, &hg_cs_id)
.await


@ -27,6 +27,7 @@ fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rus
futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures-old = { package = "futures", version = "0.1.31" }
live_commit_sync_config = { version = "0.1.0", path = "../live_commit_sync_config" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }


@ -27,6 +27,7 @@ use futures::{
try_join,
};
use live_commit_sync_config::{CfgrLiveCommitSyncConfig, LiveCommitSyncConfig};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::ChangesetId;
use mutable_counters::MutableCounters;
@ -85,7 +86,7 @@ async fn derive_target_hg_changesets(
Some(target_cs_id) => {
let hg_cs_id = commit_syncer
.get_target_repo()
.get_hg_from_bonsai_changeset(ctx.clone(), target_cs_id)
.derive_hg_changeset(ctx, target_cs_id)
.await?;
info!(
ctx.logger(),


@ -30,6 +30,7 @@ use futures_ext::FbTryFutureExt;
use live_commit_sync_config::TestLiveCommitSyncConfig;
use manifest::{Entry, ManifestOps};
use maplit::{btreemap, hashmap};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use metaconfig_types::{
CommitSyncConfig, CommitSyncConfigVersion, CommonCommitSyncConfig,
@ -958,12 +959,8 @@ async fn verify_mapping_and_all_wc(
};
}
let source_hg_cs_id = source_repo
.get_hg_from_bonsai_changeset(ctx.clone(), source_cs_id)
.await?;
let target_hg_cs_id = target_repo
.get_hg_from_bonsai_changeset(ctx.clone(), target_cs_id)
.await?;
let source_hg_cs_id = source_repo.derive_hg_changeset(&ctx, source_cs_id).await?;
let target_hg_cs_id = target_repo.derive_hg_changeset(&ctx, target_cs_id).await?;
compare_contents(
&ctx,


@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
blobstore_factory = { version = "0.1.0", path = "../../blobstore/factory" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
@ -25,6 +24,7 @@ futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures_stats = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
live_commit_sync_config = { version = "0.1.0", path = "../live_commit_sync_config" }
manifest = { version = "0.1.0", path = "../../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_api_types = { version = "0.1.0", path = "../../mononoke_api/types" }


@ -7,7 +7,6 @@
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateLogEntry};
use cloned::cloned;
@ -28,6 +27,7 @@ use futures::{
use futures_stats::TimedFutureExt;
use live_commit_sync_config::{CfgrLiveCommitSyncConfig, LiveCommitSyncConfig};
use manifest::{Diff, Entry, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{FileType, HgFileNodeId, HgManifestId};
use metaconfig_types::{CommitSyncConfigVersion, CommitSyncDirection};
use mononoke_api_types::InnerRepo;
@ -1426,9 +1426,7 @@ async fn fetch_root_mf_id(
repo: &BlobRepo,
cs_id: ChangesetId,
) -> Result<HgManifestId, Error> {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, cs_id).await?;
let changeset = hg_cs_id.load(ctx, repo.blobstore()).await?;
Ok(changeset.manifestid())
}


@ -55,12 +55,12 @@ tunables = { version = "0.1.0", path = "../../tunables" }
[dev-dependencies]
ascii = "1.0"
assert_matches = "1.5"
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bytes = { version = "1.1", features = ["serde"] }
cloned = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
cross_repo_sync_test_utils = { version = "0.1.0", path = "test_utils" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mononoke_types-mocks = { version = "0.1.0", path = "../../mononoke_types/mocks" }
revset = { version = "0.1.0", path = "../../revset" }
skiplist = { version = "0.1.0", path = "../../reachabilityindex/skiplist" }


@ -19,7 +19,6 @@ use std::str::FromStr;
use std::sync::Arc;
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobstore::{Loadable, Storable};
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use cacheblob::InProcessLease;
@ -35,6 +34,7 @@ use futures::{future::join_all, FutureExt, TryStreamExt};
use live_commit_sync_config::{TestLiveCommitSyncConfig, TestLiveCommitSyncConfigSource};
use manifest::ManifestOps;
use maplit::{btreemap, hashmap};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use metaconfig_types::{
CommitSyncConfig, CommitSyncConfigVersion, CommonCommitSyncConfig,
@ -2102,9 +2102,7 @@ async fn assert_working_copy(
cs_id: ChangesetId,
expected_files: Vec<&str>,
) -> Result<(), Error> {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, cs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.blobstore()).await?;
let mf_id = hg_cs.manifestid();


@ -14,7 +14,6 @@ path = "lib.rs"
anyhow = "1.0.51"
ascii = "1.0"
blobrepo = { version = "0.1.0", path = "../../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../../blobstore" }
bookmarks = { version = "0.1.0", path = "../../../bookmarks" }
commit_transformation = { version = "0.1.0", path = "../../../megarepo_api/commit_transformation" }


@ -14,7 +14,6 @@ path = "tool/main.rs"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobrepo_utils = { version = "0.1.0", path = "../../blobrepo_utils" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
@ -37,6 +36,7 @@ itertools = "0.10.3"
live_commit_sync_config = { version = "0.1.0", path = "../live_commit_sync_config" }
manifest = { version = "0.1.0", path = "../../manifest" }
maplit = "1.0"
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_api_types = { version = "0.1.0", path = "../../mononoke_api/types" }


@ -7,9 +7,9 @@
use anyhow::{format_err, Error};
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use context::CoreContext;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, MPath};
use mononoke_types::{BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime, FileChange};
use phases::PhasesRef;
@ -92,9 +92,7 @@ async fn generate_hg_changeset(
bcs_id: ChangesetId,
) -> Result<HgChangesetId, Error> {
info!(ctx.logger(), "Generating an HG equivalent of {:?}", bcs_id);
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
info!(
ctx.logger(),


@ -10,12 +10,12 @@
use anyhow::{anyhow, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use context::CoreContext;
use futures::{future, Stream, TryStreamExt};
use itertools::Itertools;
use manifest::ManifestOps;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{
blobs::{HgBlobChangeset, HgBlobEnvelope},
HgChangesetId, MPath,
@ -162,9 +162,7 @@ async fn perform_stack_move_impl<'a, Chunker>(
where
Chunker: Fn(Vec<FileMove>) -> Vec<Vec<FileMove>> + 'a,
{
let parent_hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), parent_bcs_id)
.await?;
let parent_hg_cs_id = repo.derive_hg_changeset(ctx, parent_bcs_id).await?;
let parent_hg_cs = parent_hg_cs_id.load(ctx, repo.blobstore()).await?;


@ -7,13 +7,13 @@
use anyhow::Error;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use context::CoreContext;
use derived_data::BonsaiDerived;
use fsnodes::RootFsnodeId;
use futures::{future::try_join, TryStreamExt};
use manifest::{Diff, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::MPath;
use mononoke_types::ChangesetId;
use slog::info;
@ -24,9 +24,7 @@ pub async fn get_working_copy_paths(
repo: &BlobRepo,
bcs_id: ChangesetId,
) -> Result<Vec<MPath>, Error> {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.blobstore()).await?;
info!(ctx.logger(), "Getting working copy contents");


@ -7,7 +7,6 @@
use anyhow::{anyhow, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use context::CoreContext;
@ -20,6 +19,7 @@ use itertools::Itertools;
use manifest::{Diff, ManifestOps};
use maplit::hashset;
use megarepolib::common::{create_and_save_bonsai, ChangesetArgsFactory, StackPosition};
use mercurial_derived_data::DeriveHgChangeset;
use metaconfig_types::PushrebaseFlags;
use mononoke_types::{ChangesetId, FileChange, MPath};
use pushrebase::do_pushrebase_bonsai;
@ -73,9 +73,7 @@ pub async fn create_deletion_head_commits<'a>(
ctx.logger(),
"created bonsai #{}. Deriving hg changeset for it to verify its correctness", num
);
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
info!(ctx.logger(), "derived {}, pushrebasing...", hg_cs_id);


@ -7,7 +7,6 @@
use anyhow::Error;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use cmdlib::helpers;
@ -17,6 +16,7 @@ use maplit::hashset;
use megarepolib::common::{
create_and_save_bonsai, ChangesetArgs, ChangesetArgsFactory, StackPosition,
};
use mercurial_derived_data::DeriveHgChangeset;
use metaconfig_types::PushrebaseFlags;
use mononoke_api_types::InnerRepo;
use mononoke_types::ChangesetId;
@ -312,9 +312,7 @@ async fn push_merge_commit(
// Generating hg changeset from bonsai changeset will give us a validation
// that this merge commit is correct
let merge_hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), merge_cs_id)
.await?;
let merge_hg_cs_id = repo.derive_hg_changeset(ctx, merge_cs_id).await?;
info!(ctx.logger(), "Generated hg changeset {}", merge_hg_cs_id);
info!(ctx.logger(), "Now running pushrebase...");
@ -669,9 +667,7 @@ mod test {
);
for merge_cs_id in gradual_merge_result.values() {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), *merge_cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, *merge_cs_id).await?;
let hg_cs = hg_cs_id.load(&ctx, repo.blobstore()).await?;
assert!(hg_cs.files().is_empty());
}


@ -7,11 +7,11 @@
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use cloned::cloned;
use context::CoreContext;
use futures::try_join;
use megarepolib::working_copy::get_colliding_paths_between_commits;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::ChangesetId;
use slog::info;
@ -51,8 +51,8 @@ pub async fn perform_merge(
) -> Result<HgChangesetId, Error> {
cloned!(ctx, repo);
let (first_hg_cs_id, second_hg_cs_id) = try_join!(
repo.get_hg_from_bonsai_changeset(ctx.clone(), first_bcs_id.clone()),
repo.get_hg_from_bonsai_changeset(ctx.clone(), second_bcs_id.clone()),
repo.derive_hg_changeset(&ctx, first_bcs_id.clone()),
repo.derive_hg_changeset(&ctx, second_bcs_id.clone()),
)?;
fail_on_path_conflicts(&ctx, &repo, first_hg_cs_id, second_hg_cs_id).await?;
info!(


@ -12,7 +12,6 @@
/// USE WITH CARE!
use anyhow::{format_err, Context, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobrepo_utils::convert_diff_result_into_file_change_for_diamond_merge;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
@ -34,6 +33,7 @@ use futures_old::{Future, IntoFuture, Stream};
use live_commit_sync_config::LiveCommitSyncConfig;
use manifest::{bonsai_diff, BonsaiDiffFileChange};
use maplit::hashmap;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgFileNodeId, HgManifestId};
use metaconfig_types::CommitSyncConfigVersion;
use mononoke_api_types::InnerRepo;
@ -455,9 +455,7 @@ fn id_to_manifestid(
bcs_id: ChangesetId,
) -> impl Future<Item = HgManifestId, Error = Error> {
async move {
let cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let cs = cs_id.load(&ctx, repo.blobstore()).await?;
Ok(cs.manifestid())
}


@ -25,7 +25,6 @@ sorted_vector_map = { version = "0.1.0", git = "https://github.com/facebookexper
unicode-segmentation = "1.6.0"
[dev-dependencies]
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }


@ -39,13 +39,13 @@ tunables = { version = "0.1.0", path = "../../tunables" }
unodes = { version = "0.1.0", path = "../unodes" }
[dev-dependencies]
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }
mercurial_derived_data = { version = "0.1.0", path = "../mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
mononoke_types-mocks = { version = "0.1.0", path = "../../mononoke_types/mocks" }
pretty_assertions = "0.6"


@ -186,7 +186,6 @@ mod tests {
use super::*;
use crate::fastlog_impl::{fetch_fastlog_batch_by_unode_id, fetch_flattened};
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use bookmarks::BookmarkName;
use context::CoreContext;
use fbinit::FacebookInit;
@ -199,6 +198,7 @@ mod tests {
use futures::{Stream, TryFutureExt};
use manifest::ManifestOps;
use maplit::btreemap;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::fastlog_batch::{
max_entries_in_fastlog_batch, MAX_BATCHES, MAX_LATEST_LEN,
@ -651,9 +651,7 @@ mod tests {
.and_then(move |new_bcs_id| {
cloned!(ctx, repo);
async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, new_bcs_id).await?;
Ok((new_bcs_id, hg_cs_id))
}
})


@ -277,6 +277,7 @@ mod tests {
use futures::compat::Stream01CompatExt;
use manifest::ManifestOps;
use maplit::hashmap;
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_types::FileType;
use repo_derived_data::RepoDerivedDataRef;
use revset::AncestorsNodeStream;
@ -314,9 +315,7 @@ mod tests {
);
}
let linknode = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let linknode = repo.derive_hg_changeset(ctx, cs_id).await?;
for filenode in filenodes {
assert_eq!(filenode.info.linknode, linknode);
@ -603,9 +602,7 @@ mod tests {
linear::initrepo(fb, &repo).await;
let commit8 =
resolve_cs_id(&ctx, &repo, "a9473beb2eb03ddb1cccc3fbaeb8a4820f9cd157").await?;
let commit8 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), commit8)
.await?;
let commit8 = repo.derive_hg_changeset(&ctx, commit8).await?;
*filenodes_cs_id.lock().unwrap() = Some(commit8);
let master_cs_id = resolve_cs_id(&ctx, &repo, "master").await?;
let mut cs_ids =
@ -704,7 +701,7 @@ mod tests {
let prod_filenodes = repo.get_filenodes();
let backup_filenodes = backup_repo.get_filenodes();
let manifest = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs)
.derive_hg_changeset(ctx, cs)
.await?
.load(ctx, repo.blobstore())
.await


@ -36,12 +36,12 @@ thiserror = "1.0.29"
[dev-dependencies]
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
derived_data_test_utils = { version = "0.1.0", path = "../test_utils" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }
mercurial_derived_data = { version = "0.1.0", path = "../mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
repo_derived_data = { version = "0.1.0", path = "../../repo_attributes/repo_derived_data" }
revset = { version = "0.1.0", path = "../../revset" }


@ -167,7 +167,6 @@ pub(crate) fn get_file_changes(
mod test {
use super::*;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use borrowed::borrowed;
@ -183,6 +182,7 @@ mod test {
use futures::stream::{Stream, TryStreamExt};
use futures::try_join;
use manifest::Entry;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgManifestId};
use revset::AncestorsNodeStream;
use tokio::runtime::Runtime;
@ -237,9 +237,7 @@ mod test {
AncestorsNodeStream::new(ctx.clone(), &repo.get_changeset_fetcher(), bcs_id.clone())
.compat()
.and_then(move |new_bcs_id| async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, new_bcs_id).await?;
Ok((new_bcs_id, hg_cs_id))
}),
)


@ -35,7 +35,6 @@ tokio = { version = "1.15", features = ["full", "test-util", "tracing"] }
[dev-dependencies]
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }


@ -236,8 +236,8 @@ impl_bonsai_derived_via_manager!(MappedHgChangesetId);
#[cfg(test)]
mod test {
use super::*;
use crate::DeriveHgChangeset;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use bookmarks::BookmarkName;
use borrowed::borrowed;
use cloned::cloned;
@ -265,9 +265,7 @@ mod test {
.and_then(move |new_bcs_id| {
cloned!(ctx, repo);
async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, new_bcs_id).await?;
Result::<_, Error>::Ok((new_bcs_id, hg_cs_id))
}
})


@ -31,12 +31,12 @@ thiserror = "1.0.29"
[dev-dependencies]
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
derived_data_test_utils = { version = "0.1.0", path = "../test_utils" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }
mercurial_derived_data = { version = "0.1.0", path = "../mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
pretty_assertions = "0.6"
repo_derived_data = { version = "0.1.0", path = "../../repo_attributes/repo_derived_data" }


@ -173,7 +173,6 @@ pub(crate) fn get_file_changes(
mod test {
use super::*;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use borrowed::borrowed;
@ -188,6 +187,7 @@ mod test {
use futures::stream::{Stream, TryStreamExt};
use futures::try_join;
use manifest::Entry;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgManifestId};
use mononoke_types::ChangesetId;
use repo_derived_data::RepoDerivedDataRef;
@ -247,9 +247,7 @@ mod test {
AncestorsNodeStream::new(ctx.clone(), &repo.get_changeset_fetcher(), bcs_id.clone())
.compat()
.and_then(move |new_bcs_id| async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, new_bcs_id).await?;
Ok((new_bcs_id, hg_cs_id))
}),
)


@ -13,7 +13,6 @@ path = "lib.rs"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
bounded_traversal = { version = "0.1.0", path = "../../common/bounded_traversal" }
context = { version = "0.1.0", path = "../../server/context" }


@ -41,6 +41,7 @@ fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rus
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }
maplit = "1.0"
mercurial_derived_data = { version = "0.1.0", path = "../mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
repo_derived_data = { version = "0.1.0", path = "../../repo_attributes/repo_derived_data" }
revset = { version = "0.1.0", path = "../../revset" }


@ -468,6 +468,7 @@ mod tests {
use futures::TryStreamExt;
use manifest::ManifestOps;
use maplit::btreemap;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{blobs::HgBlobManifest, HgFileNodeId, HgManifestId};
use mononoke_types::{
BlobstoreValue, BonsaiChangeset, BonsaiChangesetMut, DateTime, FileChange, FileContents,
@ -1155,9 +1156,7 @@ mod tests {
bcs_id: ChangesetId,
) -> Result<HgFileNodeId, Error> {
let ctx = CoreContext::test_mock(fb);
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let hg_cs = hg_cs_id.load(&ctx, repo.blobstore()).await?;
Ok(HgFileNodeId::new(hg_cs.manifestid().into_nodehash()))
}


@ -234,7 +234,6 @@ pub(crate) fn get_file_changes(
mod test {
use super::*;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use borrowed::borrowed;
@ -249,6 +248,7 @@ mod test {
};
use futures::{compat::Stream01CompatExt, Future, Stream, TryStreamExt};
use manifest::Entry;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgManifestId};
use mononoke_types::ChangesetId;
use repo_derived_data::RepoDerivedDataRef;
@ -312,9 +312,7 @@ mod test {
.and_then(move |new_bcs_id| {
cloned!(ctx, repo);
async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, new_bcs_id).await?;
Result::<_, Error>::Ok((new_bcs_id, hg_cs_id))
}
})


@ -11,7 +11,6 @@ license = "GPLv2+"
anyhow = "1.0.51"
async-trait = "0.1.52"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
bonsai_git_mapping = { version = "0.1.0", path = "../../bonsai_git_mapping" }
bytes = { version = "1.1", features = ["serde"] }


@ -18,7 +18,6 @@ pub use crate::gitimport_objects::{
pub use crate::gitlfs::{GitImportLfs, LfsMetaData};
use anyhow::{format_err, Context, Error};
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobstore::Blobstore;
use bonsai_git_mapping::BonsaiGitMappingEntry;
use bytes::Bytes;
@ -33,7 +32,7 @@ pub use git_pool::GitPool;
use git_types::TreeHandle;
use linked_hash_map::LinkedHashMap;
use manifest::{bonsai_diff, BonsaiDiffFileChange, StoreLoadable};
use mercurial_derived_data::get_manifest_from_bonsai;
use mercurial_derived_data::{get_manifest_from_bonsai, DeriveHgChangeset};
use mercurial_types::HgManifestId;
use mononoke_types::{
hash, BonsaiChangeset, BonsaiChangesetMut, ChangesetId, ContentMetadata, FileChange, MPath,
@ -375,7 +374,7 @@ pub async fn gitimport(
let manifest = if let Some(manifest) = hg_manifests.get(&p) {
*manifest
} else {
repo.get_hg_from_bonsai_changeset(ctx.clone(), p)
repo.derive_hg_changeset(ctx, p)
.await?
.load(ctx, repo.blobstore())
.await?


@ -11,7 +11,6 @@ license = "GPLv2+"
anyhow = "1.0.51"
async-trait = "0.1.52"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
bytes = { version = "1.1", features = ["serde"] }
@ -22,6 +21,7 @@ filestore = { version = "0.1.0", path = "../../filestore" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures-util = "0.3.7"
manifest = { version = "0.1.0", path = "../../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
thiserror = "1.0.29"


@ -8,7 +8,6 @@
use anyhow::{format_err, Context as _};
use async_trait::async_trait;
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use bytes::Bytes;
@ -18,6 +17,7 @@ use derived_data::BonsaiDerived;
use futures::{future, stream::TryStreamExt};
use futures_util::future::TryFutureExt;
use manifest::{Diff, Entry, ManifestOps};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{FileType, HgFileNodeId, HgManifestId};
use mononoke_types::{ChangesetId, ContentId, MPath, ManifestUnodeId};
use std::collections::HashMap;
@ -196,7 +196,7 @@ async fn derive_hg_manifest(
changeset_id: ChangesetId,
) -> Result<HgManifestId, ErrorKind> {
let hg_changeset_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), changeset_id)
.derive_hg_changeset(ctx, changeset_id)
.await
.with_context(|| format!("Error deriving hg changeset for bonsai: {}", changeset_id))?;
let hg_mf_id = hg_changeset_id


@ -13,7 +13,6 @@ async-trait = "0.1.52"
async_once_cell = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
async_requests = { version = "0.1.0", path = "async_requests" }
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../blobstore" }
bookmarks = { version = "0.1.0", path = "../bookmarks" }
bytes = { version = "1.1", features = ["serde"] }
@ -30,6 +29,7 @@ manifest = { version = "0.1.0", path = "../manifest" }
megarepo_config = { version = "0.1.0", path = "megarepo_config" }
megarepo_error = { version = "0.1.0", path = "megarepo_error" }
megarepo_mapping = { version = "0.1.0", path = "mapping" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_parser = { version = "0.1.0", path = "../metaconfig/parser" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }


@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
blobsync = { version = "0.1.0", path = "../../blobrepo/blobsync" }
borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
@ -19,6 +18,7 @@ context = { version = "0.1.0", path = "../../server/context" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
manifest = { version = "0.1.0", path = "../../manifest" }
megarepo_configs = { version = "0.1.0", path = "../../../../configerator/structs/scm/mononoke/megarepo" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
sorted_vector_map = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }


@ -9,7 +9,6 @@
use anyhow::{anyhow, bail, Error};
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use blobsync::copy_content;
use borrowed::borrowed;
@ -18,6 +17,7 @@ use context::CoreContext;
use futures::{future::try_join_all, stream, StreamExt, TryStreamExt};
use manifest::get_implicit_deletes;
use megarepo_configs::types::SourceMappingRules;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgManifestId;
use mononoke_types::{
mpath_element_iter, BonsaiChangeset, BonsaiChangesetMut, ChangesetId, ContentId, FileChange,
@ -125,9 +125,7 @@ async fn get_manifest_ids<'a, I: IntoIterator<Item = ChangesetId>>(
|bcs_id| {
cloned!(ctx, repo);
async move {
let cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let hg_blob_changeset = cs_id.load(&ctx, repo.blobstore()).await?;
Ok(hg_blob_changeset.manifestid())
}


@ -8,7 +8,6 @@
use anyhow::{anyhow, Context, Error};
use async_trait::async_trait;
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use bytes::Bytes;
@ -30,6 +29,7 @@ use megarepo_config::{
};
use megarepo_error::MegarepoError;
use megarepo_mapping::{CommitRemappingState, SourceName};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgFileNodeId;
use mononoke_api::{ChangesetContext, Mononoke, MononokePath, RepoContext};
use mononoke_types::{
@ -188,16 +188,12 @@ pub trait MegarepoOp {
) -> Result<ChangesetId, MegarepoError> {
let blob_repo = repo.blob_repo();
let hg_cs_merge = async {
let hg_cs_id = blob_repo
.get_hg_from_bonsai_changeset(ctx.clone(), merge_commit)
.await?;
let hg_cs_id = blob_repo.derive_hg_changeset(ctx, merge_commit).await?;
let hg_cs = hg_cs_id.load(ctx, blob_repo.blobstore()).await?;
Ok(hg_cs.manifestid())
};
let parent_hg_css = try_join_all(new_parent_commits.iter().map(|p| async move {
let hg_cs_id = blob_repo
.get_hg_from_bonsai_changeset(ctx.clone(), *p)
.await?;
let hg_cs_id = blob_repo.derive_hg_changeset(ctx, *p).await?;
let hg_cs = hg_cs_id.load(ctx, blob_repo.blobstore()).await?;
Result::<_, Error>::Ok(hg_cs.manifestid())
}));


@ -25,6 +25,7 @@ futures-util = "0.3.7"
getbundle_response = { version = "0.1.0", path = "../repo_client/getbundle_response" }
hgproto = { version = "0.1.0", path = "../hgproto" }
manifest = { version = "0.1.0", path = "../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_mutation = { version = "0.1.0", path = "../mercurial/mutation" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }


@ -24,6 +24,7 @@ use futures::compat::{Future01CompatExt, Stream01CompatExt};
use futures::{future, stream, Stream, StreamExt, TryStream, TryStreamExt};
use futures_util::try_join;
use hgproto::GettreepackArgs;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_mutation::HgMutationEntry;
use mercurial_types::blobs::{RevlogChangeset, UploadHgNodeHash, UploadHgTreeEntry};
use mercurial_types::{HgChangesetId, HgFileEnvelopeMut, HgFileNodeId, HgManifestId, HgNodeHash};
@ -487,13 +488,14 @@ impl HgRepoContext {
})
}
// TODO(mbthomas): get_hg_from_bonsai -> derive_hg_changeset
pub async fn get_hg_from_bonsai(
&self,
cs_id: ChangesetId,
) -> Result<HgChangesetId, MononokeError> {
Ok(self
.blob_repo()
.get_hg_from_bonsai_changeset(self.ctx().clone(), cs_id)
.derive_hg_changeset(self.ctx(), cs_id)
.await?)
}
@ -525,7 +527,7 @@ impl HgRepoContext {
.await?;
let hg_id_futures = result_csids.iter().map(|result_csid| {
self.blob_repo()
.get_hg_from_bonsai_changeset(self.ctx().clone(), *result_csid)
.derive_hg_changeset(self.ctx(), *result_csid)
});
future::try_join_all(hg_id_futures)
.await
@ -928,9 +930,7 @@ mod tests {
blob_repo: &BlobRepo,
csid: ChangesetId,
) -> Result<HgManifestId, Error> {
let hg_cs_id = blob_repo
.get_hg_from_bonsai_changeset(ctx.clone(), csid)
.await?;
let hg_cs_id = blob_repo.derive_hg_changeset(&ctx, csid).await?;
let hg_cs = hg_cs_id.load(&ctx, &blob_repo.get_blobstore()).await?;
Ok(hg_cs.manifestid())
}


@ -19,7 +19,6 @@ anyhow = "1.0.51"
async-stream = "0.3"
base64 = "0.11.0"
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../blobstore" }
bonsai_globalrev_mapping = { version = "0.1.0", path = "../bonsai_globalrev_mapping" }
bookmarks = { version = "0.1.0", path = "../bookmarks" }
@ -52,6 +51,7 @@ lfs_protocol = { version = "0.1.0", path = "../lfs_protocol" }
maplit = "1.0"
mercurial_bundle_replay_data = { version = "0.1.0", path = "../mercurial/bundle_replay_data" }
mercurial_bundles = { version = "0.1.0", path = "../mercurial/bundles" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_revlog = { version = "0.1.0", path = "../mercurial/revlog" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }

View File

@ -9,7 +9,6 @@ use crate::darkstorm_verifier::DarkstormVerifier;
use crate::lfs_verifier::LfsVerifier;
use anyhow::{bail, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use borrowed::borrowed;
@ -30,6 +29,7 @@ use mercurial_bundles::{
changegroup::CgVersion,
create_bundle_stream, parts,
};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_revlog::RevlogChangeset;
use mercurial_types::{HgBlobNode, HgChangesetId, MPath};
use mononoke_types::{datetime::Timestamp, hash::Sha256, ChangesetId};
@ -162,7 +162,7 @@ impl BookmarkChange {
cloned!(repo);
async move {
let res = match maybe_cs {
Some(cs_id) => Some(repo.get_hg_from_bonsai_changeset(ctx, cs_id).await?),
Some(cs_id) => Some(repo.derive_hg_changeset(&ctx, cs_id).await?),
None => None,
};
Ok(res)
@ -386,7 +386,7 @@ fn find_commits_to_push(
.map(move |bcs_id| {
cloned!(ctx, repo);
async move {
let hg_cs_id = repo.get_hg_from_bonsai_changeset(ctx, bcs_id).await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, bcs_id).await?;
Ok((bcs_id, hg_cs_id))
}
.boxed()

View File

@ -13,7 +13,6 @@ use crate::errors::{
use crate::{bind_sync_err, BookmarkOverlay, CombinedBookmarkUpdateLogEntry, CommitsInBundle};
use anyhow::{anyhow, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use bookmarks::{BookmarkName, BookmarkUpdateLogEntry, BookmarkUpdateReason, RawBundleReplayData};
use changeset_fetcher::ArcChangesetFetcher;
use cloned::cloned;
@ -27,6 +26,7 @@ use futures_watchdog::WatchdogExt;
use getbundle_response::SessionLfsParams;
use itertools::Itertools;
use mercurial_bundle_replay_data::BundleReplayData;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use metaconfig_types::LfsParams;
use mononoke_api_types::InnerRepo;
@ -328,9 +328,7 @@ impl BundlePreparer {
let cs_id = async {
match batch.to_cs_id {
Some(to_changeset_id) => {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), to_changeset_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, to_changeset_id).await?;
Ok(Some((to_changeset_id, hg_cs_id)))
}
None => Ok(None),

View File

@ -23,7 +23,6 @@ thiserror = "1.0.29"
[dev-dependencies]
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../bookmarks" }
borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
cloned = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }

View File

@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
blobrepo_utils = { version = "0.1.0", path = "../blobrepo_utils" }
blobstore = { version = "0.1.0", path = "../blobstore" }
bookmarks = { version = "0.1.0", path = "../bookmarks" }
@ -22,6 +21,7 @@ futures = { version = "0.3.13", features = ["async-await", "compat"] }
manifest = { version = "0.1.0", path = "../manifest" }
maplit = "1.0"
mercurial_bundle_replay_data = { version = "0.1.0", path = "../mercurial/bundle_replay_data" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }
mononoke_types = { version = "0.1.0", path = "../mononoke_types" }
@ -33,6 +33,7 @@ tunables = { version = "0.1.0", path = "../tunables" }
[dev-dependencies]
async-trait = "0.1.52"
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
filestore = { version = "0.1.0", path = "../filestore" }

View File

@ -50,7 +50,6 @@
/// more information on those;
use anyhow::{format_err, Error, Result};
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobrepo_utils::convert_diff_result_into_file_change_for_diamond_merge;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateReason, BundleReplay};
@ -66,6 +65,7 @@ use futures::{
use manifest::{bonsai_diff, BonsaiDiffFileChange, ManifestOps};
use maplit::hashmap;
use mercurial_bundle_replay_data::BundleReplayData;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, HgFileNodeId, HgManifestId, MPath};
use metaconfig_types::PushrebaseFlags;
use mononoke_types::{
@ -163,7 +163,7 @@ impl HgReplayData {
cloned!(ctx, repo);
move |cs_id| {
cloned!(ctx, repo);
async move { repo.get_hg_from_bonsai_changeset(ctx, cs_id).await }.boxed()
async move { repo.derive_hg_changeset(&ctx, cs_id).await }.boxed()
}
});
@ -491,7 +491,7 @@ async fn maybe_validate_commit(
// Generate hg changeset to check that this rebased bonsai commit
// is valid.
repo.get_hg_from_bonsai_changeset(ctx.clone(), *bcs_id)
repo.derive_hg_changeset(ctx, *bcs_id)
.map_err(|err| PushrebaseError::ValidationError {
source_cs_id: *old_id,
rebased_cs_id: *bcs_id,
@ -624,7 +624,7 @@ async fn find_closest_ancestor_root(
if let Some(index) = roots.get(&id) {
if config.forbid_p2_root_rebases && *index != ChildIndex(0) {
let hgcs = repo.get_hg_from_bonsai_changeset(ctx.clone(), id).await?;
let hgcs = repo.derive_hg_changeset(ctx, id).await?;
return Err(PushrebaseError::Error(
PushrebaseInternalError::P2RootRebaseForbidden(hgcs, bookmark.clone()).into(),
));
@ -686,9 +686,7 @@ async fn id_to_manifestid(
repo: &BlobRepo,
bcs_id: ChangesetId,
) -> Result<HgManifestId, Error> {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.blobstore()).await?;
Ok(hg_cs.manifestid())
}
@ -1391,10 +1389,7 @@ mod tests {
let cs_id = commit_ctx.commit().await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?
];
let hgcss = hashset![repo.derive_hg_changeset(ctx, cs_id).await?];
let res = do_pushrebase(
&ctx,
@ -1428,9 +1423,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let book = master_bookmark();
bookmark(&ctx, &repo, book.clone())
@ -1623,12 +1616,8 @@ mod tests {
)
.await?;
let hg_cs_1 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?;
let hg_cs_2 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?;
let hg_cs_1 = repo.derive_hg_changeset(&ctx, bcs_id_1).await?;
let hg_cs_2 = repo.derive_hg_changeset(&ctx, bcs_id_2).await?;
do_pushrebase(
&ctx,
&repo,
@ -1679,12 +1668,8 @@ mod tests {
)
.await?;
let hg_cs_1 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?;
let hg_cs_2 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?;
let hg_cs_1 = repo.derive_hg_changeset(&ctx, bcs_id_1).await?;
let hg_cs_2 = repo.derive_hg_changeset(&ctx, bcs_id_2).await?;
do_pushrebase(
&ctx,
&repo,
@ -1789,15 +1774,9 @@ mod tests {
make_paths(&["f0", "f1", "f2"]),
);
let hg_cs_1 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?;
let hg_cs_2 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?;
let hg_cs_3 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_3)
.await?;
let hg_cs_1 = repo.derive_hg_changeset(&ctx, bcs_id_1).await?;
let hg_cs_2 = repo.derive_hg_changeset(&ctx, bcs_id_2).await?;
let hg_cs_3 = repo.derive_hg_changeset(&ctx, bcs_id_3).await?;
let bcs_id_rebased = do_pushrebase(
&ctx,
&repo,
@ -1867,15 +1846,9 @@ mod tests {
)
.await?;
let hg_cs_1 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?;
let hg_cs_2 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?;
let hg_cs_3 = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_3)
.await?;
let hg_cs_1 = repo.derive_hg_changeset(&ctx, bcs_id_1).await?;
let hg_cs_2 = repo.derive_hg_changeset(&ctx, bcs_id_2).await?;
let hg_cs_3 = repo.derive_hg_changeset(&ctx, bcs_id_3).await?;
let result = do_pushrebase(
&ctx,
&repo,
@ -1930,12 +1903,9 @@ mod tests {
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?,
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?,
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_3)
.await?,
repo.derive_hg_changeset(&ctx, bcs_id_1).await?,
repo.derive_hg_changeset(&ctx, bcs_id_2).await?,
repo.derive_hg_changeset(&ctx, bcs_id_3).await?,
];
let book = master_bookmark();
@ -1980,10 +1950,8 @@ mod tests {
.commit()
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?,
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_2)
.await?,
repo.derive_hg_changeset(&ctx, bcs_id_1).await?,
repo.derive_hg_changeset(&ctx, bcs_id_2).await?,
];
let book = master_bookmark();
@ -2034,11 +2002,8 @@ mod tests {
})
.await?;
let hgcss = try_join_all(
bcss.iter()
.map(|bcs| repo.get_hg_from_bonsai_changeset(ctx.clone(), *bcs)),
)
.await?;
let hgcss =
try_join_all(bcss.iter().map(|bcs| repo.derive_hg_changeset(&ctx, *bcs))).await?;
let book = master_bookmark();
set_bookmark(
ctx.clone(),
@ -2062,7 +2027,7 @@ mod tests {
.commit()
.await?;
let hgcss = hashset![repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs).await?];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs).await?];
// try rebase with small recursion limit
let config = PushrebaseFlags {
@ -2104,7 +2069,7 @@ mod tests {
.add_file("file", "data")
.commit()
.await?;
let hgcss = hashset![repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs).await?];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs).await?];
set_bookmark(
ctx.clone(),
@ -2163,7 +2128,7 @@ mod tests {
.commit()
.await?;
let hgcss = hashset![repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs).await?];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs).await?];
let book = master_bookmark();
set_bookmark(
@ -2276,7 +2241,7 @@ mod tests {
.commit()
.await?;
let hgcss = hashset![repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs).await?];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs).await?];
let book = master_bookmark();
set_bookmark(
@ -2301,9 +2266,7 @@ mod tests {
};
assert_eq!(FileChange::Change(file_1_result), file_1_exec);
let result_hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), result.head)
.await?;
let result_hg = repo.derive_hg_changeset(&ctx, result.head).await?;
let result_cs = result_hg.load(&ctx, repo.blobstore()).await?;
let result_1_id = result_cs
.manifestid()
@ -2491,9 +2454,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let book = BookmarkName::new("newbook")?;
do_pushrebase(
@ -2594,9 +2555,7 @@ mod tests {
.add_file("file", "content")
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let book = master_bookmark();
set_bookmark(
@ -2648,9 +2607,7 @@ mod tests {
.set_author_date(DateTime::from_timestamp(0, 100)?)
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let book = master_bookmark();
set_bookmark(
@ -2715,10 +2672,8 @@ mod tests {
.commit()
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_0)
.await?,
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_1)
.await?,
repo.derive_hg_changeset(&ctx, bcs_id_0).await?,
repo.derive_hg_changeset(&ctx, bcs_id_1).await?,
];
let book = master_bookmark();
@ -2776,9 +2731,7 @@ mod tests {
let book = master_bookmark();
let merge_hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), merge)
.await?;
let merge_hg_cs_id = repo.derive_hg_changeset(&ctx, merge).await?;
set_bookmark(ctx.clone(), repo.clone(), &book, &{
// https://github.com/rust-lang/rust/pull/64856
@ -2865,10 +2818,7 @@ mod tests {
let book = master_bookmark();
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_should_fail)
.await?
];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs_id_should_fail).await?];
let res = do_pushrebase(
&ctx,
@ -2882,7 +2832,7 @@ mod tests {
should_have_conflicts(res);
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_should_succeed)
repo.derive_hg_changeset(&ctx, bcs_id_should_succeed)
.await?,
];
@ -2942,9 +2892,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_master)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id_master).await?;
let book = master_bookmark();
set_bookmark(ctx.clone(), repo.clone(), &book, &{
@ -2955,10 +2903,8 @@ mod tests {
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_first_merge)
.await?,
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_second_merge)
.await?,
repo.derive_hg_changeset(&ctx, bcs_id_first_merge).await?,
repo.derive_hg_changeset(&ctx, bcs_id_second_merge).await?,
];
do_pushrebase(
@ -2975,9 +2921,7 @@ mod tests {
.await?
.ok_or(Error::msg("master not set"))?;
let master_hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_master)
.await?;
let master_hg = repo.derive_hg_changeset(&ctx, new_master).await?;
ensure_content(
&ctx,
@ -3033,9 +2977,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_master)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id_master).await?;
let book = master_bookmark();
set_bookmark(ctx.clone(), repo.clone(), &book, &{
@ -3045,10 +2987,7 @@ mod tests {
})
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_merge)
.await?,
];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs_id_merge).await?,];
do_pushrebase(
&ctx,
@ -3064,9 +3003,7 @@ mod tests {
.await?
.ok_or(Error::msg("master not set"))?;
let master_hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_master)
.await?;
let master_hg = repo.derive_hg_changeset(&ctx, new_master).await?;
ensure_content(
&ctx,
@ -3132,9 +3069,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_master)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id_master).await?;
let book = master_bookmark();
set_bookmark(ctx.clone(), repo.clone(), &book, &{
@ -3144,10 +3079,7 @@ mod tests {
})
.await?;
let hgcss = hashset![
repo.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id_merge)
.await?
];
let hgcss = hashset![repo.derive_hg_changeset(&ctx, bcs_id_merge).await?];
do_pushrebase(
&ctx,
@ -3163,9 +3095,7 @@ mod tests {
.await?
.ok_or(Error::msg("master is not set"))?;
let master_hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), new_master)
.await?;
let master_hg = repo.derive_hg_changeset(&ctx, new_master).await?;
ensure_content(
&ctx,
@ -3205,9 +3135,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let result = do_pushrebase(
&ctx,
@ -3223,9 +3151,7 @@ mod tests {
let bcs = result.head.load(&ctx, repo.blobstore()).await?;
assert_eq!(bcs.file_changes().collect::<Vec<_>>(), vec![]);
let master_hg = repo
.get_hg_from_bonsai_changeset(ctx.clone(), result.head)
.await?;
let master_hg = repo.derive_hg_changeset(&ctx, result.head).await?;
ensure_content(
&ctx,
@ -3374,9 +3300,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
let err = do_pushrebase(
&ctx,
@ -3404,9 +3328,7 @@ mod tests {
.commit()
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs = repo.derive_hg_changeset(&ctx, bcs_id).await?;
do_pushrebase(
&ctx,

View File

@ -9,7 +9,6 @@ license = "GPLv2+"
[dependencies]
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
context = { version = "0.1.0", path = "../../server/context" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fixtures = { version = "0.1.0", path = "../../tests/fixtures" }

View File

@ -35,6 +35,7 @@ lazy_static = "1.0"
manifest = { version = "0.1.0", path = "../manifest" }
maplit = "1.0"
mercurial_bundles = { version = "0.1.0", path = "../mercurial/bundles" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_revlog = { version = "0.1.0", path = "../mercurial/revlog" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }

View File

@ -10,11 +10,11 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
context = { version = "0.1.0", path = "../../server/context" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures-old = { package = "futures", version = "0.1.31" }
mercurial_bundles = { version = "0.1.0", path = "../../mercurial/bundles" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
pushrebase = { version = "0.1.0", path = "../../pushrebase" }

View File

@ -9,12 +9,12 @@
use anyhow::{Error, Result};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use context::CoreContext;
use futures::{FutureExt, TryFutureExt};
use futures_old::{stream, Stream as StreamOld};
use mercurial_bundles::obsmarkers::MetadataEntry;
use mercurial_bundles::{part_encode::PartEncodeBuilder, parts};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_types::DateTime;
@ -59,8 +59,8 @@ fn pushrebased_changesets_to_hg_stream(
let ctx = ctx.clone();
async move {
let (old, new) = futures::try_join!(
blobrepo.get_hg_from_bonsai_changeset(ctx.clone(), p.id_old),
blobrepo.get_hg_from_bonsai_changeset(ctx.clone(), p.id_new),
blobrepo.derive_hg_changeset(&ctx, p.id_old),
blobrepo.derive_hg_changeset(&ctx, p.id_new),
)?;
Ok((old, vec![new]))
}

View File

@ -32,6 +32,7 @@ borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/r
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
manifest = { version = "0.1.0", path = "../../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
test_repo_factory = { version = "0.1.0", path = "../../repo_factory/test_repo_factory" }

View File

@ -352,10 +352,10 @@ fn prepare_blob_lfs_file(
mod test {
use super::*;
use assert_matches::assert_matches;
use blobrepo_hg::BlobRepoHg;
use borrowed::borrowed;
use fbinit::FacebookInit;
use manifest::{Entry, Manifest};
use mercurial_derived_data::DeriveHgChangeset;
use metaconfig_types::FilestoreParams;
use mononoke_types::MPathElement;
use test_repo_factory::TestRepoFactory;
@ -378,7 +378,7 @@ mod test {
.await?;
let hg_manifest = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs)
.derive_hg_changeset(ctx, bcs)
.await?
.load(ctx, repo.blobstore())
.await?

View File

@ -49,6 +49,7 @@ use lazy_static::lazy_static;
use manifest::{Diff, Entry, ManifestOps};
use maplit::hashmap;
use mercurial_bundles::{create_bundle_stream, parts, wirepack, Bundle2Item};
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_revlog::{self, RevlogChangeset};
use mercurial_types::{
blobs::HgBlobChangeset, calculate_hg_node_id, convert_parents_to_remotefilelog_format,
@ -2503,9 +2504,7 @@ impl GitLookup {
match maybe_bonsai {
Some(bcs_id) => {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(ctx, bcs_id).await?;
Ok(Some(generate_lookup_resp_buf(
true,
hg_cs_id.to_hex().as_bytes(),

View File

@ -19,6 +19,7 @@ use futures::{
use futures_01_ext::StreamExt as OldStreamExt;
use futures_ext::{FbFutureExt, FbTryFutureExt};
use futures_old::Future;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgChangesetId;
use mononoke_repo::MononokeRepo;
use std::collections::HashMap;
@ -185,7 +186,7 @@ where
return self
.repo
.blobrepo()
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.derive_hg_changeset(&ctx, cs_id)
.map_ok(Some)
.await;
}
@ -309,10 +310,7 @@ mod test {
.commit()
.await?;
let hg_cs_id = repo
.blob_repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.blob_repo.derive_hg_changeset(&ctx, cs_id).await?;
bookmark(&ctx, &repo.blob_repo, "prefix/scratchbook")
.create_scratch(cs_id)
.await?;

View File

@ -14,6 +14,7 @@ use fixtures::many_files_dirs;
use futures::compat::Future01CompatExt;
use manifest::{Entry, ManifestOps};
use maplit::hashset;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::HgFileNodeId;
use metaconfig_types::LfsParams;
use mononoke_api::Repo;
@ -255,9 +256,7 @@ async fn test_lfs_rollout(fb: FacebookInit) -> Result<(), Error> {
.commit()
.await?;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), commit)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, commit).await?;
let hg_cs = hg_cs_id.load(&ctx, &repo.get_blobstore()).await?;
@ -323,9 +322,7 @@ async fn test_maybe_validate_pushed_bonsais(fb: FacebookInit) -> Result<(), Erro
.commit()
.await?;
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), commit)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, commit).await?;
// No replay data - ignore
maybe_validate_pushed_bonsais(&ctx, &repo, &None).await?;

View File

@ -35,6 +35,7 @@ manifest = { version = "0.1.0", path = "../../manifest" }
maplit = "1.0"
mercurial_bundle_replay_data = { version = "0.1.0", path = "../../mercurial/bundle_replay_data" }
mercurial_bundles = { version = "0.1.0", path = "../../mercurial/bundles" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_mutation = { version = "0.1.0", path = "../../mercurial/mutation" }
mercurial_revlog = { version = "0.1.0", path = "../../mercurial/revlog" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }

View File

@ -9,12 +9,12 @@
use anyhow::{Context, Error};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use bookmarks_movement::BookmarkMovementError;
use context::CoreContext;
use futures::future::{BoxFuture, FutureExt};
use futures::stream::{self, StreamExt, TryStreamExt};
use hooks::{CrossRepoPushSource, HookManager, HookRejection};
use mercurial_derived_data::DeriveHgChangeset;
use crate::resolver::{HgHookRejection, PostResolveAction, PostResolvePushRebase};
use crate::BundleResolverError;
@ -41,9 +41,7 @@ pub(crate) fn make_hook_rejection_remapper(
let ctx = ctx.clone();
let repo = repo.clone();
async move {
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, cs_id).await?;
Ok(HgHookRejection {
hook_name,
hg_cs_id,

View File

@ -25,7 +25,6 @@ use crate::{
use anyhow::{format_err, Context, Error};
use backsyncer::{backsync_latest, BacksyncLimit, TargetRepoDbs};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::BookmarkName;
use cacheblob::LeaseOps;
@ -40,6 +39,7 @@ use futures::{
};
use hooks::{CrossRepoPushSource, HookRejection};
use live_commit_sync_config::LiveCommitSyncConfig;
use mercurial_derived_data::DeriveHgChangeset;
use mononoke_repo::MononokeRepo;
use mononoke_types::{BonsaiChangeset, ChangesetId};
use pushrebase::PushrebaseChangesetPair;
@ -218,9 +218,7 @@ impl PushRedirector {
},
};
let hg_cs_id = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.derive_hg_changeset(&ctx, cs_id).await?;
Ok(HgHookRejection {
hook_name,
@ -380,7 +378,7 @@ impl PushRedirector {
});
match small_cs_id {
Err(err) => Err(err),
Ok(id) => source_repo.get_hg_from_bonsai_changeset(ctx, *id).await,
Ok(id) => source_repo.derive_hg_changeset(&ctx, *id).await,
}
}
.boxed()

View File

@ -22,6 +22,7 @@ use getbundle_response::{
create_getbundle_response, DraftsInBundlesPolicy, PhasesPart, SessionLfsParams,
};
use mercurial_bundles::{create_bundle_stream, parts, Bundle2EncodeBuilder, PartId};
use mercurial_derived_data::DeriveHgChangeset;
use metaconfig_types::PushrebaseParams;
use mononoke_types::ChangesetId;
use obsolete;
@ -135,7 +136,7 @@ impl UnbundleResponse {
// suddenly moved before current pushrebase finished.
let common = commonheads.heads;
let maybe_onto_head = repo.get_bookmark(ctx.clone(), &onto);
let pushrebased_hg_rev = repo.get_hg_from_bonsai_changeset(ctx.clone(), pushrebased_rev);
let pushrebased_hg_rev = repo.derive_hg_changeset(ctx, pushrebased_rev);
let bookmark_reply_part = match bookmark_push_part_id {
Some(part_id) => Some(parts::replypushkey_part(true, part_id)?),

View File

@ -11,7 +11,6 @@ license = "GPLv2+"
anyhow = "1.0.51"
backsyncer = { version = "0.1.0", path = "../commit_rewriting/backsyncer" }
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
blobstore = { version = "0.1.0", path = "../blobstore" }
bonsai_git_mapping = { version = "0.1.0", path = "../bonsai_git_mapping" }
bonsai_globalrev_mapping = { version = "0.1.0", path = "../bonsai_globalrev_mapping" }
@ -34,6 +33,7 @@ itertools = "0.10.3"
live_commit_sync_config = { version = "0.1.0", path = "../commit_rewriting/live_commit_sync_config" }
manifest = { version = "0.1.0", path = "../manifest" }
maplit = "1.0"
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }
mononoke_hg_sync_job_helper_lib = { version = "0.1.0", path = "../mononoke_hg_sync_job" }

View File

@ -9,7 +9,6 @@
use anyhow::{format_err, Context, Error};
use backsyncer::{backsync_latest, open_backsyncer_dbs, BacksyncLimit, TargetRepoDbs};
use blobrepo::{save_bonsai_changesets, AsBlobRepo};
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateReason, BookmarksRef};
use borrowed::borrowed;
@ -33,6 +32,7 @@ use itertools::Itertools;
use live_commit_sync_config::{CfgrLiveCommitSyncConfig, LiveCommitSyncConfig};
use manifest::ManifestOps;
use maplit::hashset;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, MPath};
use metaconfig_types::{
BookmarkAttrs, CommitSyncConfigVersion, MetadataDatabaseConfig, RepoConfig,
@ -441,7 +441,7 @@ async fn move_bookmark(
let check_repo = async move {
let hg_csid = repo
.as_blob_repo()
.get_hg_from_bonsai_changeset(ctx.clone(), curr_csid.clone())
.derive_hg_changeset(ctx, curr_csid.clone())
.await?;
check_dependent_systems(
&ctx,
@ -485,7 +485,7 @@ async fn move_bookmark(
let small_repo_hg_csid = small_repo_back_sync_vars
.small_repo
.as_blob_repo()
.get_hg_from_bonsai_changeset(ctx.clone(), small_repo_cs_id)
.derive_hg_changeset(ctx, small_repo_cs_id)
.await?;
check_dependent_systems(
@ -631,10 +631,7 @@ async fn get_leaf_entries(
repo: &Repo,
cs_id: ChangesetId,
) -> Result<HashSet<MPath>, Error> {
let hg_cs_id = repo
.as_blob_repo()
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.await?;
let hg_cs_id = repo.as_blob_repo().derive_hg_changeset(ctx, cs_id).await?;
let hg_cs = hg_cs_id.load(ctx, repo.repo_blobstore()).await?;
hg_cs
.manifestid()

View File

@ -27,7 +27,6 @@ uniqueheap = { version = "0.1.0", path = "../common/uniqueheap" }
ascii = "1.0"
async-trait = "0.1.52"
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../blobrepo/blobrepo_hg" }
failure_ext = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }

View File

@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
context = { version = "0.1.0", path = "../../server/context" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }

View File

@ -28,6 +28,7 @@ futures = { version = "0.3.13", features = ["async-await", "compat"] }
futures-old = { package = "futures", version = "0.1.31" }
futures_ext = { package = "futures_01_ext", version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
manifest = { version = "0.1.0", path = "../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../mercurial/types" }
mononoke_types = { version = "0.1.0", path = "../mononoke_types" }
scuba_ext = { version = "0.1.0", path = "../common/scuba_ext" }

View File

@ -10,7 +10,6 @@ license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobrepo_hg = { version = "0.1.0", path = "../../blobrepo/blobrepo_hg" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
bytes = { version = "1.1", features = ["serde"] }
@ -19,6 +18,7 @@ fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rus
filestore = { version = "0.1.0", path = "../../filestore" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
maplit = "1.0"
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
mononoke_api_types = { version = "0.1.0", path = "../../mononoke_api/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }

View File

@ -9,7 +9,6 @@
use anyhow::Error;
use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use borrowed::borrowed;
use bytes::Bytes;
@ -18,6 +17,7 @@ use fbinit::FacebookInit;
use filestore::StoreRequest;
use futures::{future::try_join_all, stream};
use maplit::btreemap;
use mercurial_derived_data::DeriveHgChangeset;
use mercurial_types::{HgChangesetId, MPath};
use mononoke_api_types::InnerRepo;
use mononoke_types::{
@ -116,10 +116,7 @@ async fn create_bonsai_changeset_from_test_data(
.await
.unwrap();
let hg_cs = blobrepo
.get_hg_from_bonsai_changeset(ctx.clone(), bcs_id)
.await
.unwrap();
let hg_cs = blobrepo.derive_hg_changeset(&ctx, bcs_id).await.unwrap();
assert_eq!(
hg_cs,