move create_single_move_commit to common

Summary: I'm planning to reuse create_single_move_commit when syncing merge commits.

Reviewed By: farnz

Differential Revision: D28885489

fbshipit-source-id: 6035c0e7290f137b723b73e656b73d4f78e2da9d
Authored by Mateusz Kwapich on 2021-06-08 05:47:50 -07:00; committed by Facebook GitHub Bot
parent 0a633f9bf8
commit fed6a478a8
2 changed files with 81 additions and 76 deletions

File 1 of 2 (AddSyncTarget implementation):

@@ -5,18 +5,15 @@
  * GNU General Public License version 2.
  */
 
-use crate::common::{find_bookmark_and_value, MegarepoOp, SourceName};
+use crate::common::{find_bookmark_and_value, MegarepoOp, SourceAndMovedChangesets, SourceName};
 use anyhow::{anyhow, Error};
 use blobrepo::{save_bonsai_changesets, BlobRepo};
 use bookmarks::{BookmarkName, BookmarkUpdateReason};
 use bytes::Bytes;
 use commit_transformation::{create_source_to_target_multi_mover, MultiMover};
 use context::CoreContext;
-use derived_data::BonsaiDerived;
 use derived_data_utils::derived_data_utils;
-use fsnodes::RootFsnodeId;
 use futures::{future, stream, stream::FuturesUnordered, StreamExt, TryStreamExt};
-use manifest::ManifestOps;
 use megarepo_config::{
     MononokeMegarepoConfigs, Source, SourceRevision, SyncConfigVersion, SyncTargetConfig,
 };
@@ -24,9 +21,7 @@ use megarepo_error::MegarepoError;
 use megarepo_mapping::CommitRemappingState;
 use mononoke_api::Mononoke;
 use mononoke_api::RepoContext;
-use mononoke_types::{
-    BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime, FileChange, FileType, MPath,
-};
+use mononoke_types::{BonsaiChangesetMut, ChangesetId, DateTime, FileChange, FileType, MPath};
 use reachabilityindex::LeastCommonAncestorsHint;
 use sorted_vector_map::SortedVectorMap;
 use std::{
@@ -389,67 +384,6 @@ impl<'a> AddSyncTarget<'a> {
         Ok(bcs)
     }
 
-    async fn create_single_move_commit<'b>(
-        &'b self,
-        ctx: &'b CoreContext,
-        repo: &'b BlobRepo,
-        cs_id: ChangesetId,
-        mover: &MultiMover,
-        linkfiles: BTreeMap<MPath, Option<FileChange>>,
-        source_name: &SourceName,
-    ) -> Result<SourceAndMovedChangesets, MegarepoError> {
-        let root_fsnode_id = RootFsnodeId::derive(ctx, repo, cs_id)
-            .await
-            .map_err(Error::from)?;
-        let fsnode_id = root_fsnode_id.fsnode_id();
-        let entries = fsnode_id
-            .list_leaf_entries(ctx.clone(), repo.get_blobstore())
-            .try_collect::<Vec<_>>()
-            .await?;
-
-        let mut file_changes = vec![];
-        for (path, fsnode) in entries {
-            let moved = mover(&path)?;
-
-            // Check that path doesn't move to itself - in that case we don't need to
-            // delete file
-            if moved.iter().find(|cur_path| cur_path == &&path).is_none() {
-                file_changes.push((path.clone(), None));
-            }
-
-            file_changes.extend(moved.into_iter().map(|target| {
-                let fc = FileChange::new(
-                    *fsnode.content_id(),
-                    *fsnode.file_type(),
-                    fsnode.size(),
-                    Some((path.clone(), cs_id)),
-                );
-
-                (target, Some(fc))
-            }));
-        }
-        file_changes.extend(linkfiles.into_iter());
-
-        // TODO(stash): we need to figure out what parameters to set here
-        let moved_bcs = BonsaiChangesetMut {
-            parents: vec![cs_id],
-            author: "svcscm".to_string(),
-            author_date: DateTime::now(),
-            committer: None,
-            committer_date: None,
-            message: format!("move commit for source {}", source_name.0),
-            extra: SortedVectorMap::new(),
-            file_changes: file_changes.into_iter().collect(),
-        }
-        .freeze()?;
-
-        let source_and_moved_changeset = SourceAndMovedChangesets {
-            source: cs_id,
-            moved: moved_bcs,
-        };
-        Ok(source_and_moved_changeset)
-    }
-
     fn prepare_linkfiles(
         &self,
         source_config: &Source,
@@ -544,11 +478,6 @@
     }
 }
 
-struct SourceAndMovedChangesets {
-    source: ChangesetId,
-    moved: BonsaiChangeset,
-}
-
 // Verifies that no two sources create the same path in the target
 fn add_and_check_all_paths<'a>(
     all_files_in_target: &'a mut HashMap<MPath, SourceName>,
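Note that no call sites in this file change: AddSyncTarget implements MegarepoOp, so after the move the helper is still reachable through ordinary method resolution, now as a default trait method. A hypothetical call site, for illustration only; the surrounding variables are assumptions, not part of this diff:

    // Inside some AddSyncTarget method (hypothetical): this line compiles
    // unchanged, because `create_single_move_commit` now resolves to the
    // MegarepoOp default method rather than the inherent method deleted above.
    let moved = self
        .create_single_move_commit(ctx, repo, cs_id, &mover, linkfiles, &source_name)
        .await?;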

File 2 of 2 (crate::common):

@@ -5,19 +5,34 @@
  * GNU General Public License version 2.
  */
 
-use anyhow::anyhow;
+use anyhow::{anyhow, Error};
 use async_trait::async_trait;
+use blobrepo::BlobRepo;
 use bookmarks::BookmarkName;
+use commit_transformation::MultiMover;
 use context::CoreContext;
-use futures::TryFutureExt;
+use derived_data::BonsaiDerived;
+use fsnodes::RootFsnodeId;
+use futures::{TryFutureExt, TryStreamExt};
+use manifest::ManifestOps;
 use megarepo_error::MegarepoError;
 use mononoke_api::{Mononoke, RepoContext};
-use mononoke_types::{ChangesetId, RepositoryId};
+use mononoke_types::RepositoryId;
+use mononoke_types::{
+    BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime, FileChange, MPath,
+};
+use sorted_vector_map::SortedVectorMap;
+use std::collections::BTreeMap;
 use std::{convert::TryInto, sync::Arc};
 
 #[derive(Clone, Debug, Hash, Eq, PartialEq)]
 pub struct SourceName(pub String);
 
+pub struct SourceAndMovedChangesets {
+    pub source: ChangesetId,
+    pub moved: BonsaiChangeset,
+}
+
 #[async_trait]
 pub trait MegarepoOp {
     fn mononoke(&self) -> &Arc<Mononoke>;
@@ -36,6 +51,67 @@ pub trait MegarepoOp {
             .ok_or_else(|| MegarepoError::request(anyhow!("repo not found {}", target_repo_id)))?;
         Ok(target_repo)
     }
+
+    async fn create_single_move_commit(
+        &self,
+        ctx: &CoreContext,
+        repo: &BlobRepo,
+        cs_id: ChangesetId,
+        mover: &MultiMover,
+        linkfiles: BTreeMap<MPath, Option<FileChange>>,
+        source_name: &SourceName,
+    ) -> Result<SourceAndMovedChangesets, MegarepoError> {
+        let root_fsnode_id = RootFsnodeId::derive(ctx, repo, cs_id)
+            .await
+            .map_err(Error::from)?;
+        let fsnode_id = root_fsnode_id.fsnode_id();
+        let entries = fsnode_id
+            .list_leaf_entries(ctx.clone(), repo.get_blobstore())
+            .try_collect::<Vec<_>>()
+            .await?;
+
+        let mut file_changes = vec![];
+        for (path, fsnode) in entries {
+            let moved = mover(&path)?;
+
+            // Check that path doesn't move to itself - in that case we don't need to
+            // delete file
+            if moved.iter().find(|cur_path| cur_path == &&path).is_none() {
+                file_changes.push((path.clone(), None));
+            }
+
+            file_changes.extend(moved.into_iter().map(|target| {
+                let fc = FileChange::new(
+                    *fsnode.content_id(),
+                    *fsnode.file_type(),
+                    fsnode.size(),
+                    Some((path.clone(), cs_id)),
+                );
+
+                (target, Some(fc))
+            }));
+        }
+        file_changes.extend(linkfiles.into_iter());
+
+        // TODO(stash): we need to figure out what parameters to set here
+        let moved_bcs = BonsaiChangesetMut {
+            parents: vec![cs_id],
+            author: "svcscm".to_string(),
+            author_date: DateTime::now(),
+            committer: None,
+            committer_date: None,
+            message: format!("move commit for source {}", source_name.0),
+            extra: SortedVectorMap::new(),
+            file_changes: file_changes.into_iter().collect(),
+        }
+        .freeze()?;
+
+        let source_and_moved_changeset = SourceAndMovedChangesets {
+            source: cs_id,
+            moved: moved_bcs,
+        };
+        Ok(source_and_moved_changeset)
+    }
 }
 
 pub async fn find_bookmark_and_value(
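For context on the reuse the summary mentions: any operation that implements MegarepoOp now inherits create_single_move_commit as a default trait method. A minimal sketch of a second implementor, assuming mononoke() is the trait's only required method (as shown in the hunk above); SyncMergeCommits and move_one_source are hypothetical names invented for illustration, not part of this diff:

    use std::collections::BTreeMap;
    use std::sync::Arc;

    use async_trait::async_trait;
    use blobrepo::BlobRepo;
    use commit_transformation::MultiMover;
    use context::CoreContext;
    use megarepo_error::MegarepoError;
    use mononoke_api::Mononoke;
    use mononoke_types::ChangesetId;

    use crate::common::{MegarepoOp, SourceAndMovedChangesets, SourceName};

    // Hypothetical second megarepo operation that reuses the shared helper.
    struct SyncMergeCommits {
        mononoke: Arc<Mononoke>,
    }

    #[async_trait]
    impl MegarepoOp for SyncMergeCommits {
        // The one required method; create_single_move_commit comes for free
        // as a default method of the trait.
        fn mononoke(&self) -> &Arc<Mononoke> {
            &self.mononoke
        }
    }

    impl SyncMergeCommits {
        // Hypothetical driver: rewrite a single source commit into the
        // target repo's layout via the shared helper.
        async fn move_one_source(
            &self,
            ctx: &CoreContext,
            repo: &BlobRepo,
            cs_id: ChangesetId,
            mover: &MultiMover,
            source_name: &SourceName,
        ) -> Result<SourceAndMovedChangesets, MegarepoError> {
            // No linkfiles in this sketch, so pass an empty map.
            let linkfiles = BTreeMap::new();
            self.create_single_move_commit(ctx, repo, cs_id, mover, linkfiles, source_name)
                .await
        }
    }

The design point is that the helper carries its own default body on the trait, so each new megarepo operation gets the fsnode walk and move-commit construction without duplicating them.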