megarepotool: add gradual-merge-progress subcommand

Summary:
This is to be able to automatically report progress: how many merges have been
done already.

Note: this intentionally uses the same logic as regular `gradual-merge`, so that we always report correct numbers.

Reviewed By: StanislavGlebik

Differential Revision: D23478448

fbshipit-source-id: 3deb081ab99ad34dbdac1057682096b8faebca41
This commit is contained in:
Kostia Balytskyi 2020-09-02 12:14:37 -07:00 committed by Facebook GitHub Bot
parent 18642dbd1f
commit 6e8cbd31b1
3 changed files with 143 additions and 20 deletions

View File

@ -16,6 +16,7 @@ use mononoke_types::DateTime;
// CLI name constants: each value is the user-facing token for a megarepotool
// subcommand or argument (e.g. `gradual-merge-progress` added in this change).
pub const COMMIT_HASH: &'static str = "commit-hash";
pub const GRADUAL_MERGE: &'static str = "gradual-merge";
pub const GRADUAL_MERGE_PROGRESS: &'static str = "gradual-merge-progress";
pub const MOVE: &'static str = "move";
pub const MERGE: &'static str = "merge";
pub const MARK_PUBLIC: &'static str = "mark-public";
@ -317,6 +318,29 @@ pub fn setup_app<'a, 'b>() -> App<'a, 'b> {
.required(false),
);
// Subcommand definition for `gradual-merge-progress`: a read-only command
// that reports how far the gradual merge has gone.
let gradual_merge_progress_subcommand = SubCommand::with_name(GRADUAL_MERGE_PROGRESS)
.about("Display progress of the gradual merge as #MERGED_COMMITS/#TOTAL_COMMITS_TO_MERGE")
.arg(
// Boundary commits delimiting the deletion stack; both are mandatory.
Arg::with_name(LAST_DELETION_COMMIT)
.long(LAST_DELETION_COMMIT)
.help("Last deletion commit")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(PRE_DELETION_COMMIT)
.long(PRE_DELETION_COMMIT)
.help("Commit right before the first deletion commit")
.takes_value(true)
.required(true),
)
.arg(
// NOTE(review): not marked .required(true), yet the runtime handler
// errors when it is absent ("bookmark where to merge is not specified")
// — consider making it required here so `--help` reflects reality.
Arg::with_name(COMMIT_BOOKMARK)
.help("bookmark to point to resulting commits (no sanity checks, will move existing bookmark, be careful)")
.long(COMMIT_BOOKMARK)
.takes_value(true)
);
let manual_commit_sync_subcommand = SubCommand::with_name(MANUAL_COMMIT_SYNC)
.about("Manually sync a commit from source repo to a target repo. It's usually used right after a big merge")
.arg(
@ -352,5 +376,6 @@ pub fn setup_app<'a, 'b>() -> App<'a, 'b> {
.subcommand(add_light_resulting_commit_args(pre_merge_delete_subcommand))
.subcommand(add_light_resulting_commit_args(bonsai_merge_subcommand))
.subcommand(add_light_resulting_commit_args(gradual_merge_subcommand))
.subcommand(gradual_merge_progress_subcommand)
.subcommand(manual_commit_sync_subcommand)
}

View File

@ -38,6 +38,66 @@ pub struct GradualMergeParams {
pub dry_run: bool,
}
/// Compute the full set of commits participating in the gradual merge and
/// the subset that has not been merged into the bookmark yet.
///
/// Returns `(total_count, unmerged)`, where `unmerged` pairs each
/// still-to-merge changeset with its `StackPosition` in the merge stack.
async fn get_unmerged_commits_with_total_count(
    ctx: &CoreContext,
    repo: &BlobRepo,
    skiplist: &SkiplistIndex,
    pre_deletion_commit: &ChangesetId,
    last_deletion_commit: &ChangesetId,
    bookmark_to_merge_into: &BookmarkName,
) -> Result<(usize, Vec<(ChangesetId, StackPosition)>), Error> {
    let all_commits =
        find_all_commits_to_merge(ctx, repo, *pre_deletion_commit, *last_deletion_commit).await?;
    let total_count = all_commits.len();
    info!(ctx.logger(), "{} total commits to merge", total_count);

    // Tag every commit with its position in the merge stack before filtering
    // out the ones that were already merged.
    let mut stack: Vec<(ChangesetId, StackPosition)> = Vec::with_capacity(total_count);
    for (idx, cs_id) in all_commits.into_iter().enumerate() {
        stack.push((cs_id, StackPosition(idx)));
    }

    let unmerged_commits =
        find_unmerged_commits(ctx, repo, stack, &bookmark_to_merge_into, skiplist).await?;
    Ok((total_count, unmerged_commits))
}
/// Report gradual merge progress as `(merged_so_far, total_to_merge)`.
///
/// Intentionally reuses the same commit-discovery logic as the regular
/// gradual merge so both always agree on the numbers.
pub async fn gradual_merge_progress(
    ctx: &CoreContext,
    repo: &BlobRepo,
    skiplist: &SkiplistIndex,
    pre_deletion_commit: &ChangesetId,
    last_deletion_commit: &ChangesetId,
    bookmark_to_merge_into: &BookmarkName,
) -> Result<(usize, usize), Error> {
    let (total, unmerged) = get_unmerged_commits_with_total_count(
        ctx,
        repo,
        skiplist,
        pre_deletion_commit,
        last_deletion_commit,
        bookmark_to_merge_into,
    )
    .await?;
    let merged = total - unmerged.len();
    Ok((merged, total))
}
// This function implements a strategy to merge a large repository into another
// while avoiding sudden increase in the working copy size.
// Normally this function should be called after a list of deletion has been created
@ -79,28 +139,16 @@ pub async fn gradual_merge(
dry_run,
} = params;
let commits_to_merge =
find_all_commits_to_merge(ctx, repo, *pre_deletion_commit, *last_deletion_commit).await?;
info!(
ctx.logger(),
"{} total commits to merge",
commits_to_merge.len()
);
let commits_to_merge = commits_to_merge
.into_iter()
.enumerate()
.map(|(idx, cs_id)| (cs_id, StackPosition(idx)))
.collect::<Vec<_>>();
let unmerged_commits = find_unmerged_commits(
let (_, unmerged_commits) = get_unmerged_commits_with_total_count(
ctx,
repo,
commits_to_merge,
&bookmark_to_merge_into,
skiplist,
pre_deletion_commit,
last_deletion_commit,
bookmark_to_merge_into,
)
.await?;
let unmerged_commits = if let Some(limit) = limit {
unmerged_commits.into_iter().take(*limit).collect()
} else {

View File

@ -39,9 +39,9 @@ use crate::cli::{
cs_args_from_matches, get_delete_commits_cs_args_factory,
get_gradual_merge_commits_cs_args_factory, setup_app, BONSAI_MERGE, BONSAI_MERGE_P1,
BONSAI_MERGE_P2, CHANGESET, CHUNKING_HINT_FILE, COMMIT_BOOKMARK, COMMIT_HASH, DRY_RUN,
EVEN_CHUNK_SIZE, FIRST_PARENT, GRADUAL_MERGE, LAST_DELETION_COMMIT, LIMIT, MANUAL_COMMIT_SYNC,
MAX_NUM_OF_MOVES_IN_COMMIT, MERGE, MOVE, ORIGIN_REPO, PARENTS, PRE_DELETION_COMMIT,
PRE_MERGE_DELETE, SECOND_PARENT, SYNC_DIAMOND_MERGE,
EVEN_CHUNK_SIZE, FIRST_PARENT, GRADUAL_MERGE, GRADUAL_MERGE_PROGRESS, LAST_DELETION_COMMIT,
LIMIT, MANUAL_COMMIT_SYNC, MAX_NUM_OF_MOVES_IN_COMMIT, MERGE, MOVE, ORIGIN_REPO, PARENTS,
PRE_DELETION_COMMIT, PRE_MERGE_DELETE, SECOND_PARENT, SYNC_DIAMOND_MERGE,
};
use crate::merging::perform_merge;
use megarepolib::chunking::{
@ -325,6 +325,53 @@ async fn run_gradual_merge<'a>(
Ok(())
}
/// CLI handler for the `gradual-merge-progress` subcommand.
///
/// Resolves the pre-/last-deletion commits and the target bookmark from the
/// command line, fetches the skiplist index, and prints progress to stdout
/// as "MERGED/TOTAL".
async fn run_gradual_merge_progress<'a>(
    ctx: CoreContext,
    matches: &ArgMatches<'a>,
    sub_m: &ArgMatches<'a>,
) -> Result<(), Error> {
    let repo = args::open_repo(ctx.fb, &ctx.logger(), &matches)
        .compat()
        .await?;
    // ok_or_else: build the error only on the failure path instead of
    // allocating it eagerly on every call (clippy `or_fun_call`).
    let last_deletion_commit = sub_m
        .value_of(LAST_DELETION_COMMIT)
        .ok_or_else(|| format_err!("last deletion commit is not specified"))?;
    let pre_deletion_commit = sub_m
        .value_of(PRE_DELETION_COMMIT)
        .ok_or_else(|| format_err!("pre deletion commit is not specified"))?;
    // NOTE(review): the arg definition does not mark COMMIT_BOOKMARK as
    // required, but it is mandatory here — consider aligning the CLI setup.
    let bookmark = sub_m
        .value_of(COMMIT_BOOKMARK)
        .ok_or_else(|| format_err!("bookmark where to merge is not specified"))?;

    let (_, repo_config) = args::get_config_by_repoid(ctx.fb, &matches, repo.get_repoid())?;

    // Resolve both changeset ids and fetch the skiplist index concurrently.
    let last_deletion_commit =
        helpers::csid_resolve(ctx.clone(), repo.clone(), last_deletion_commit).compat();
    let pre_deletion_commit =
        helpers::csid_resolve(ctx.clone(), repo.clone(), pre_deletion_commit).compat();
    let blobstore = repo.get_blobstore().boxed();
    let skiplist =
        fetch_skiplist_index(&ctx, &repo_config.skiplist_index_blobstore_key, &blobstore);
    let (last_deletion_commit, pre_deletion_commit, skiplist) =
        try_join3(last_deletion_commit, pre_deletion_commit, skiplist).await?;

    let (done, total) = gradual_merge::gradual_merge_progress(
        &ctx,
        &repo,
        &skiplist,
        &pre_deletion_commit,
        &last_deletion_commit,
        &BookmarkName::new(bookmark)?,
    )
    .await?;
    println!("{}/{}", done, total);
    Ok(())
}
async fn run_manual_commit_sync<'a>(
ctx: CoreContext,
matches: &ArgMatches<'a>,
@ -414,6 +461,9 @@ fn main(fb: FacebookInit) -> Result<()> {
(PRE_MERGE_DELETE, Some(sub_m)) => run_pre_merge_delete(ctx, &matches, sub_m).await,
(BONSAI_MERGE, Some(sub_m)) => run_bonsai_merge(ctx, &matches, sub_m).await,
(GRADUAL_MERGE, Some(sub_m)) => run_gradual_merge(ctx, &matches, sub_m).await,
(GRADUAL_MERGE_PROGRESS, Some(sub_m)) => {
run_gradual_merge_progress(ctx, &matches, sub_m).await
}
(MANUAL_COMMIT_SYNC, Some(sub_m)) => run_manual_commit_sync(ctx, &matches, sub_m).await,
_ => bail!("oh no, wrong arguments provided!"),
}