new tool: prepare the delete commits for history fixup

Summary:
As explained in the previous diff summary - we need to make those deletions
before we can fix up history. This diff adds the relevant args to the megarepo tool.

Reviewed By: StanislavGlebik

Differential Revision: D30456464

fbshipit-source-id: 894e27750684f86e18184e3b98beeda8dfb5b53d
This commit is contained in:
Mateusz Kwapich 2021-08-25 06:24:01 -07:00 committed by Facebook GitHub Bot
parent 92ce782001
commit d745491438
3 changed files with 261 additions and 8 deletions

View File

@ -28,6 +28,7 @@ pub const COMMIT_AUTHOR: &str = "commit-author";
pub const COMMIT_BOOKMARK: &str = "bookmark";
pub const COMMIT_DATE_RFC3339: &str = "commit-date-rfc3339";
pub const COMMIT_HASH: &str = "commit-hash";
pub const COMMIT_HASH_CORRECT_HISTORY: &str = "commit-hash-correct-history";
pub const COMMIT_MESSAGE: &str = "commit-message";
pub const DELETION_CHUNK_SIZE: &str = "deletion-chunk-size";
pub const DIFF_MAPPING_VERSIONS: &str = "diff-mapping-versions";
@ -38,6 +39,7 @@ pub const GRADUAL_MERGE_PROGRESS: &str = "gradual-merge-progress";
pub const GRADUAL_MERGE: &str = "gradual-merge";
pub const GRADUAL_DELETE: &str = "gradual-delete";
pub const HEAD_BOOKMARK: &str = "head-bookmark";
pub const HISTORY_FIXUP_DELETE: &str = "history-fixup-deletes";
pub const INPUT_FILE: &str = "input-file";
pub const LAST_DELETION_COMMIT: &str = "last-deletion-commit";
pub const LIMIT: &str = "limit";
@ -52,6 +54,7 @@ pub const ORIGIN_REPO: &str = "origin-repo";
pub const PARENTS: &str = "parents";
pub const PATH_REGEX: &str = "path-regex";
pub const PATH: &str = "path";
pub const PATHS_FILE: &str = "paths-file";
pub const PRE_DELETION_COMMIT: &str = "pre-deletion-commit";
pub const PRE_MERGE_DELETE: &str = "pre-merge-delete";
pub const RUN_MOVER: &str = "run-mover";
@ -301,6 +304,44 @@ pub fn setup_app<'a, 'b>() -> MononokeClapApp<'a, 'b> {
.required(false)
);
let history_fixup_delete_subcommand =
add_light_resulting_commit_args(SubCommand::with_name(HISTORY_FIXUP_DELETE))
.about("create a set of delete commits before the path fixup.")
.arg(
Arg::with_name(COMMIT_HASH)
.help(
"commit which we want to fixup (the
files specified in paths file will be deleted there)",
)
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(COMMIT_HASH_CORRECT_HISTORY)
.help(
"commit hash containing the files with correct
history (the files specified in path files will be
preserved there; all the other files will be deleted)",
)
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(EVEN_CHUNK_SIZE)
.help("chunk size for even chunking")
.long(EVEN_CHUNK_SIZE)
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(PATHS_FILE)
.long(PATHS_FILE)
.help("file containing paths to fixup separated by newlines")
.takes_value(true)
.required(true)
.multiple(true),
);
// Please don't move `add_light_resulting_commit_args` to be applied
// after `PATH` arg is added, as in that case `PATH` won't be the last
// positional argument
@ -638,6 +679,7 @@ pub fn setup_app<'a, 'b>() -> MononokeClapApp<'a, 'b> {
.subcommand(add_resulting_commit_args(merge_subcommand))
.subcommand(sync_diamond_subcommand)
.subcommand(add_light_resulting_commit_args(pre_merge_delete_subcommand))
.subcommand(history_fixup_delete_subcommand)
.subcommand(add_light_resulting_commit_args(bonsai_merge_subcommand))
.subcommand(add_light_resulting_commit_args(gradual_merge_subcommand))
.subcommand(gradual_merge_progress_subcommand)

View File

@ -50,7 +50,7 @@ use synced_commit_mapping::{
SyncedCommitMappingEntry,
};
use tokio::{
fs::File,
fs::{read_to_string, File},
io::{AsyncBufReadExt, BufReader},
};
@ -66,13 +66,14 @@ use crate::cli::{
get_delete_commits_cs_args_factory, get_gradual_merge_commits_cs_args_factory, setup_app,
BACKFILL_NOOP_MAPPING, BASE_COMMIT_HASH, BONSAI_MERGE, BONSAI_MERGE_P1, BONSAI_MERGE_P2,
CATCHUP_DELETE_HEAD, CATCHUP_VALIDATE_COMMAND, CHANGESET, CHECK_PUSH_REDIRECTION_PREREQS,
CHUNKING_HINT_FILE, COMMIT_BOOKMARK, COMMIT_HASH, DELETION_CHUNK_SIZE, DIFF_MAPPING_VERSIONS,
DRY_RUN, EVEN_CHUNK_SIZE, FIRST_PARENT, GRADUAL_DELETE, GRADUAL_MERGE, GRADUAL_MERGE_PROGRESS,
HEAD_BOOKMARK, INPUT_FILE, LAST_DELETION_COMMIT, LIMIT, MANUAL_COMMIT_SYNC,
MAPPING_VERSION_NAME, MARK_NOT_SYNCED_COMMAND, MAX_NUM_OF_MOVES_IN_COMMIT, MERGE, MOVE,
ORIGIN_REPO, PARENTS, PATH, PATH_REGEX, PRE_DELETION_COMMIT, PRE_MERGE_DELETE, RUN_MOVER,
SECOND_PARENT, SELECT_PARENTS_AUTOMATICALLY, SOURCE_CHANGESET, SYNC_COMMIT_AND_ANCESTORS,
SYNC_DIAMOND_MERGE, TARGET_CHANGESET, TO_MERGE_CS_ID, VERSION, WAIT_SECS,
CHUNKING_HINT_FILE, COMMIT_BOOKMARK, COMMIT_HASH, COMMIT_HASH_CORRECT_HISTORY,
DELETION_CHUNK_SIZE, DIFF_MAPPING_VERSIONS, DRY_RUN, EVEN_CHUNK_SIZE, FIRST_PARENT,
GRADUAL_DELETE, GRADUAL_MERGE, GRADUAL_MERGE_PROGRESS, HEAD_BOOKMARK, HISTORY_FIXUP_DELETE,
INPUT_FILE, LAST_DELETION_COMMIT, LIMIT, MANUAL_COMMIT_SYNC, MAPPING_VERSION_NAME,
MARK_NOT_SYNCED_COMMAND, MAX_NUM_OF_MOVES_IN_COMMIT, MERGE, MOVE, ORIGIN_REPO, PARENTS, PATH,
PATHS_FILE, PATH_REGEX, PRE_DELETION_COMMIT, PRE_MERGE_DELETE, RUN_MOVER, SECOND_PARENT,
SELECT_PARENTS_AUTOMATICALLY, SOURCE_CHANGESET, SYNC_COMMIT_AND_ANCESTORS, SYNC_DIAMOND_MERGE,
TARGET_CHANGESET, TO_MERGE_CS_ID, VERSION, WAIT_SECS,
};
use crate::merging::perform_merge;
use megarepolib::chunking::{
@ -80,6 +81,7 @@ use megarepolib::chunking::{
};
use megarepolib::commit_sync_config_utils::diff_small_repo_commit_sync_configs;
use megarepolib::common::{create_and_save_bonsai, delete_files_in_chunks};
use megarepolib::history_fixup_delete::{create_history_fixup_deletes, HistoryFixupDeletes};
use megarepolib::pre_merge_delete::{create_pre_merge_delete, PreMergeDelete};
use megarepolib::working_copy::get_working_copy_paths_by_prefixes;
use megarepolib::{common::StackPosition, perform_move, perform_stack_move};
@ -283,6 +285,82 @@ async fn run_pre_merge_delete<'a>(
Ok(())
}
/// Handler for the `history-fixup-deletes` subcommand.
///
/// Creates two stacks of "delete commits" that must land before history can
/// be fixed up:
///   * on the fixup branch: delete the paths listed in the paths file from
///     the commit being fixed up (`COMMIT_HASH`);
///   * on the correct-history branch: delete everything *except* those paths
///     from the commit that carries the correct history
///     (`COMMIT_HASH_CORRECT_HISTORY`).
///
/// The resulting changeset ids are printed to stdout, each branch listed in
/// top-to-bottom order (first printed commit is a descendant of the last).
///
/// Errors if the repo can't be opened, a hash fails to resolve, the paths
/// file can't be read, or any line in it is not a valid `MPath`.
async fn run_history_fixup_delete<'a>(
    ctx: CoreContext,
    matches: &MononokeMatches<'a>,
    sub_m: &ArgMatches<'a>,
) -> Result<(), Error> {
    let repo: BlobRepo = args::open_repo(ctx.fb, ctx.logger(), matches).await?;
    let delete_cs_args_factory = get_delete_commits_cs_args_factory(sub_m)?;

    // EVEN_CHUNK_SIZE is declared `required(true)` on this subcommand, so the
    // error below should be unreachable; keep an accurate message anyway.
    // (The previous message mentioned CHUNKING_HINT_FILE, which this
    // subcommand does not accept.)
    let even_chunk_size: usize = sub_m
        .value_of(EVEN_CHUNK_SIZE)
        .ok_or_else(|| format_err!("{} is required", EVEN_CHUNK_SIZE))?
        .parse::<usize>()?;
    let chunker = even_chunker_with_max_size(even_chunk_size)?;

    // Commit whose listed paths will be deleted (the one with bad history).
    // unwrap() is safe: the arg is `required(true)` in clap.
    let fixup_bcs_id = {
        let hash = sub_m.value_of(COMMIT_HASH).unwrap().to_owned();
        helpers::csid_resolve(&ctx, repo.clone(), hash).await?
    };
    // Commit carrying the files with correct history; everything but the
    // listed paths will be deleted there. Also `required(true)`.
    let correct_bcs_id = {
        let hash = sub_m
            .value_of(COMMIT_HASH_CORRECT_HISTORY)
            .unwrap()
            .to_owned();
        helpers::csid_resolve(&ctx, repo.clone(), hash).await?
    };

    // One path per line. NOTE(review): clap allows `--paths-file` multiple
    // times (`multiple(true)`), but `value_of` only reads the first
    // occurrence — confirm whether multiple files should be supported.
    // collect() short-circuits on the first invalid path.
    let paths_file = sub_m.value_of(PATHS_FILE).unwrap().to_owned();
    let s = read_to_string(&paths_file).await?;
    let paths: Vec<MPath> = s
        .lines()
        .map(MPath::new)
        .collect::<Result<Vec<MPath>>>()?;

    let HistoryFixupDeletes {
        delete_commits_fixup_branch,
        delete_commits_correct_branch,
    } = create_history_fixup_deletes(
        &ctx,
        &repo,
        fixup_bcs_id,
        chunker,
        delete_cs_args_factory,
        correct_bcs_id,
        paths,
    )
    .await?;

    info!(
        ctx.logger(),
        "Listing deletion commits for fixup branch in top-to-bottom order (first commit is a descendant of the last)"
    );
    // Commits come back bottom-to-top; iterate in reverse instead of
    // mutating the Vec with `reverse()`.
    for delete_commit in delete_commits_fixup_branch.into_iter().rev() {
        println!("{}", delete_commit);
    }

    info!(
        ctx.logger(),
        "Listing deletion commits for branch with correct history in top-to-bottom order (first commit is a descendant of the last)"
    );
    for delete_commit in delete_commits_correct_branch.into_iter().rev() {
        println!("{}", delete_commit);
    }
    Ok(())
}
async fn run_gradual_delete<'a>(
ctx: CoreContext,
matches: &MononokeMatches<'a>,
@ -1089,6 +1167,9 @@ fn main(fb: FacebookInit) -> Result<()> {
run_gradual_merge_progress(ctx, &matches, sub_m).await
}
(PRE_MERGE_DELETE, Some(sub_m)) => run_pre_merge_delete(ctx, &matches, sub_m).await,
(HISTORY_FIXUP_DELETE, Some(sub_m)) => {
run_history_fixup_delete(ctx, &matches, sub_m).await
}
_ => bail!("oh no, wrong arguments provided!"),
}
};

View File

@ -0,0 +1,130 @@
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License found in the LICENSE file in the root
# directory of this source tree.
$ . "${TEST_FIXTURES}/library.sh"
Setup repositories
$ REPOTYPE="blob_files"
$ FBS_REPOID=0
$ NO_BOOKMARKS_CACHE=1 REPOID=$FBS_REPOID REPONAME=repo setup_common_config $REPOTYPE
$ setup_commitsyncmap
$ setup_configerator_configs
$ merge_tunables <<EOF
> {
> "killswitches": {
> "force_unode_v2": true
> }
> }
> EOF
$ cat >> "$HGRCPATH" <<EOF
> [ui]
> ssh="$DUMMYSSH"
> EOF
$ function createfile { mkdir -p "$(dirname $1)" && echo "$1" > "$1" && hg add -q "$1"; }
-- init hg fbsource server repo
$ cd $TESTTMP
$ hginit_treemanifest fbs-hg-srv
$ cd fbs-hg-srv
-- create initial commits
$ createfile fbcode/file_with_correct_history
$ hg -q ci -m "master commit 1"
$ createfile fbcode/file_with_incorrect_history
$ hg -q ci -m "commit commit 2 [incorrect history]"
$ echo changed > fbcode/file_with_incorrect_history
$ createfile file_with_incorrect_history2
$ hg -q ci -m "commit commit 3 [incorrect history]"
$ hg book -i -r . master
$ hg update -q null
$ createfile fbcode/file_with_incorrect_history
$ hg -q ci -m "small repo commit 1"
$ echo changed_ > fbcode/file_with_incorrect_history
$ createfile file_with_incorrect_history2
$ createfile fbcode/file_with_correct_history
$ hg -q ci -m "small repo commit 2 [corrected history]"
$ createfile some_file_that_should_stay_in_small_repo_only
$ createfile some_file_that_should_stay_in_small_repo_only2
$ createfile some_file_that_should_stay_in_small_repo_only3
$ createfile some_file_that_should_stay_in_small_repo_only4
$ hg -q ci -m "small repo commit 3"
$ hg book -i -r . correct_history_branch
-- blobimport hg server repos into Mononoke repos
$ cd "$TESTTMP"
$ REPOID="$FBS_REPOID" blobimport "fbs-hg-srv/.hg" "repo"
-- setup hg client repos
$ cd "$TESTTMP"
$ hgclone_treemanifest ssh://user@dummy/fbs-hg-srv fbs-hg-cnt --noupdate
Start mononoke server
$ mononoke
$ wait_for_mononoke
$ cat > "paths_to_fixup" <<EOF
> fbcode/file_with_incorrect_history
> file_with_incorrect_history2
> EOF
$ COMMIT_DATE="1985-09-04T00:00:00.00Z"
$ REPOID=$FBS_REPOID megarepo_tool history-fixup-deletes author "history fixup" master correct_history_branch --paths-file paths_to_fixup --even-chunk-size 3 --commit-date-rfc3339 "$COMMIT_DATE" 2> /dev/null
7d84767352730c2af3020ef0d16c1933438724b14a93a87462bcf24f02bc6fc1
81ea05520fa72bf27124fed8d0e0be49683f4695e86c0b57940982291089a15d
d6c0cb28cbef050857dcef87adfc509c6d01d7fec8a0423ebb41d1fa4f0158c9
$ REPOID=$FBS_REPOID megarepo_tool merge 7d84767352730c2af3020ef0d16c1933438724b14a93a87462bcf24f02bc6fc1 81ea05520fa72bf27124fed8d0e0be49683f4695e86c0b57940982291089a15d author "history fixup" --mark-public --commit-date-rfc3339 "$COMMIT_DATE" --bookmark master 2> /dev/null
$ cd "$TESTTMP"/fbs-hg-cnt
$ REPONAME=repo hgmn pull -q
$ hgmn update -q master
$ ls *
file_with_incorrect_history2
fbcode:
file_with_correct_history
file_with_incorrect_history
$ hg log -f fbcode/file_with_incorrect_history -T "{node} {desc}\n"
6c017a8ba0a60b7a82b3cd0a98b52dc68def9f96 small repo commit 2 [corrected history]
11fbaaa53e1b7d7fb87f3831b007c803fb64afa7 small repo commit 1
$ hg log -f fbcode/file_with_correct_history -T "{node} {desc}\n"
835251f7cda8fd1adddf414ce67d58090897e93a master commit 1
$ log
@ history fixup [public;rev=9;dcacf3dd28f1] default/master
o [MEGAREPO DELETE] history fixup (1) [public;rev=8;d3b2dfc1d7dc]
o [MEGAREPO DELETE] history fixup (0) [public;rev=7;94932f105be0]
o [MEGAREPO DELETE] history fixup (0) [public;rev=6;c2a5523610c4]
o small repo commit 3 [public;rev=5;ea8595b036ed]
o small repo commit 2 [corrected history] [public;rev=4;6c017a8ba0a6]
o small repo commit 1 [public;rev=3;11fbaaa53e1b]
o commit commit 3 [incorrect history] [public;rev=2;c3f812992511]
o commit commit 2 [incorrect history] [public;rev=1;4f27e05b6e2a]
o master commit 1 [public;rev=0;835251f7cda8]
$