mononoke/repo_import: add merge functionality

Summary:
Once we have revealed the commits to the user (D22864223 (578207d0dc), D22762800 (f1ef619284)), we need to merge the imported branch into the destination branch (specified by dest-bookmark). To do this, we extract the latest commit of the destination branch, then compare the two commits to check whether we have merge conflicts. If we have merge conflicts, we inform the user, so they can resolve them. Otherwise, we create a new bonsai having the two commits as parents.

Next step: pushrebase the merge commit

Minor refactor: moved app setup to a separate file for better readability.

Reviewed By: StanislavGlebik

Differential Revision: D23028163

fbshipit-source-id: 7f3e2a67dc089e6bbacbe71b5e4ef5f6eed2a9e1
This commit is contained in:
Viet Hung Nguyen 2020-08-11 03:25:37 -07:00 committed by Facebook GitHub Bot
parent dd79f87e6e
commit f267bec3f7
4 changed files with 297 additions and 94 deletions

View File

@ -9,12 +9,14 @@ include = ["src/**/*.rs"]
[dependencies] [dependencies]
blobrepo = { path = "../blobrepo" } blobrepo = { path = "../blobrepo" }
blobrepo_hg = { path = "../blobrepo/blobrepo_hg" } blobrepo_hg = { path = "../blobrepo/blobrepo_hg" }
blobstore = { path = "../blobstore" }
bookmarks = { path = "../bookmarks" } bookmarks = { path = "../bookmarks" }
cmdlib = { path = "../cmdlib" } cmdlib = { path = "../cmdlib" }
context = { path = "../server/context" } context = { path = "../server/context" }
cross_repo_sync = { path = "../commit_rewriting/cross_repo_sync" } cross_repo_sync = { path = "../commit_rewriting/cross_repo_sync" }
derived_data_utils = { path = "../derived_data/utils" } derived_data_utils = { path = "../derived_data/utils" }
import_tools = { path = "../git/import_tools" } import_tools = { path = "../git/import_tools" }
manifest = { path = "../manifest" }
mercurial_types = { path = "../mercurial/types" } mercurial_types = { path = "../mercurial/types" }
mononoke_types = { path = "../mononoke_types" } mononoke_types = { path = "../mononoke_types" }
movers = { path = "../commit_rewriting/movers" } movers = { path = "../commit_rewriting/movers" }
@ -31,7 +33,6 @@ tokio = { version = "=0.2.13", features = ["full"] }
[dev-dependencies] [dev-dependencies]
blobrepo_factory = { path = "../blobrepo/factory" } blobrepo_factory = { path = "../blobrepo/factory" }
blobstore = { path = "../blobstore" }
mercurial_types-mocks = { path = "../mercurial/types/mocks" } mercurial_types-mocks = { path = "../mercurial/types/mocks" }
mononoke_types-mocks = { path = "../mononoke_types/mocks" } mononoke_types-mocks = { path = "../mononoke_types/mocks" }
sql_construct = { path = "../common/sql_construct" } sql_construct = { path = "../common/sql_construct" }

View File

@ -0,0 +1,125 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use clap::{App, Arg};
use cmdlib::args;
// Names of the command-line arguments accepted by repo_import.
// Exported so main.rs can look the parsed values up by the same keys.
// Note: `&str` (not `&'static str`) — the 'static lifetime is implied
// for string-literal consts and spelling it out is redundant.
pub const ARG_GIT_REPOSITORY_PATH: &str = "git-repository-path";
pub const ARG_DEST_PATH: &str = "dest-path";
pub const ARG_BATCH_SIZE: &str = "batch-size";
pub const ARG_BOOKMARK_SUFFIX: &str = "bookmark-suffix";
pub const ARG_CALL_SIGN: &str = "call-sign";
pub const ARG_PHAB_CHECK_DISABLED: &str = "disable-phabricator-check";
pub const ARG_X_REPO_CHECK_DISABLED: &str = "disable-x-repo-check";
pub const ARG_HG_SYNC_CHECK_DISABLED: &str = "disable-hg-sync-check";
pub const ARG_SLEEP_TIME: &str = "sleep-time";
pub const ARG_BACKUP_HASHES_FILE_PATH: &str = "backup-hashes-file-path";
pub const ARG_DEST_BOOKMARK: &str = "dest-bookmark";
pub const ARG_COMMIT_MESSAGE: &str = "commit-message";
pub const ARG_COMMIT_AUTHOR: &str = "commit-author";
pub const ARG_COMMIT_DATE_RFC3339: &str = "commit-date-rfc3339";
/// Build the clap `App` describing the repo_import command-line interface.
///
/// Every argument name is exported as an `ARG_*` constant so `main.rs`
/// can read the parsed values back by the same keys.
pub fn setup_app<'a, 'b>() -> App<'a, 'b> {
    let mut app = args::MononokeApp::new("Import Repository")
        .with_advanced_args_hidden()
        .build()
        .version("0.0.0")
        .about("Automating repository imports");

    // Positional argument: where the source git repository lives.
    app = app.arg(
        Arg::with_name(ARG_GIT_REPOSITORY_PATH)
            .required(true)
            .help("Path to a git repository to import"),
    );
    app = app.arg(
        Arg::with_name(ARG_DEST_PATH)
            .long(ARG_DEST_PATH)
            .required(true)
            .takes_value(true)
            .help("Path to the destination folder we import to"),
    );
    app = app.arg(
        Arg::with_name(ARG_BATCH_SIZE)
            .long(ARG_BATCH_SIZE)
            .takes_value(true)
            .default_value("100")
            .help("Number of commits we make visible when moving the bookmark"),
    );
    app = app.arg(
        Arg::with_name(ARG_BOOKMARK_SUFFIX)
            .long(ARG_BOOKMARK_SUFFIX)
            .required(true)
            .takes_value(true)
            .help("Suffix of the bookmark (repo_import_<suffix>)"),
    );
    app = app.arg(
        Arg::with_name(ARG_CALL_SIGN)
            .long(ARG_CALL_SIGN)
            .takes_value(true)
            .help("Call sign to get commit info from Phabricator. e.g. FBS for fbsource"),
    );

    // Boolean switches that disable individual dependent-system checks.
    app = app.arg(
        Arg::with_name(ARG_PHAB_CHECK_DISABLED)
            .long(ARG_PHAB_CHECK_DISABLED)
            .takes_value(false)
            .help("Disable waiting for Phabricator to parse commits."),
    );
    app = app.arg(
        Arg::with_name(ARG_X_REPO_CHECK_DISABLED)
            .long(ARG_X_REPO_CHECK_DISABLED)
            .takes_value(false)
            .help("Disable x_repo sync check after moving the bookmark"),
    );
    app = app.arg(
        Arg::with_name(ARG_HG_SYNC_CHECK_DISABLED)
            .long(ARG_HG_SYNC_CHECK_DISABLED)
            .takes_value(false)
            .help("Disable hg sync check after moving the bookmark"),
    );
    app = app.arg(
        Arg::with_name(ARG_SLEEP_TIME)
            .long(ARG_SLEEP_TIME)
            .takes_value(true)
            .default_value("1")
            .help(
                "Sleep time, if we fail dependent system (phabricator, hg_sync ...) checkers",
            ),
    );
    app = app.arg(
        Arg::with_name(ARG_BACKUP_HASHES_FILE_PATH)
            .long(ARG_BACKUP_HASHES_FILE_PATH)
            .takes_value(true)
            .required(true)
            .help("Backup file path to save bonsai hashes if deriving data types fail"),
    );

    // Options that control the final merge into the destination branch.
    app = app.arg(
        Arg::with_name(ARG_DEST_BOOKMARK)
            .long(ARG_DEST_BOOKMARK)
            .takes_value(true)
            .required(true)
            .help("The bookmark branch we want to merge our repo into (e.g. master)"),
    );
    app = app.arg(
        Arg::with_name(ARG_COMMIT_AUTHOR)
            .help("commit author to use")
            .long(ARG_COMMIT_AUTHOR)
            .takes_value(true)
            .required(true),
    );
    app = app.arg(
        Arg::with_name(ARG_COMMIT_MESSAGE)
            .help("commit message to use")
            .long(ARG_COMMIT_MESSAGE)
            .takes_value(true)
            .required(true),
    );
    app.arg(
        Arg::with_name(ARG_COMMIT_DATE_RFC3339)
            .help("commit date to use (default is now)")
            .long(ARG_COMMIT_DATE_RFC3339)
            .takes_value(true),
    )
}

View File

@ -9,8 +9,8 @@
use anyhow::{format_err, Error}; use anyhow::{format_err, Error};
use blobrepo::{save_bonsai_changesets, BlobRepo}; use blobrepo::{save_bonsai_changesets, BlobRepo};
use blobrepo_hg::BlobRepoHg; use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bookmarks::{BookmarkName, BookmarkUpdateLog, BookmarkUpdateReason, Freshness}; use bookmarks::{BookmarkName, BookmarkUpdateLog, BookmarkUpdateReason, Freshness};
use clap::Arg;
use cmdlib::args; use cmdlib::args;
use cmdlib::helpers::block_execute; use cmdlib::helpers::block_execute;
use context::CoreContext; use context::CoreContext;
@ -18,36 +18,36 @@ use cross_repo_sync::rewrite_commit;
use derived_data_utils::derived_data_utils; use derived_data_utils::derived_data_utils;
use fbinit::FacebookInit; use fbinit::FacebookInit;
use futures::{ use futures::{
compat::Future01CompatExt, compat::{Future01CompatExt, Stream01CompatExt},
future::TryFutureExt, future::TryFutureExt,
stream::{self, StreamExt, TryStreamExt}, stream::{self, StreamExt, TryStreamExt},
}; };
use import_tools::{GitimportPreferences, GitimportTarget}; use import_tools::{GitimportPreferences, GitimportTarget};
use manifest::ManifestOps;
use mercurial_types::{HgChangesetId, MPath}; use mercurial_types::{HgChangesetId, MPath};
use mononoke_types::{BonsaiChangeset, ChangesetId}; use mononoke_types::{BonsaiChangeset, BonsaiChangesetMut, ChangesetId, DateTime};
use movers::DefaultAction; use movers::DefaultAction;
use mutable_counters::{MutableCounters, SqlMutableCounters}; use mutable_counters::{MutableCounters, SqlMutableCounters};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json; use serde_json;
use slog::info; use slog::info;
use std::collections::HashMap; use std::collections::{BTreeMap, HashMap, HashSet};
use std::convert::TryInto; use std::convert::TryInto;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
use tokio::{fs, io::AsyncWriteExt, process, time}; use tokio::{fs, io::AsyncWriteExt, process, time};
use topo_sort::sort_topological; use topo_sort::sort_topological;
const ARG_GIT_REPOSITORY_PATH: &str = "git-repository-path"; mod cli;
const ARG_DEST_PATH: &str = "dest-path";
const ARG_BATCH_SIZE: &str = "batch-size"; use crate::cli::{
const ARG_BOOKMARK_SUFFIX: &str = "bookmark-suffix"; setup_app, ARG_BACKUP_HASHES_FILE_PATH, ARG_BATCH_SIZE, ARG_BOOKMARK_SUFFIX, ARG_CALL_SIGN,
const ARG_CALL_SIGN: &str = "call-sign"; ARG_COMMIT_AUTHOR, ARG_COMMIT_DATE_RFC3339, ARG_COMMIT_MESSAGE, ARG_DEST_BOOKMARK,
const ARG_PHAB_CHECK_DISABLED: &str = "disable-phabricator-check"; ARG_DEST_PATH, ARG_GIT_REPOSITORY_PATH, ARG_HG_SYNC_CHECK_DISABLED, ARG_PHAB_CHECK_DISABLED,
const ARG_X_REPO_CHECK_DISABLED: &str = "disable-x-repo-check"; ARG_SLEEP_TIME, ARG_X_REPO_CHECK_DISABLED,
const ARG_HG_SYNC_CHECK_DISABLED: &str = "disable-hg-sync-check"; };
const ARG_SLEEP_TIME: &str = "sleep-time";
const LATEST_REPLAYED_REQUEST_KEY: &'static str = "latest-replayed-request"; const LATEST_REPLAYED_REQUEST_KEY: &'static str = "latest-replayed-request";
const ARG_BACKUP_HASHES_FILE_PATH: &str = "backup-hashes-file-path";
#[derive(Deserialize, Clone, Debug)] #[derive(Deserialize, Clone, Debug)]
struct GraphqlQueryObj { struct GraphqlQueryObj {
@ -76,6 +76,12 @@ struct CheckerFlags<'a> {
hg_sync_check_disabled: bool, hg_sync_check_disabled: bool,
call_sign: Option<&'a str>, call_sign: Option<&'a str>,
} }
// Author/message/date metadata for the merge commit created when merging
// the imported branch into the destination bookmark; populated in main()
// from the --commit-author / --commit-message / --commit-date-rfc3339 flags.
#[derive(Clone, Debug)]
struct ChangesetArgs {
    // Commit author (required CLI flag).
    pub author: String,
    // Commit message (required CLI flag).
    pub message: String,
    // Commit date; defaults to "now" when the flag is absent.
    pub datetime: DateTime,
}
async fn rewrite_file_paths( async fn rewrite_file_paths(
ctx: &CoreContext, ctx: &CoreContext,
@ -176,6 +182,7 @@ async fn move_bookmark(
sleep_time: u64, sleep_time: u64,
mutable_counters: &SqlMutableCounters, mutable_counters: &SqlMutableCounters,
) -> Result<(), Error> { ) -> Result<(), Error> {
info!(ctx.logger(), "Start moving the bookmark");
if shifted_bcs.is_empty() { if shifted_bcs.is_empty() {
return Err(format_err!("There is no bonsai changeset present")); return Err(format_err!("There is no bonsai changeset present"));
} }
@ -235,9 +242,111 @@ async fn move_bookmark(
.await?; .await?;
old_csid = curr_csid; old_csid = curr_csid;
} }
info!(ctx.logger(), "Finished moving the bookmark");
Ok(()) Ok(())
} }
/// Merge the tip of the imported commits into the commit pointed to by
/// `dest_bookmark`.
///
/// Resolves `dest_bookmark` to its bonsai changeset, takes the last element
/// of `shifted_bcs` as the imported tip, and errors out if any file path is
/// present in both parents (a conflict the user must resolve by hand).
/// Otherwise a merge bonsai changeset with the two parents — and no file
/// changes of its own — is created and saved to `repo`.
async fn merge_imported_commit(
    ctx: &CoreContext,
    repo: &BlobRepo,
    shifted_bcs: &[BonsaiChangeset],
    dest_bookmark: &str,
    changeset_args: ChangesetArgs,
) -> Result<(), Error> {
    info!(
        ctx.logger(),
        "Merging the imported commits into given bookmark, {}", dest_bookmark
    );
    let master_cs_id = repo
        .get_bonsai_bookmark(ctx.clone(), &BookmarkName::new(dest_bookmark)?)
        .compat()
        .await?
        .ok_or_else(|| {
            format_err!(
                "Couldn't extract changeset id from bookmark: {}",
                dest_bookmark
            )
        })?;
    let master_leaf_entries = get_leaf_entries(&ctx, &repo, master_cs_id).await?;

    // The merge parent on the imported side is the last (latest) changeset.
    let imported_cs_id = shifted_bcs
        .last()
        .map(|bcs| bcs.get_changeset_id())
        .ok_or_else(|| format_err!("There is no bonsai changeset present"))?;
    let imported_leaf_entries = get_leaf_entries(&ctx, &repo, imported_cs_id).await?;

    // Any path present in both parents would be a merge conflict: report it
    // instead of creating a broken merge.
    let intersection: Vec<MPath> = imported_leaf_entries
        .intersection(&master_leaf_entries)
        .cloned()
        .collect();
    if !intersection.is_empty() {
        return Err(format_err!(
            "There are paths present in both parents: {:?} ...",
            intersection
        ));
    }
    info!(ctx.logger(), "Done checking path conflicts");

    info!(
        ctx.logger(),
        "Creating a merge bonsai changeset with parents: {}, {}", master_cs_id, imported_cs_id
    );
    let ChangesetArgs {
        author,
        message,
        datetime,
    } = changeset_args;
    // The merge commit introduces no file changes of its own; it only ties
    // the two histories together.
    let merged_cs = BonsaiChangesetMut {
        parents: vec![master_cs_id, imported_cs_id],
        author: author.clone(),
        author_date: datetime,
        committer: Some(author),
        committer_date: Some(datetime),
        message,
        extra: BTreeMap::new(),
        file_changes: BTreeMap::new(),
    }
    .freeze()?;
    let merged_cs_id = merged_cs.get_changeset_id();
    info!(
        ctx.logger(),
        "Created merge bonsai: {} and changeset: {:?}", merged_cs_id, merged_cs
    );

    save_bonsai_changesets(vec![merged_cs], ctx.clone(), repo.clone())
        .compat()
        .await?;
    info!(ctx.logger(), "Finished merging");
    Ok(())
}
/// Collect the set of file paths (manifest leaf entries) reachable from the
/// given bonsai changeset. Used by the merge step to detect path conflicts
/// between the two merge parents.
async fn get_leaf_entries(
    ctx: &CoreContext,
    repo: &BlobRepo,
    cs_id: ChangesetId,
) -> Result<HashSet<MPath>, Error> {
    // Leaf listing works on hg manifests, so resolve the bonsai id to its
    // hg changeset first, then load it from the blobstore.
    let hg_cs_id = repo
        .get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
        .compat()
        .await?;
    let hg_cs = hg_cs_id.load(ctx.clone(), &repo.get_blobstore()).await?;
    let leaves = hg_cs
        .manifestid()
        .list_leaf_entries(ctx.clone(), repo.get_blobstore())
        .compat();
    // Keep only the paths; the file type and filenode id are irrelevant here.
    leaves
        .map_ok(|(path, _type_and_filenode)| path)
        .try_collect::<HashSet<_>>()
        .await
}
async fn check_dependent_systems( async fn check_dependent_systems(
ctx: &CoreContext, ctx: &CoreContext,
repo: &BlobRepo, repo: &BlobRepo,
@ -387,78 +496,7 @@ fn sort_bcs(shifted_bcs: &[BonsaiChangeset]) -> Result<Vec<BonsaiChangeset>, Err
#[fbinit::main] #[fbinit::main]
fn main(fb: FacebookInit) -> Result<(), Error> { fn main(fb: FacebookInit) -> Result<(), Error> {
let app = args::MononokeApp::new("Import Repository") let app = setup_app();
.with_advanced_args_hidden()
.build()
.version("0.0.0")
.about("Automating repository imports")
.arg(
Arg::with_name(ARG_GIT_REPOSITORY_PATH)
.required(true)
.help("Path to a git repository to import"),
)
.arg(
Arg::with_name(ARG_DEST_PATH)
.long(ARG_DEST_PATH)
.required(true)
.takes_value(true)
.help("Path to the destination folder we import to"),
)
.arg(
Arg::with_name(ARG_BATCH_SIZE)
.long(ARG_BATCH_SIZE)
.takes_value(true)
.default_value("100")
.help("Number of commits we make visible when moving the bookmark"),
)
.arg(
Arg::with_name(ARG_BOOKMARK_SUFFIX)
.long(ARG_BOOKMARK_SUFFIX)
.required(true)
.takes_value(true)
.help("Suffix of the bookmark (repo_import_<suffix>)"),
)
.arg(
Arg::with_name(ARG_CALL_SIGN)
.long(ARG_CALL_SIGN)
.takes_value(true)
.help("Call sign to get commit info from Phabricator. e.g. FBS for fbsource"),
)
.arg(
Arg::with_name(ARG_PHAB_CHECK_DISABLED)
.long(ARG_PHAB_CHECK_DISABLED)
.takes_value(false)
.help("Disable waiting for Phabricator to parse commits."),
)
.arg(
Arg::with_name(ARG_X_REPO_CHECK_DISABLED)
.long(ARG_X_REPO_CHECK_DISABLED)
.takes_value(false)
.help("Disable x_repo sync check after moving the bookmark"),
)
.arg(
Arg::with_name(ARG_HG_SYNC_CHECK_DISABLED)
.long(ARG_HG_SYNC_CHECK_DISABLED)
.takes_value(false)
.help("Disable hg sync check after moving the bookmark"),
)
.arg(
Arg::with_name(ARG_SLEEP_TIME)
.long(ARG_SLEEP_TIME)
.takes_value(true)
.default_value("1")
.help(
"Sleep time, if we fail dependent system (phabricator, hg_sync ...) checkers",
),
)
.arg(
Arg::with_name(ARG_BACKUP_HASHES_FILE_PATH)
.long(ARG_BACKUP_HASHES_FILE_PATH)
.takes_value(true)
.required(true)
.help("Backup file path to save bonsai hashes if deriving data types fail"),
);
let matches = app.get_matches(); let matches = app.get_matches();
let path = Path::new(matches.value_of(ARG_GIT_REPOSITORY_PATH).unwrap()); let path = Path::new(matches.value_of(ARG_GIT_REPOSITORY_PATH).unwrap());
@ -489,7 +527,18 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
let sleep_time = matches.value_of(ARG_SLEEP_TIME).unwrap(); let sleep_time = matches.value_of(ARG_SLEEP_TIME).unwrap();
let sleep_time = sleep_time.parse::<u64>()?; let sleep_time = sleep_time.parse::<u64>()?;
let backup_hashes_path = matches.value_of(ARG_BACKUP_HASHES_FILE_PATH).unwrap(); let backup_hashes_path = matches.value_of(ARG_BACKUP_HASHES_FILE_PATH).unwrap();
let dest_bookmark = matches.value_of(ARG_DEST_BOOKMARK).unwrap();
let commit_author = matches.value_of(ARG_COMMIT_AUTHOR).unwrap();
let commit_message = matches.value_of(ARG_COMMIT_MESSAGE).unwrap();
let datetime = match matches.value_of(ARG_COMMIT_DATE_RFC3339) {
Some(date) => DateTime::from_rfc3339(date)?,
None => DateTime::now(),
};
let changeset_args = ChangesetArgs {
author: commit_author.to_string(),
message: commit_message.to_string(),
datetime,
};
args::init_cachelib(fb, &matches, None); args::init_cachelib(fb, &matches, None);
let logger = args::init_logging(fb, &matches); let logger = args::init_logging(fb, &matches);
@ -507,7 +556,6 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
info!(ctx.logger(), "Start deriving data types"); info!(ctx.logger(), "Start deriving data types");
derive_bonsais(&ctx, &repo, &shifted_bcs).await?; derive_bonsais(&ctx, &repo, &shifted_bcs).await?;
info!(ctx.logger(), "Finished deriving data types"); info!(ctx.logger(), "Finished deriving data types");
info!(ctx.logger(), "Start moving bookmarks");
move_bookmark( move_bookmark(
&ctx, &ctx,
&repo, &repo,
@ -518,7 +566,8 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
sleep_time, sleep_time,
&mutable_counters, &mutable_counters,
) )
.await .await?;
merge_imported_commit(&ctx, &repo, &shifted_bcs, &dest_bookmark, changeset_args).await
}, },
fb, fb,
"repo_import", "repo_import",

View File

@ -8,6 +8,17 @@
$ setup_common_config $ setup_common_config
$ GIT_REPO="${TESTTMP}/repo-git" $ GIT_REPO="${TESTTMP}/repo-git"
$ HG_REPO="${TESTTMP}/repo-hg" $ HG_REPO="${TESTTMP}/repo-hg"
$ BLOB_TYPE="blob_files" default_setup
hg repo
o C [draft;rev=2;26805aba1e60]
|
o B [draft;rev=1;112478962961]
|
o A [draft;rev=0;426bada5c675]
$
blobimporting
starting Mononoke
cloning repo in hg client 'repo2'
# Setup git repository # Setup git repository
$ mkdir "$GIT_REPO" $ mkdir "$GIT_REPO"
@ -35,7 +46,7 @@
# Import it into Mononoke # Import it into Mononoke
$ cd "$TESTTMP" $ cd "$TESTTMP"
$ repo_import "$GIT_REPO" --dest-path "new_dir/new_repo" --batch-size 3 --bookmark-suffix "new_repo" --disable-phabricator-check --disable-hg-sync-check --backup-hashes-file-path "$GIT_REPO/hashes.txt" $ repo_import "$GIT_REPO" --dest-path "new_dir/new_repo" --batch-size 3 --bookmark-suffix "new_repo" --disable-phabricator-check --disable-hg-sync-check --backup-hashes-file-path "$GIT_REPO/hashes.txt" --dest-bookmark master_bookmark --commit-author user --commit-message "merging"
* using repo "repo" repoid RepositoryId(0) (glob) * using repo "repo" repoid RepositoryId(0) (glob)
* Started importing git commits to Mononoke (glob) * Started importing git commits to Mononoke (glob)
* Created ce435b03d4ef526648f8654c61e26ae5cc1069cc => ChangesetId(Blake2(f7cbf75d9c08ff96896ed2cebd0327aa514e58b1dd9901d50129b9e08f4aa062)) (glob) * Created ce435b03d4ef526648f8654c61e26ae5cc1069cc => ChangesetId(Blake2(f7cbf75d9c08ff96896ed2cebd0327aa514e58b1dd9901d50129b9e08f4aa062)) (glob)
@ -49,9 +60,15 @@
* Saved bonsai changesets (glob) * Saved bonsai changesets (glob)
* Start deriving data types (glob) * Start deriving data types (glob)
* Finished deriving data types (glob) * Finished deriving data types (glob)
* Start moving bookmarks (glob) * Start moving the bookmark (glob)
* Created bookmark BookmarkName { bookmark: "repo_import_new_repo" } pointing to * (glob) * Created bookmark BookmarkName { bookmark: "repo_import_new_repo" } pointing to * (glob)
* Set bookmark BookmarkName { bookmark: "repo_import_new_repo" } to * (glob) * Set bookmark BookmarkName { bookmark: "repo_import_new_repo" } to * (glob)
* Finished moving the bookmark (glob)
* Merging the imported commits into given bookmark, master_bookmark (glob)
* Done checking path conflicts (glob)
* Creating a merge bonsai changeset with parents: *, * (glob)
* Created merge bonsai: * and changeset: * (glob)
* Finished merging (glob)
# Check if we derived all the types # Check if we derived all the types
$ BOOKMARK_NAME="repo_import_new_repo" $ BOOKMARK_NAME="repo_import_new_repo"
@ -78,11 +95,22 @@
# Clone the repository # Clone the repository
$ cd "$TESTTMP" $ cd "$TESTTMP"
$ hgmn_clone 'ssh://user@dummy/repo' "$HG_REPO" $ hgclone_treemanifest ssh://user@dummy/repo-hg repo1 --noupdate -q
$ cat "$GIT_REPO/hashes.txt" $ cat "$GIT_REPO/hashes.txt"
a159bc614d2dbd07a5ecc6476156fa464b69e884d819bbc2e854ade3e4c353b9 a159bc614d2dbd07a5ecc6476156fa464b69e884d819bbc2e854ade3e4c353b9
a2e6329ed60e3dd304f53efd0f92c28b849404a47979fcf48bb43b6fe3a0cad5 a2e6329ed60e3dd304f53efd0f92c28b849404a47979fcf48bb43b6fe3a0cad5
$ cd "$HG_REPO" $ cd repo1
$ hgmn pull
pulling from ssh://user@dummy/repo
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
adding remote bookmark repo_import_new_repo
$ hgmn up repo_import_new_repo
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark repo_import_new_repo)
$ cat "new_dir/new_repo/file1" $ cat "new_dir/new_repo/file1"
this is file1 this is file1
$ cat "new_dir/new_repo/file2_repo/file2" $ cat "new_dir/new_repo/file2_repo/file2"