mirror of
https://github.com/facebook/sapling.git
synced 2024-10-09 16:31:02 +03:00
Add support for backup-repo-name argument
Summary: We already support backup-repo-id, but the tw blobimport job doesn't have a repo id available — it only has the source repo name. Let's add a name-based argument, following the same pattern as the existing repo-id/source-repo-id arguments. Reviewed By: StanislavGlebik Differential Revision: D27325583 fbshipit-source-id: 44b5ec7f99005355b8eaa4c066cb7168ec858049
This commit is contained in:
parent
d70aa26a14
commit
8b7dc976e6
@ -1176,8 +1176,10 @@ pub fn get_repo_id_from_value<'a>(
|
||||
config_store: &ConfigStore,
|
||||
matches: &'a MononokeMatches<'a>,
|
||||
repo_id_arg: &str,
|
||||
repo_name_arg: &str,
|
||||
) -> Result<RepositoryId> {
|
||||
let (repo_id, _) = get_repo_id_and_name_from_values(config_store, matches, "", repo_id_arg)?;
|
||||
let (repo_id, _) =
|
||||
get_repo_id_and_name_from_values(config_store, matches, repo_name_arg, repo_id_arg)?;
|
||||
Ok(repo_id)
|
||||
}
|
||||
|
||||
|
@ -12,7 +12,7 @@ use ascii::AsciiString;
|
||||
use blobimport_lib;
|
||||
use blobrepo::BlobRepo;
|
||||
use bonsai_globalrev_mapping::SqlBonsaiGlobalrevMapping;
|
||||
use clap::Arg;
|
||||
use clap::{Arg, ArgGroup};
|
||||
use cmdlib::{
|
||||
args::{self, get_scuba_sample_builder, MononokeClapApp, MononokeMatches, RepoRequirement},
|
||||
helpers::block_execute,
|
||||
@ -44,7 +44,9 @@ use synced_commit_mapping::SqlSyncedCommitMapping;
|
||||
const ARG_DERIVED_DATA_TYPE: &str = "derived-data-type";
|
||||
const ARG_EXCLUDE_DERIVED_DATA_TYPE: &str = "exclude-derived-data-type";
|
||||
const ARG_FIND_ALREADY_IMPORTED_REV_ONLY: &str = "find-already-imported-rev-only";
|
||||
const BACKUP_REPO_GROUP: &str = "backup-from-repo";
|
||||
const BACKUP_FROM_REPO_ID: &str = "backup-from-repo-id";
|
||||
const BACKUP_FROM_REPO_NAME: &str = "backup-from-repo-name";
|
||||
|
||||
fn setup_app<'a, 'b>() -> MononokeClapApp<'a, 'b> {
|
||||
args::MononokeAppBuilder::new("revlog to blob importer")
|
||||
@ -128,6 +130,16 @@ fn setup_app<'a, 'b>() -> MononokeClapApp<'a, 'b> {
|
||||
.value_name("ID")
|
||||
.help("numeric ID of backup source of truth mononoke repository (used only for backup jobs to sync bonsai changesets)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(BACKUP_FROM_REPO_NAME)
|
||||
.long(BACKUP_FROM_REPO_NAME)
|
||||
.value_name("NAME")
|
||||
.help("Name of backup source of truth mononoke repository (used only for backup jobs to sync bonsai changesets)"),
|
||||
)
|
||||
.group(
|
||||
ArgGroup::with_name(BACKUP_REPO_GROUP)
|
||||
.args(&[BACKUP_FROM_REPO_ID, BACKUP_FROM_REPO_NAME])
|
||||
)
|
||||
}
|
||||
|
||||
fn parse_fixed_parent_order<P: AsRef<Path>>(
|
||||
@ -333,12 +345,18 @@ async fn run_blobimport<'a>(
|
||||
)
|
||||
.await?;
|
||||
|
||||
let origin_repo = if matches.is_present(BACKUP_FROM_REPO_ID) {
|
||||
let repo_id = args::get_repo_id_from_value(config_store, matches, BACKUP_FROM_REPO_ID)?;
|
||||
Some(args::open_repo_with_repo_id(fb, &logger, repo_id, matches).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let origin_repo =
|
||||
if matches.is_present(BACKUP_FROM_REPO_ID) || matches.is_present(BACKUP_FROM_REPO_NAME) {
|
||||
let repo_id = args::get_repo_id_from_value(
|
||||
config_store,
|
||||
matches,
|
||||
BACKUP_FROM_REPO_ID,
|
||||
BACKUP_FROM_REPO_NAME,
|
||||
)?;
|
||||
Some(args::open_repo_with_repo_id(fb, &logger, repo_id, matches).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let globalrevs_store = Arc::new(globalrevs_store);
|
||||
let synced_commit_mapping = Arc::new(synced_commit_mapping);
|
||||
|
@ -23,7 +23,7 @@ use bundle_generator::FilenodeVerifier;
|
||||
use bundle_preparer::{maybe_adjust_batch, BundlePreparer};
|
||||
use bytes::Bytes;
|
||||
use cached_config::ConfigStore;
|
||||
use clap::{Arg, SubCommand};
|
||||
use clap::{Arg, ArgGroup, SubCommand};
|
||||
use cloned::cloned;
|
||||
use cmdlib::{
|
||||
args::{self, MononokeMatches},
|
||||
@ -84,7 +84,9 @@ const ARG_BOOKMARK_REGEX_FORCE_GENERATE_LFS: &str = "bookmark-regex-force-genera
|
||||
const ARG_BOOKMARK_MOVE_ANY_DIRECTION: &str = "bookmark-move-any-direction";
|
||||
const ARG_USE_HG_SERVER_BOOKMARK_VALUE_IF_MISMATCH: &str =
|
||||
"use-hg-server-bookmark-value-if-mismatch";
|
||||
const ARG_DARKSTORM_BACKUP_REPO_GROUP: &str = "darkstorm-backup-repo";
|
||||
const ARG_DARKSTORM_BACKUP_REPO_ID: &str = "darkstorm-backup-repo-id";
|
||||
const ARG_DARKSTORM_BACKUP_REPO_NAME: &str = "darkstorm-backup-repo-name";
|
||||
const ARG_BYPASS_READONLY: &str = "bypass-readonly";
|
||||
const GENERATE_BUNDLES: &str = "generate-bundles";
|
||||
const MODE_SYNC_ONCE: &str = "sync-once";
|
||||
@ -713,9 +715,15 @@ async fn run<'a>(ctx: CoreContext, matches: &'a MononokeMatches<'a>) -> Result<(
|
||||
|
||||
let use_hg_server_bookmark_value_if_mismatch =
|
||||
matches.is_present(ARG_USE_HG_SERVER_BOOKMARK_VALUE_IF_MISMATCH);
|
||||
let maybe_darkstorm_backup_repo = if matches.value_of(ARG_DARKSTORM_BACKUP_REPO_ID).is_some() {
|
||||
let backup_repo_id =
|
||||
args::get_repo_id_from_value(config_store, &matches, ARG_DARKSTORM_BACKUP_REPO_ID)?;
|
||||
let maybe_darkstorm_backup_repo = if matches.is_present(ARG_DARKSTORM_BACKUP_REPO_ID)
|
||||
|| matches.is_present(ARG_DARKSTORM_BACKUP_REPO_NAME)
|
||||
{
|
||||
let backup_repo_id = args::get_repo_id_from_value(
|
||||
config_store,
|
||||
&matches,
|
||||
ARG_DARKSTORM_BACKUP_REPO_ID,
|
||||
ARG_DARKSTORM_BACKUP_REPO_NAME,
|
||||
)?;
|
||||
let backup_repo =
|
||||
args::open_repo_by_id(ctx.fb, &ctx.logger(), &matches, backup_repo_id).await?;
|
||||
|
||||
@ -1276,6 +1284,18 @@ fn main(fb: FacebookInit) -> Result<()> {
|
||||
.help("Start hg-sync-job for syncing prod repo and darkstorm backup mononoke repo \
|
||||
and use darkstorm-backup-repo-id value as a target for sync."),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(ARG_DARKSTORM_BACKUP_REPO_NAME)
|
||||
.long(ARG_DARKSTORM_BACKUP_REPO_NAME)
|
||||
.takes_value(true)
|
||||
.required(false)
|
||||
.help("Start hg-sync-job for syncing prod repo and darkstorm backup mononoke repo \
|
||||
and use darkstorm-backup-repo-name as a target for sync."),
|
||||
)
|
||||
.group(
|
||||
ArgGroup::with_name(ARG_DARKSTORM_BACKUP_REPO_GROUP)
|
||||
.args(&[ARG_DARKSTORM_BACKUP_REPO_ID, ARG_DARKSTORM_BACKUP_REPO_NAME])
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(ARG_BYPASS_READONLY)
|
||||
.long(ARG_BYPASS_READONLY)
|
||||
|
@ -76,7 +76,7 @@ setup configuration
|
||||
$ mononoke_admin bookmarks list --kind publishing 2>/dev/null
|
||||
master_bookmark 4b71c845e8783e58fce825fa80254840eba291d323a5d69218ad927fc801153c 26805aba1e600a82e93661149f2313866a221a7b
|
||||
|
||||
$ REPOID=2 blobimport repo-hg/.hg backup --backup-from-repo-id 0
|
||||
$ REPOID=2 blobimport repo-hg/.hg backup --backup-from-repo-name repo
|
||||
$ sqlite3 "$TESTTMP/monsql/sqlite_dbs" "select * from mutable_counters";
|
||||
0|highest-imported-gen-num|2
|
||||
2|highest-imported-gen-num|3
|
||||
|
Loading…
Reference in New Issue
Block a user