Make config store always explicit

Summary: In D24447404, I provided some utility functions that allowed me to avoid constructing and/or passing around a ConfigStore. Remove those functions and fix up the code to run.

Reviewed By: krallin

Differential Revision: D24502692

fbshipit-source-id: 742dbc54fbcf735895d6829745b9317af14dfa0b
Simon Farnsworth, 2020-10-24 06:21:31 -07:00, committed by Facebook GitHub Bot
parent 00871310a7
commit 7e06175e61
43 changed files with 309 additions and 221 deletions
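The gist of the change, condensed from the cmdlib args.rs diff below (an excerpt of the patch, not an addition to it): helpers stop fetching a process-global store and instead require the caller to pass one in.

    // Before: the store came out of a process-global OnceCell via get_config_store().
    pub fn load_repo_configs<'a>(matches: &ArgMatches<'a>) -> Result<RepoConfigs> {
        metaconfig_parser::load_repo_configs(get_config_path(matches)?, get_config_store()?)
    }

    // After: the ConfigStore is an explicit parameter.
    pub fn load_repo_configs<'a>(
        config_store: &ConfigStore,
        matches: &ArgMatches<'a>,
    ) -> Result<RepoConfigs> {
        metaconfig_parser::load_repo_configs(get_config_path(matches)?, config_store)
    }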

==== changed file ====

@@ -19,7 +19,7 @@ use std::str::FromStr;
 use std::sync::Arc;
 use std::time::Duration;
-use anyhow::{bail, format_err, Context, Error, Result};
+use anyhow::{bail, format_err, Error, Result};
 use cached_config::{ConfigHandle, ConfigStore, TestSource};
 use clap::{App, Arg, ArgGroup, ArgMatches};
 use cloned::cloned;
@@ -476,13 +476,14 @@ pub fn init_logging<'a>(fb: FacebookInit, matches: &ArgMatches<'a>) -> Logger {
 }

 fn get_repo_id_and_name_from_values<'a>(
+    config_store: &ConfigStore,
     matches: &ArgMatches<'a>,
     option_repo_name: &str,
     option_repo_id: &str,
 ) -> Result<(RepositoryId, String)> {
     let repo_name = matches.value_of(option_repo_name);
     let repo_id = matches.value_of(option_repo_id);
-    let configs = load_repo_configs(matches)?;
+    let configs = load_repo_configs(config_store, matches)?;

     match (repo_name, repo_id) {
         (Some(_), Some(_)) => bail!("both repo-name and repo-id parameters set"),
@@ -527,41 +528,64 @@ fn get_repo_id_and_name_from_values<'a>(
     }
 }

-pub fn get_repo_id<'a>(matches: &ArgMatches<'a>) -> Result<RepositoryId> {
-    let (repo_id, _) = get_repo_id_and_name_from_values(matches, REPO_NAME, REPO_ID)?;
+pub fn get_repo_id<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<RepositoryId> {
+    let (repo_id, _) = get_repo_id_and_name_from_values(config_store, matches, REPO_NAME, REPO_ID)?;
     Ok(repo_id)
 }

-pub fn get_repo_name<'a>(matches: &ArgMatches<'a>) -> Result<String> {
-    let (_, repo_name) = get_repo_id_and_name_from_values(matches, REPO_NAME, REPO_ID)?;
+pub fn get_repo_name<'a>(config_store: &ConfigStore, matches: &ArgMatches<'a>) -> Result<String> {
+    let (_, repo_name) =
+        get_repo_id_and_name_from_values(config_store, matches, REPO_NAME, REPO_ID)?;
     Ok(repo_name)
 }

-pub fn get_source_repo_id<'a>(matches: &ArgMatches<'a>) -> Result<RepositoryId> {
-    let (repo_id, _) = get_repo_id_and_name_from_values(matches, SOURCE_REPO_NAME, SOURCE_REPO_ID)?;
+pub fn get_source_repo_id<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<RepositoryId> {
+    let (repo_id, _) =
+        get_repo_id_and_name_from_values(config_store, matches, SOURCE_REPO_NAME, SOURCE_REPO_ID)?;
     Ok(repo_id)
 }

-pub fn get_source_repo_id_opt<'a>(matches: &ArgMatches<'a>) -> Result<Option<RepositoryId>> {
+pub fn get_source_repo_id_opt<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<Option<RepositoryId>> {
     if matches.is_present(SOURCE_REPO_NAME) || matches.is_present(SOURCE_REPO_ID) {
-        let (repo_id, _) =
-            get_repo_id_and_name_from_values(matches, SOURCE_REPO_NAME, SOURCE_REPO_ID)?;
+        let (repo_id, _) = get_repo_id_and_name_from_values(
+            config_store,
+            matches,
+            SOURCE_REPO_NAME,
+            SOURCE_REPO_ID,
+        )?;
         Ok(Some(repo_id))
     } else {
         Ok(None)
     }
 }

-pub fn get_target_repo_id<'a>(matches: &ArgMatches<'a>) -> Result<RepositoryId> {
-    let (repo_id, _) = get_repo_id_and_name_from_values(matches, TARGET_REPO_NAME, TARGET_REPO_ID)?;
+pub fn get_target_repo_id<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<RepositoryId> {
+    let (repo_id, _) =
+        get_repo_id_and_name_from_values(config_store, matches, TARGET_REPO_NAME, TARGET_REPO_ID)?;
     Ok(repo_id)
 }

-pub fn open_sql<T>(fb: FacebookInit, matches: &ArgMatches<'_>) -> BoxFuture<T, Error>
+pub fn open_sql<T>(
+    fb: FacebookInit,
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'_>,
+) -> BoxFuture<T, Error>
 where
     T: SqlConstructFromMetadataDatabaseConfig,
 {
-    let (_, config) = try_boxfuture!(get_config(matches));
+    let (_, config) = try_boxfuture!(get_config(config_store, matches));
     let mysql_options = parse_mysql_options(matches);
     let readonly_storage = parse_readonly_storage(matches);
     open_sql_with_config_and_mysql_options(
@@ -572,12 +596,16 @@ where
     )
 }

-pub fn open_source_sql<T>(fb: FacebookInit, matches: &ArgMatches<'_>) -> BoxFuture<T, Error>
+pub fn open_source_sql<T>(
+    fb: FacebookInit,
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'_>,
+) -> BoxFuture<T, Error>
 where
     T: SqlConstructFromMetadataDatabaseConfig,
 {
-    let source_repo_id = try_boxfuture!(get_source_repo_id(matches));
-    let (_, config) = try_boxfuture!(get_config_by_repoid(matches, source_repo_id));
+    let source_repo_id = try_boxfuture!(get_source_repo_id(config_store, matches));
+    let (_, config) = try_boxfuture!(get_config_by_repoid(config_store, matches, source_repo_id));
    let mysql_options = parse_mysql_options(matches);
     let readonly_storage = parse_readonly_storage(matches);
     open_sql_with_config_and_mysql_options(
@@ -879,28 +907,41 @@ pub fn get_config_path<'a>(matches: &'a ArgMatches<'a>) -> Result<&'a str> {
         .ok_or(Error::msg(format!("{} must be specified", CONFIG_PATH)))
 }

-pub fn load_repo_configs<'a>(matches: &ArgMatches<'a>) -> Result<RepoConfigs> {
-    metaconfig_parser::load_repo_configs(get_config_path(matches)?, get_config_store()?)
+pub fn load_repo_configs<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<RepoConfigs> {
+    metaconfig_parser::load_repo_configs(get_config_path(matches)?, config_store)
 }

-pub fn load_common_config<'a>(matches: &ArgMatches<'a>) -> Result<CommonConfig> {
-    metaconfig_parser::load_common_config(get_config_path(matches)?, get_config_store()?)
+pub fn load_common_config<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<CommonConfig> {
+    metaconfig_parser::load_common_config(get_config_path(matches)?, config_store)
 }

-pub fn load_storage_configs<'a>(matches: &ArgMatches<'a>) -> Result<StorageConfigs> {
-    metaconfig_parser::load_storage_configs(get_config_path(matches)?, get_config_store()?)
+pub fn load_storage_configs<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<StorageConfigs> {
+    metaconfig_parser::load_storage_configs(get_config_path(matches)?, config_store)
 }

-pub fn get_config<'a>(matches: &ArgMatches<'a>) -> Result<(String, RepoConfig)> {
-    let repo_id = get_repo_id(matches)?;
-    get_config_by_repoid(matches, repo_id)
+pub fn get_config<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<(String, RepoConfig)> {
+    let repo_id = get_repo_id(config_store, matches)?;
+    get_config_by_repoid(config_store, matches, repo_id)
 }

 pub fn get_config_by_repoid<'a>(
+    config_store: &ConfigStore,
     matches: &ArgMatches<'a>,
     repo_id: RepositoryId,
 ) -> Result<(String, RepoConfig)> {
-    let configs = load_repo_configs(matches)?;
+    let configs = load_repo_configs(config_store, matches)?;
     configs
         .get_repo_config(repo_id)
         .ok_or_else(|| format_err!("unknown repoid {:?}", repo_id))
@@ -916,7 +957,8 @@ fn open_repo_internal<'a>(
     scrub: Scrubbing,
     redaction_override: Option<Redaction>,
 ) -> impl Future<Item = BlobRepo, Error = Error> {
-    let repo_id = try_boxfuture!(get_repo_id(matches));
+    let config_store = try_boxfuture!(init_config_store(fb, logger, matches));
+    let repo_id = try_boxfuture!(get_repo_id(config_store, matches));
     open_repo_internal_with_repo_id(
         fb,
         logger,
@@ -939,11 +981,12 @@ fn open_repo_internal_with_repo_id<'a>(
     scrub: Scrubbing,
     redaction_override: Option<Redaction>,
 ) -> BoxFuture<BlobRepo, Error> {
-    try_boxfuture!(init_config_store(fb, logger, matches));
-    let common_config = try_boxfuture!(load_common_config(&matches));
+    let config_store = try_boxfuture!(init_config_store(fb, logger, matches));
+    let common_config = try_boxfuture!(load_common_config(config_store, &matches));
     let (reponame, config) = {
-        let (reponame, mut config) = try_boxfuture!(get_config_by_repoid(matches, repo_id));
+        let (reponame, mut config) =
+            try_boxfuture!(get_config_by_repoid(config_store, matches, repo_id));
         if let Scrubbing::Enabled = scrub {
             config
                 .storage_config
@@ -1246,13 +1289,13 @@ pub fn init_tunables<'a>(fb: FacebookInit, matches: &ArgMatches<'a>, logger: Log
         return Ok(());
     }

-    init_config_store(fb, &logger, matches)?;
+    let config_store = init_config_store(fb, &logger, matches)?;
     let tunables_spec = matches
         .value_of(TUNABLES_CONFIG)
         .unwrap_or(DEFAULT_TUNABLES_PATH);

-    let config_handle = get_config_handle(fb, logger.clone(), Some(tunables_spec), 1)?;
+    let config_handle = get_config_handle(config_store, &logger, Some(tunables_spec))?;

     init_tunables_worker(logger, config_handle)
 }
@@ -1269,10 +1312,9 @@ pub fn init_runtime(matches: &ArgMatches) -> io::Result<tokio_compat::runtime::R
 /// NB: Outside tests, using file:PATH is not recommended because it is inefficient - instead
 /// use a local configerator path and configerator:PATH
 pub fn get_config_handle<T>(
-    _fb: FacebookInit,
-    logger: Logger,
+    config_store: &ConfigStore,
+    logger: &Logger,
     source_spec: Option<&str>,
-    _poll_interval: u64,
 ) -> Result<ConfigHandle<T>, Error>
 where
     T: Default + Send + Sync + 'static + serde::de::DeserializeOwned,
@@ -1287,10 +1329,10 @@ where
             // disallowed trailing parts.
             match (iter.next(), iter.next(), iter.next()) {
                 (Some("configerator"), Some(source), None) => {
-                    get_config_store()?.get_config_handle(source.to_string())
+                    config_store.get_config_handle(source.to_string())
                 }
                 (Some("file"), Some(file), None) => ConfigStore::file(
-                    logger,
+                    logger.clone(),
                     PathBuf::new(),
                     String::new(),
                     Duration::from_secs(1),
@@ -1306,10 +1348,6 @@ where
 static CONFIGERATOR: OnceCell<ConfigStore> = OnceCell::new();

-pub fn get_config_store() -> Result<&'static ConfigStore, Error> {
-    CONFIGERATOR.get().context("No configerator available")
-}
-
 pub fn is_test_instance<'a>(matches: &ArgMatches<'a>) -> bool {
     matches.is_present(TEST_INSTANCE_ARG)
 }
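Every binary touched below then follows the same pattern, condensed here from the remaining diffs: obtain the store once from init_config_store (which appears to remain backed by the CONFIGERATOR OnceCell kept above, so repeated calls hand back the same instance) and thread the reference through.

    let config_store = args::init_config_store(fb, &logger, &matches)?;
    let repo_id = args::get_repo_id(config_store, &matches)?;
    let (_, config) = args::get_config_by_repoid(config_store, &matches, repo_id)?;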

==== changed file ====

@@ -55,11 +55,14 @@ async fn create_commit_syncer_args_and_config_from_matches_impl(
     matches: &ArgMatches<'_>,
     reverse: bool,
 ) -> Result<(CommitSyncerArgs<SqlSyncedCommitMapping>, CommitSyncConfig), Error> {
-    let source_repo_id = args::get_source_repo_id(&matches)?;
-    let target_repo_id = args::get_target_repo_id(&matches)?;
+    let config_store = args::init_config_store(fb, logger, matches)?;
+    let source_repo_id = args::get_source_repo_id(config_store, &matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;

-    let (_, source_repo_config) = args::get_config_by_repoid(&matches, source_repo_id)?;
-    let (_, target_repo_config) = args::get_config_by_repoid(&matches, target_repo_id)?;
+    let (_, source_repo_config) =
+        args::get_config_by_repoid(config_store, &matches, source_repo_id)?;
+    let (_, target_repo_config) =
+        args::get_config_by_repoid(config_store, &matches, target_repo_id)?;

     let source_repo_fut = args::open_repo_with_repo_id(fb, logger, source_repo_id, &matches);
     let target_repo_fut = args::open_repo_with_repo_id(fb, logger, target_repo_id, &matches);

==== changed file ====

@@ -162,8 +162,9 @@ pub async fn subcommand_blobstore_fetch<'a>(
     matches: &'a ArgMatches<'_>,
     sub_m: &'a ArgMatches<'_>,
 ) -> Result<(), SubcommandError> {
-    let repo_id = args::get_repo_id(&matches)?;
-    let (_, config) = args::get_config(&matches)?;
+    let config_store = args::init_config_store(fb, &logger, matches)?;
+    let repo_id = args::get_repo_id(config_store, &matches)?;
+    let (_, config) = args::get_config(config_store, &matches)?;
     let redaction = config.redaction;
     let storage_config = config.storage_config;
     let inner_blobstore_id = args::get_u64_opt(&sub_m, "inner-blobstore-id");
@@ -185,7 +186,7 @@ pub async fn subcommand_blobstore_fetch<'a>(
         blobstore_options,
     );

-    let common_config = args::load_common_config(&matches)?;
+    let common_config = args::load_common_config(config_store, &matches)?;
     let censored_scuba_params = common_config.censored_scuba_params;
     let mut scuba_redaction_builder =
         ScubaSampleBuilder::with_opt_table(fb, censored_scuba_params.table);
@@ -204,7 +205,7 @@ pub async fn subcommand_blobstore_fetch<'a>(
     let maybe_output_file = sub_m.value_of_os(RAW_FILE_NAME_ARG);

     let maybe_redacted_blobs_fut = match redaction {
-        Redaction::Enabled => args::open_sql::<SqlRedactedContentStore>(fb, &matches)
+        Redaction::Enabled => args::open_sql::<SqlRedactedContentStore>(fb, config_store, &matches)
             .and_then(|redacted_blobs| {
                 redacted_blobs
                     .get_all_redacted_blobs()

==== changed file ====

@@ -105,11 +105,14 @@ pub async fn subcommand_crossrepo<'a>(
             .map_err(|e| e.into())
         }
         (VERIFY_BOOKMARKS_SUBCOMMAND, Some(sub_sub_m)) => {
+            let config_store = args::init_config_store(fb, ctx.logger(), matches)?;
             let (source_repo, target_repo, mapping) =
                 get_source_target_repos_and_mapping(fb, logger, matches).await?;

             let source_repo_id = source_repo.get_repoid();
-            let (_, source_repo_config) = args::get_config_by_repoid(matches, source_repo_id)?;
+            let (_, source_repo_config) =
+                args::get_config_by_repoid(config_store, matches, source_repo_id)?;
             let update_large_repo_bookmarks = sub_sub_m.is_present(UPDATE_LARGE_REPO_BOOKMARKS);
@@ -141,7 +144,9 @@ async fn run_config_sub_subcommand<'a>(
     config_subcommand_matches: &'a ArgMatches<'a>,
     live_commit_sync_config: CfgrLiveCommitSyncConfig,
 ) -> Result<(), SubcommandError> {
-    let repo_id = args::get_repo_id(matches)?;
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
+    let repo_id = args::get_repo_id(config_store, matches)?;

     match config_subcommand_matches.subcommand() {
         (SUBCOMMAND_BY_VERSION, Some(sub_m)) => {
@@ -173,6 +178,8 @@ async fn run_pushredirection_subcommand<'a>(
     config_subcommand_matches: &'a ArgMatches<'a>,
     live_commit_sync_config: CfgrLiveCommitSyncConfig,
 ) -> Result<(), SubcommandError> {
+    let config_store = args::init_config_store(fb, ctx.logger(), matches)?;
     let (source_repo, target_repo, mapping) =
         get_source_target_repos_and_mapping(fb, ctx.logger().clone(), matches).await?;
@@ -205,10 +212,11 @@ async fn run_pushredirection_subcommand<'a>(
                     .await?
                     .ok_or_else(|| anyhow!("No bookmarks update log entries for large repo"))?;

-                let mutable_counters = args::open_source_sql::<SqlMutableCounters>(fb, &matches)
-                    .context("While opening SqlMutableCounters")
-                    .compat()
-                    .await?;
+                let mutable_counters =
+                    args::open_source_sql::<SqlMutableCounters>(fb, config_store, &matches)
+                        .context("While opening SqlMutableCounters")
+                        .compat()
+                        .await?;

                 let counter = format_backsyncer_counter(&large_repo.get_repoid());
                 info!(
@@ -428,8 +436,10 @@ async fn get_source_target_repos_and_mapping<'a>(
     logger: Logger,
     matches: &'a ArgMatches<'_>,
 ) -> Result<(BlobRepo, BlobRepo, SqlSyncedCommitMapping), Error> {
-    let source_repo_id = args::get_source_repo_id(matches)?;
-    let target_repo_id = args::get_target_repo_id(matches)?;
+    let config_store = args::init_config_store(fb, &logger, matches)?;
+
+    let source_repo_id = args::get_source_repo_id(config_store, matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, matches)?;

     let source_repo = args::open_repo_with_repo_id(fb, &logger, source_repo_id, matches)
         .boxify()
@@ -439,7 +449,8 @@ async fn get_source_target_repos_and_mapping<'a>(
         .compat();
     // TODO(stash): in reality both source and target should point to the same mapping
     // It'll be nice to verify it
-    let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(fb, &matches).compat();
+    let mapping =
+        args::open_source_sql::<SqlSyncedCommitMapping>(fb, config_store, &matches).compat();
     try_join!(source_repo, target_repo, mapping)
 }

==== changed file ====

@@ -534,16 +534,18 @@ pub async fn subcommand_process_hg_sync<'a>(
     matches: &'a ArgMatches<'_>,
     logger: Logger,
 ) -> Result<(), SubcommandError> {
-    let repo_id = args::get_repo_id(&matches)?;
+    let config_store = args::init_config_store(fb, &logger, matches)?;
+    let repo_id = args::get_repo_id(config_store, &matches)?;

     let ctx = CoreContext::new_with_logger(fb, logger.clone());

-    let mutable_counters = args::open_sql::<SqlMutableCounters>(fb, &matches)
+    let mutable_counters = args::open_sql::<SqlMutableCounters>(fb, config_store, &matches)
         .compat()
         .await
         .context("While opening SqlMutableCounters")?;

-    let bookmarks = args::open_sql::<SqlBookmarksBuilder>(fb, &matches)
+    let bookmarks = args::open_sql::<SqlBookmarksBuilder>(fb, config_store, &matches)
         .compat()
         .await
         .context("While opening SqlBookmarks")?

==== changed file ====

@@ -69,11 +69,13 @@ pub async fn subcommand_mutable_counters<'a>(
     matches: &'a ArgMatches<'_>,
     logger: Logger,
 ) -> Result<(), SubcommandError> {
-    let repo_id = args::get_repo_id(&matches)?;
+    let config_store = args::init_config_store(fb, &logger, matches)?;
+    let repo_id = args::get_repo_id(config_store, &matches)?;

     let ctx = CoreContext::new_with_logger(fb, logger.clone());

-    let mutable_counters = args::open_sql::<SqlMutableCounters>(fb, &matches)
+    let mutable_counters = args::open_sql::<SqlMutableCounters>(fb, config_store, &matches)
         .context("While opening SqlMutableCounters")
         .compat()
         .await?;

==== changed file ====

@@ -240,9 +240,10 @@ fn get_ctx_blobrepo_redacted_blobs_cs_id(
     };

     args::init_cachelib(fb, &matches, None);
+    let config_store = try_boxfuture!(args::init_config_store(fb, &logger, matches));
     let blobrepo = args::open_repo(fb, &logger, &matches);
-    let redacted_blobs = args::open_sql::<SqlRedactedContentStore>(fb, &matches)
+    let redacted_blobs = args::open_sql::<SqlRedactedContentStore>(fb, config_store, &matches)
         .context("While opening SqlRedactedContentStore")
         .from_err();

==== changed file ====

@@ -107,8 +107,9 @@ pub async fn subcommand_skiplist<'a>(
             let skiplist_ty = SkiplistType::new(sub_m.is_present(ARG_SPARSE));

             args::init_cachelib(fb, &matches, None);
+            let config_store = args::init_config_store(fb, &logger, matches)?;
             let ctx = CoreContext::new_with_logger(fb, logger.clone());
-            let sql_changesets = args::open_sql::<SqlChangesets>(fb, &matches);
+            let sql_changesets = args::open_sql::<SqlChangesets>(fb, config_store, &matches);
             let repo = args::open_repo(fb, &logger, &matches);
             repo.join(sql_changesets)
                 .and_then(move |(repo, sql_changesets)| {

==== changed file ====

@@ -319,8 +319,9 @@ async fn run_aliasverify<'a>(
     matches: &'a ArgMatches<'a>,
     mode: Mode,
 ) -> Result<(), Error> {
+    let config_store = args::init_config_store(fb, logger, matches)?;
     let (sqlchangesets, blobrepo) = try_join!(
-        args::open_sql::<SqlChangesets>(fb, matches).compat(),
+        args::open_sql::<SqlChangesets>(fb, config_store, matches).compat(),
         args::open_repo(fb, &logger, matches).compat(),
     )?;
     AliasVerification::new(
@@ -342,7 +343,7 @@ fn main(fb: FacebookInit) -> Result<()> {
     let ctx = CoreContext::new_with_logger(fb, logger.clone());

     args::init_cachelib(fb, &matches, None);
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;

     let mode = match matches.value_of("mode").expect("no default on mode") {
         "verify" => Mode::Verify,
@@ -360,7 +361,7 @@ fn main(fb: FacebookInit) -> Result<()> {
         .parse()
         .expect("Minimum Changeset Id should be numeric");

-    let repoid = args::get_repo_id(&matches).expect("Need repo id");
+    let repoid = args::get_repo_id(config_store, &matches).expect("Need repo id");

     block_execute(
         run_aliasverify(fb, ctx, &logger, step, min_cs_db_id, repoid, &matches, mode),

==== changed file ====

@@ -378,6 +378,7 @@ async fn run_subcmd<'a>(
             subcommand_tail(&ctx, unredacted_repo, use_shared_leases, batched).await
         }
         (SUBCOMMAND_PREFETCH_COMMITS, Some(sub_m)) => {
+            let config_store = args::init_config_store(fb, logger, &matches)?;
             let out_filename = sub_m
                 .value_of(ARG_OUT_FILENAME)
                 .ok_or_else(|| format_err!("missing required argument: {}", ARG_OUT_FILENAME))?
@@ -385,7 +386,7 @@ async fn run_subcmd<'a>(

             let (repo, changesets) = try_join(
                 args::open_repo(fb, &logger, &matches).compat(),
-                args::open_sql::<SqlChangesets>(fb, &matches).compat(),
+                args::open_sql::<SqlChangesets>(fb, config_store, &matches).compat(),
             )
             .await?;
             let phases = repo.get_phases();

==== changed file ====

@@ -93,9 +93,10 @@ fn main(fb: fbinit::FacebookInit) {
     let (caching, logger, mut runtime) =
         args::init_mononoke(fb, &matches, None).expect("failed to initialise mononoke");
-    args::init_config_store(fb, &logger, &matches).expect("failed to start Configerator");
+    let config_store =
+        args::init_config_store(fb, &logger, &matches).expect("failed to start Configerator");

-    let storage_config = args::load_storage_configs(&matches)
+    let storage_config = args::load_storage_configs(config_store, &matches)
         .expect("Could not read storage configs")
         .storage
         .remove(

==== changed file ====

@@ -200,6 +200,8 @@ async fn run_blobimport<'a>(
     logger: &Logger,
     matches: &'a ArgMatches<'a>,
 ) -> Result<()> {
+    let config_store = args::init_config_store(fb, logger, matches)?;
+
     let revlogrepo_path = matches
         .value_of("INPUT")
         .expect("input is not specified")
@@ -283,10 +285,10 @@ async fn run_blobimport<'a>(

     let has_globalrev = matches.is_present("has-globalrev");

-    let (_repo_name, repo_config) = args::get_config(&matches)?;
+    let (_repo_name, repo_config) = args::get_config(config_store, &matches)?;
     let populate_git_mapping = repo_config.pushrebase.populate_git_mapping.clone();

-    let small_repo_id = args::get_source_repo_id_opt(&matches)?;
+    let small_repo_id = args::get_source_repo_id_opt(config_store, &matches)?;

     let (blobrepo, globalrevs_store, synced_commit_mapping, mutable_counters) = try_join4(
         if matches.is_present("no-create") {
@@ -298,9 +300,9 @@ async fn run_blobimport<'a>(
                 .compat()
                 .boxed()
         },
-        args::open_sql::<SqlBonsaiGlobalrevMapping>(fb, &matches).compat(),
-        args::open_sql::<SqlSyncedCommitMapping>(fb, &matches).compat(),
-        args::open_sql::<SqlMutableCounters>(fb, &matches).compat(),
+        args::open_sql::<SqlBonsaiGlobalrevMapping>(fb, config_store, &matches).compat(),
+        args::open_sql::<SqlSyncedCommitMapping>(fb, config_store, &matches).compat(),
+        args::open_sql::<SqlMutableCounters>(fb, config_store, &matches).compat(),
     )
     .await?;

==== changed file ====

@@ -267,11 +267,11 @@ fn main(fb: FacebookInit) -> Result<()> {
         .value_of("storage-id")
         .ok_or(Error::msg("Missing storage-id"))?;
     let logger = args::init_logging(fb, &matches);
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;
     let mysql_options = args::parse_mysql_options(&matches);
     let readonly_storage = args::parse_readonly_storage(&matches);
     let blobstore_options = args::parse_blobstore_options(&matches);
-    let storage_config = args::load_storage_configs(&matches)?
+    let storage_config = args::load_storage_configs(config_store, &matches)?
         .storage
         .remove(storage_id)
         .ok_or(format_err!("Storage id `{}` not found", storage_id))?;

==== changed file ====

@@ -29,11 +29,11 @@ fn main(fb: FacebookInit) -> Result<()> {
     let quiet = matches.is_present("quiet");
     let verbose = matches.is_present("verbose");

-    args::init_config_store(fb, None, &matches)?;
+    let config_store = args::init_config_store(fb, None, &matches)?;

     // Most of the work is done here - this validates that the files are present,
     // are correctly formed, and have the right fields (not too many, not too few).
-    let configs = match args::load_repo_configs(&matches) {
+    let configs = match args::load_repo_configs(config_store, &matches) {
         Err(err) => {
             eprintln!("Error loading configs: {:#?}", err);
             return Err(err);

==== changed file ====

@@ -99,11 +99,11 @@ fn main(fb: fbinit::FacebookInit) -> Result<()> {
     let matches = app.get_matches();
     let (_, logger, mut runtime) =
         args::init_mononoke(fb, &matches, None).context("failed to initialise mononoke")?;
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;

     let scheduled_max = args::get_usize_opt(&matches, ARG_SCHEDULED_MAX).unwrap_or(100) as usize;

-    let storage_config = args::load_storage_configs(&matches)
+    let storage_config = args::load_storage_configs(config_store, &matches)
         .context("Could not read storage configs")?
         .storage
         .remove(

==== changed file ====

@@ -173,15 +173,15 @@ fn parse_args(fb: FacebookInit) -> Result<Config, Error> {
     let matches = app.get_matches();

     let logger = args::init_logging(fb, &matches);
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;
     let ctx = CoreContext::new_with_logger(fb, logger.clone());

-    let repo_id = args::get_repo_id(&matches)?;
+    let repo_id = args::get_repo_id(config_store, &matches)?;

     let storage_id = matches
         .value_of("storage-id")
         .ok_or(Error::msg("`storage-id` argument required"))?;

-    let storage_config = args::load_storage_configs(&matches)?
+    let storage_config = args::load_storage_configs(config_store, &matches)?
         .storage
         .remove(storage_id)
         .ok_or(Error::msg("Unknown `storage-id`"))?;

==== changed file ====

@@ -76,6 +76,7 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
 async fn run<'a>(ctx: CoreContext, matches: &'a ArgMatches<'a>) -> Result<(), Error> {
     let idmap_version_arg: Option<u64> = args::get_u64_opt(&matches, IDMAP_VERSION_ARG);
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;

     // This is a bit weird from the dependency point of view but I think that it is best. The
     // BlobRepo may have a SegmentedChangelog attached to it but that doesn't hurt us in any way.
@@ -87,7 +88,7 @@ async fn run<'a>(ctx: CoreContext, matches: &'a ArgMatches<'a>) -> Result<(), Er
         .context("opening repo")?;

     let mysql_options = args::parse_mysql_options(matches);
-    let (_, config) = args::get_config(&matches)?;
+    let (_, config) = args::get_config(config_store, &matches)?;
     let storage_config = config.storage_config;
     let readonly_storage = ReadOnlyStorage(false);

==== changed file ====

@@ -529,13 +529,13 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
     let logger = args::init_logging(fb, &matches);
     let ctx = CoreContext::new_with_logger(fb, logger.clone());
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;

     let bookmark = match matches.value_of("bookmark") {
         Some(name) => name.to_string(),
         None => String::from("master"),
     };

     let bookmark = BookmarkName::new(bookmark.clone())?;

-    let repo_name = args::get_repo_name(&matches)?;
+    let repo_name = args::get_repo_name(config_store, &matches)?;

     let scuba_logger = if matches.is_present("log-to-scuba") {
         ScubaSampleBuilder::new(fb, SCUBA_DATASET_NAME)
     } else {

==== changed file ====

@@ -92,9 +92,10 @@ async fn run<'a>(ctx: CoreContext, matches: &'a ArgMatches<'a>) -> Result<(), Er
         return Ok(());
     }

+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
     let mysql_options = args::parse_mysql_options(matches);
     let blobstore_options = args::parse_blobstore_options(matches);
-    let configs = args::load_repo_configs(matches)?;
+    let configs = args::load_repo_configs(config_store, matches)?;

     // wait for myrouter
     myrouter_ready(

==== changed file ====

@@ -84,8 +84,9 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
     args::init_cachelib(fb, &matches, None);
     let logger = args::init_logging(fb, &matches);
+    let config_store = args::init_config_store(fb, &logger, &matches)?;

     let ctx = CoreContext::new_with_logger(fb, logger.clone());
-    let globalrevs_store = args::open_sql::<SqlBonsaiGlobalrevMapping>(fb, &matches);
+    let globalrevs_store = args::open_sql::<SqlBonsaiGlobalrevMapping>(fb, config_store, &matches);
     let blobrepo = args::open_repo(fb, &logger, &matches);

     let run = async {

==== changed file ====

@@ -258,14 +258,14 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
     let matches = app.get_matches();
     let (_, logger, mut runtime) = args::init_mononoke(fb, &matches, None)?;
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;

-    let source_repo_id = args::get_source_repo_id(&matches)?;
-    let target_repo_id = args::get_target_repo_id(&matches)?;
+    let source_repo_id = args::get_source_repo_id(config_store, &matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;

-    let (source_repo_name, _) = args::get_config_by_repoid(&matches, source_repo_id)?;
+    let (source_repo_name, _) = args::get_config_by_repoid(config_store, &matches, source_repo_id)?;
     let (target_repo_name, target_repo_config) =
-        args::get_config_by_repoid(&matches, target_repo_id)?;
+        args::get_config_by_repoid(config_store, &matches, target_repo_id)?;

     let commit_syncer_args = runtime.block_on_std(create_commit_syncer_args_from_matches(
         fb, &logger, &matches,

==== changed file ====

@@ -103,8 +103,8 @@ fn get_commit_syncer<'a>(
     commit_syncer_args: CommitSyncerArgs<SqlSyncedCommitMapping>,
 ) -> Result<CommitSyncer<SqlSyncedCommitMapping>, Error> {
     let config_store = args::init_config_store(ctx.fb, logger, &matches)?;
-    let target_repo_id = args::get_target_repo_id(&matches)?;
-    let live_commit_sync_config = Arc::new(CfgrLiveCommitSyncConfig::new(&logger, &config_store)?);
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;
+    let live_commit_sync_config = Arc::new(CfgrLiveCommitSyncConfig::new(&logger, config_store)?);
     let commit_sync_config =
         live_commit_sync_config.get_current_commit_sync_config(&ctx, target_repo_id)?;
     commit_syncer_args.try_into_commit_syncer(&commit_sync_config, live_commit_sync_config)

==== changed file ====

@@ -155,8 +155,9 @@ async fn run(
     ctx: CoreContext,
     matches: ArgMatches<'static>,
 ) -> Result<(), Error> {
-    let repo_id = args::get_repo_id(&matches)?;
-    let (_, repo_config) = args::get_config_by_repoid(&matches, repo_id)?;
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), &matches)?;
+    let repo_id = args::get_repo_id(config_store, &matches)?;
+    let (_, repo_config) = args::get_config_by_repoid(config_store, &matches, repo_id)?;
     let logger = ctx.logger();

     let blobrepo = args::open_repo_with_repo_id(fb, &logger, repo_id, &matches)

==== changed file ====

@@ -38,6 +38,7 @@ sql_ext = { path = "../../common/rust/sql_ext" }
 synced_commit_mapping = { path = "../synced_commit_mapping" }
 unodes = { path = "../../derived_data/unodes" }
 borrowed = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
+cached_config = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
 cloned = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
 fbinit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
 futures_ext = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

==== changed file ====

@@ -11,6 +11,7 @@
 use anyhow::{bail, format_err, Context, Error, Result};
 use bookmarks::BookmarkName;
 use borrowed::borrowed;
+use cached_config::ConfigStore;
 use clap::ArgMatches;
 use cmdlib::{args, helpers};
 use cmdlib_x_repo::create_commit_syncer_args_from_matches;
@@ -171,8 +172,9 @@ async fn run_sync_diamond_merge<'a>(
     matches: &ArgMatches<'a>,
     sub_m: &ArgMatches<'a>,
 ) -> Result<(), Error> {
-    let source_repo_id = args::get_source_repo_id(matches)?;
-    let target_repo_id = args::get_target_repo_id(matches)?;
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
+    let source_repo_id = args::get_source_repo_id(config_store, matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, matches)?;
     let maybe_bookmark = sub_m
         .value_of(cli::COMMIT_BOOKMARK)
         .map(|bookmark_str| BookmarkName::new(bookmark_str))
@@ -182,9 +184,10 @@ async fn run_sync_diamond_merge<'a>(

     let source_repo = args::open_repo_with_repo_id(ctx.fb, ctx.logger(), source_repo_id, matches);
     let target_repo = args::open_repo_with_repo_id(ctx.fb, ctx.logger(), target_repo_id, matches);
-    let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(ctx.fb, &matches);
+    let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(ctx.fb, config_store, &matches);

-    let (_, source_repo_config) = args::get_config_by_repoid(matches, source_repo_id)?;
+    let (_, source_repo_config) =
+        args::get_config_by_repoid(config_store, matches, source_repo_id)?;
     let merge_commit_hash = sub_m.value_of(COMMIT_HASH).unwrap().to_owned();

     let (source_repo, target_repo, mapping) =
@@ -325,6 +328,7 @@ async fn run_gradual_merge<'a>(
     matches: &ArgMatches<'a>,
     sub_m: &ArgMatches<'a>,
 ) -> Result<(), Error> {
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
     let repo = args::open_repo(ctx.fb, &ctx.logger(), &matches)
         .compat()
         .await?;
@@ -341,7 +345,7 @@ async fn run_gradual_merge<'a>(
     let dry_run = sub_m.is_present(DRY_RUN);
     let limit = args::get_usize_opt(sub_m, LIMIT);

-    let (_, repo_config) = args::get_config_by_repoid(&matches, repo.get_repoid())?;
+    let (_, repo_config) = args::get_config_by_repoid(config_store, &matches, repo.get_repoid())?;
     let last_deletion_commit =
         helpers::csid_resolve(ctx.clone(), repo.clone(), last_deletion_commit).compat();
     let pre_deletion_commit =
@@ -380,6 +384,7 @@ async fn run_gradual_merge_progress<'a>(
     matches: &ArgMatches<'a>,
     sub_m: &ArgMatches<'a>,
 ) -> Result<(), Error> {
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
     let repo = args::open_repo(ctx.fb, &ctx.logger(), &matches)
         .compat()
         .await?;
@@ -394,7 +399,7 @@ async fn run_gradual_merge_progress<'a>(
         .value_of(COMMIT_BOOKMARK)
         .ok_or(format_err!("bookmark where to merge is not specified"))?;

-    let (_, repo_config) = args::get_config_by_repoid(&matches, repo.get_repoid())?;
+    let (_, repo_config) = args::get_config_by_repoid(config_store, &matches, repo.get_repoid())?;
     let last_deletion_commit =
         helpers::csid_resolve(ctx.clone(), repo.clone(), last_deletion_commit).compat();
     let pre_deletion_commit =
@@ -472,9 +477,9 @@ async fn run_check_push_redirection_prereqs<'a>(
     matches: &ArgMatches<'a>,
     sub_m: &ArgMatches<'a>,
 ) -> Result<(), Error> {
-    let target_repo_id = args::get_target_repo_id(&matches)?;
     let config_store = args::init_config_store(ctx.fb, ctx.logger(), &matches)?;
-    let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?;
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;
+    let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), config_store)?;
     let commit_syncer_args =
         create_commit_syncer_args_from_matches(ctx.fb, ctx.logger(), &matches).await?;
     let commit_sync_config =
@@ -576,8 +581,9 @@ async fn run_catchup_delete_head<'a>(
     let deletion_chunk_size = args::get_usize(&sub_m, DELETION_CHUNK_SIZE, 10000);

+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
     let cs_args_factory = get_catchup_head_delete_commits_cs_args_factory(&sub_m)?;
-    let (_, repo_config) = args::get_config(&matches)?;
+    let (_, repo_config) = args::get_config(config_store, &matches)?;

     let wait_secs = args::get_u64(&sub_m, WAIT_SECS, 0);
@@ -794,11 +800,14 @@ async fn process_stream_and_wait_for_replication<'a>(
     commit_syncer: &CommitSyncer<SqlSyncedCommitMapping>,
     mut s: impl Stream<Item = Result<u64>> + std::marker::Unpin,
 ) -> Result<(), Error> {
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
     let small_repo = commit_syncer.get_small_repo();
     let large_repo = commit_syncer.get_large_repo();

-    let (_, small_repo_config) = args::get_config_by_repoid(matches, small_repo.get_repoid())?;
-    let (_, large_repo_config) = args::get_config_by_repoid(matches, large_repo.get_repoid())?;
+    let (_, small_repo_config) =
+        args::get_config_by_repoid(config_store, matches, small_repo.get_repoid())?;
+    let (_, large_repo_config) =
+        args::get_config_by_repoid(config_store, matches, large_repo.get_repoid())?;
     if small_repo_config.storage_config.metadata != large_repo_config.storage_config.metadata {
         return Err(format_err!(
             "{} and {} have different db metadata configs: {:?} vs {:?}",
@@ -857,8 +866,8 @@ async fn get_commit_syncer(
     ctx: &CoreContext,
     matches: &ArgMatches<'_>,
 ) -> Result<CommitSyncer<SqlSyncedCommitMapping>> {
-    let target_repo_id = args::get_target_repo_id(&matches)?;
-    let config_store = args::get_config_store()?;
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;
     let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), config_store)?;
     let commit_syncer_args =
         create_commit_syncer_args_from_matches(ctx.fb, ctx.logger(), &matches).await?;
@@ -877,8 +886,11 @@ fn get_version(matches: &ArgMatches<'_>) -> Result<CommitSyncConfigVersion> {
     ))
 }

-fn get_and_verify_repo_config<'a>(matches: &ArgMatches<'a>) -> Result<RepoConfig> {
-    args::get_config(&matches).and_then(|(repo_name, repo_config)| {
+fn get_and_verify_repo_config<'a>(
+    config_store: &ConfigStore,
+    matches: &ArgMatches<'a>,
+) -> Result<RepoConfig> {
+    args::get_config(config_store, &matches).and_then(|(repo_name, repo_config)| {
         let repo_id = repo_config.repoid;
         repo_config
             .commit_sync_config
@@ -903,13 +915,13 @@ fn main(fb: FacebookInit) -> Result<()> {
     let matches = app.get_matches();
     args::init_cachelib(fb, &matches, None);
     let logger = args::init_logging(fb, &matches);
-    args::init_config_store(fb, &logger, &matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;
     let ctx = CoreContext::new_with_logger(fb, logger.clone());

     let subcommand_future = async {
         match matches.subcommand() {
             (MOVE, Some(sub_m)) => {
-                let repo_config = get_and_verify_repo_config(&matches)?;
+                let repo_config = get_and_verify_repo_config(config_store, &matches)?;
                 run_move(ctx, &matches, sub_m, repo_config).await
             }
             (MERGE, Some(sub_m)) => run_merge(ctx, &matches, sub_m).await,

==== changed file ====

@@ -397,13 +397,17 @@ async fn run(
     ctx: CoreContext,
     matches: ArgMatches<'static>,
 ) -> Result<(), Error> {
+    let config_store = args::init_config_store(ctx.fb, ctx.logger(), &matches)?;
     let mut scuba_sample = get_scuba_sample(ctx.clone(), &matches);
-    let mutable_counters = args::open_source_sql::<SqlMutableCounters>(fb, &matches).compat();
+    let mutable_counters =
+        args::open_source_sql::<SqlMutableCounters>(fb, config_store, &matches).compat();

-    let source_repo_id = args::get_source_repo_id(&matches)?;
-    let target_repo_id = args::get_target_repo_id(&matches)?;
+    let source_repo_id = args::get_source_repo_id(config_store, &matches)?;
+    let target_repo_id = args::get_target_repo_id(config_store, &matches)?;

-    let (_, source_repo_config) = args::get_config_by_repoid(&matches, source_repo_id)?;
-    let (_, target_repo_config) = args::get_config_by_repoid(&matches, target_repo_id)?;
+    let (_, source_repo_config) =
+        args::get_config_by_repoid(config_store, &matches, source_repo_id)?;
+    let (_, target_repo_config) =
+        args::get_config_by_repoid(config_store, &matches, target_repo_id)?;

     let logger = ctx.logger();
     let source_repo = args::open_repo_with_repo_id(fb, &logger, source_repo_id, &matches).compat();
@@ -414,7 +418,6 @@ async fn run(
     let commit_syncer_args = create_commit_syncer_args_from_matches(fb, &logger, &matches).await?;

-    let config_store = args::init_config_store(ctx.fb, logger, &matches)?;
     let live_commit_sync_config = Arc::new(CfgrLiveCommitSyncConfig::new(&logger, &config_store)?);
     let commit_sync_config =
         live_commit_sync_config.get_current_commit_sync_config(&ctx, source_repo.get_repoid())?;

==== changed file ====

@@ -117,14 +117,14 @@ async fn start(
     matches: ArgMatches<'_>,
 ) -> Result<()> {
     debug!(logger, "Reading args");
-    let repo_configs = args::load_repo_configs(&matches)?;
+    let config_store = args::init_config_store(fb, &logger, &matches)?;
+    let repo_configs = args::load_repo_configs(config_store, &matches)?;
     let mysql_options = args::parse_mysql_options(&matches);
     let readonly_storage = args::parse_readonly_storage(&matches);
     let blobstore_options = args::parse_blobstore_options(&matches);
     let disabled_hooks = args::parse_disabled_hooks_with_repo_prefix(&matches, &logger)?;
     let trusted_proxy_idents = parse_identities(&matches)?;
     let tls_session_data_log = matches.value_of(ARG_TLS_SESSION_DATA_LOG_FILE);
-    let config_store = args::init_config_store(fb, &logger, &matches)?;
     let mut scuba_logger = args::get_scuba_sample_builder(fb, &matches)?;

     debug!(logger, "Initializing Mononoke API");

==== changed file ====

@@ -74,8 +74,6 @@ const ARG_LIVE_CONFIG: &str = "live-config";
 const ARG_COMMAND: &str = "command";
 const ARG_MULTIPLEXEDBLOB_SAMPLING: &str = "multiplexblob-sampling";

-const LIVE_CONFIG_POLL_INTERVAL: u64 = 5;
-
 fn should_admit(config: &FastReplayConfig) -> bool {
     let admission_rate = config.admission_rate();
@@ -225,7 +223,8 @@ async fn bootstrap_repositories<'a>(
     logger: &Logger,
     scuba: &ScubaSampleBuilder,
 ) -> Result<HashMap<String, FastReplayDispatcher>, Error> {
-    let config = args::load_repo_configs(&matches)?;
+    let config_store = args::init_config_store(fb, logger, matches)?;
+    let config = args::load_repo_configs(config_store, &matches)?;

     let mysql_options = cmdlib::args::parse_mysql_options(&matches);
     let caching = cmdlib::args::init_cachelib(fb, &matches, None);
@@ -390,13 +389,12 @@ impl ReplayOpts {
         };
         let aliases = Arc::new(aliases);

-        cmdlib::args::init_config_store(fb, &logger, matches)?;
+        let config_store = cmdlib::args::init_config_store(fb, &logger, matches)?;
         let config = cmdlib::args::get_config_handle(
-            fb,
-            logger,
+            config_store,
+            &logger,
             matches.value_of(ARG_LIVE_CONFIG),
-            LIVE_CONFIG_POLL_INTERVAL,
         )
         .with_context(|| format!("While parsing --{}", ARG_LIVE_CONFIG))?;

View File

@ -69,8 +69,8 @@ async fn get_changesets<'a>(
fn main(fb: FacebookInit) -> Result<()> { fn main(fb: FacebookInit) -> Result<()> {
let matches = setup_app().get_matches(); let matches = setup_app().get_matches();
let logger = cmdlib::args::init_logging(fb, &matches); let logger = cmdlib::args::init_logging(fb, &matches);
cmdlib::args::init_config_store(fb, &logger, &matches)?; let config_store = cmdlib::args::init_config_store(fb, &logger, &matches)?;
let (repo_name, config) = cmdlib::args::get_config(&matches)?; let (repo_name, config) = cmdlib::args::get_config(config_store, &matches)?;
info!(logger, "Hook tailer is starting"); info!(logger, "Hook tailer is starting");
let ctx = CoreContext::new_with_logger(fb, logger.clone()); let ctx = CoreContext::new_with_logger(fb, logger.clone());
@ -93,9 +93,10 @@ async fn run_hook_tailer<'a>(
matches: &'a ArgMatches<'a>, matches: &'a ArgMatches<'a>,
logger: &Logger, logger: &Logger,
) -> Result<(), Error> { ) -> Result<(), Error> {
let config_store = cmdlib::args::init_config_store(fb, logger, matches)?;
let bookmark_name = matches.value_of("bookmark").unwrap(); let bookmark_name = matches.value_of("bookmark").unwrap();
let bookmark = BookmarkName::new(bookmark_name)?; let bookmark = BookmarkName::new(bookmark_name)?;
let common_config = cmdlib::args::load_common_config(&matches)?; let common_config = cmdlib::args::load_common_config(config_store, &matches)?;
let limit = cmdlib::args::get_usize(&matches, "limit", 1000); let limit = cmdlib::args::get_usize(&matches, "limit", 1000);
let concurrency = cmdlib::args::get_usize(&matches, "concurrency", 20); let concurrency = cmdlib::args::get_usize(&matches, "concurrency", 20);
let log_interval = cmdlib::args::get_usize(&matches, "log_interval", 500); let log_interval = cmdlib::args::get_usize(&matches, "log_interval", 500);
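
The small getters change mechanically: each gains a leading `&ConfigStore`. A sketch of the lookups this hunk performs, assuming only that signature change:

    use anyhow::Result;
    use clap::ArgMatches;
    use cmdlib::args;
    use fbinit::FacebookInit;
    use slog::Logger;

    fn read_configs(fb: FacebookInit, logger: &Logger, matches: &ArgMatches<'_>) -> Result<()> {
        let config_store = args::init_config_store(fb, logger, matches)?;
        // Repo-level and common config resolve through the same store.
        let (repo_name, repo_config) = args::get_config(config_store, matches)?;
        let common_config = args::load_common_config(config_store, matches)?;
        let _ = (repo_name, repo_config, common_config);
        Ok(())
    }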

View File

@ -222,7 +222,7 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
let (caching, logger, mut runtime) = let (caching, logger, mut runtime) =
args::init_mononoke(fb, &matches, Some(CACHE_OBJECT_SIZE))?; args::init_mononoke(fb, &matches, Some(CACHE_OBJECT_SIZE))?;
args::init_config_store(fb, &logger, &matches)?; let config_store = args::init_config_store(fb, &logger, &matches)?;
let mysql_options = args::parse_mysql_options(&matches); let mysql_options = args::parse_mysql_options(&matches);
let blobstore_options = args::parse_blobstore_options(&matches); let blobstore_options = args::parse_blobstore_options(&matches);
@ -260,7 +260,7 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
matches.value_of(ARG_UPSTREAM_URL), matches.value_of(ARG_UPSTREAM_URL),
)?; )?;
let RepoConfigs { repos, common } = args::load_repo_configs(&matches)?; let RepoConfigs { repos, common } = args::load_repo_configs(config_store, &matches)?;
let futs = repos let futs = repos
.into_iter() .into_iter()
@ -319,18 +319,8 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
let will_exit = Arc::new(AtomicBool::new(false)); let will_exit = Arc::new(AtomicBool::new(false));
let config_interval: u64 = matches
.value_of(ARG_LIVE_CONFIG_FETCH_INTERVAL)
.unwrap()
.parse()?;
let config_handle = get_config_handle(
fb,
logger.clone(),
matches.value_of(ARG_LIVE_CONFIG),
config_interval,
)
.context(Error::msg("Failed to load configuration"))?;
let config_handle = get_config_handle(config_store, &logger, matches.value_of(ARG_LIVE_CONFIG))
.context(Error::msg("Failed to load configuration"))?;
let max_upload_size: Option<u64> = matches let max_upload_size: Option<u64> = matches
.value_of(ARG_MAX_UPLOAD_SIZE) .value_of(ARG_MAX_UPLOAD_SIZE)

View File

@ -74,6 +74,7 @@ async fn do_main<'a>(
matches: &ArgMatches<'a>, matches: &ArgMatches<'a>,
logger: &Logger, logger: &Logger,
) -> Result<(), Error> { ) -> Result<(), Error> {
let config_store = args::init_config_store(fb, logger, matches)?;
let mut scuba = args::get_scuba_sample_builder(fb, &matches)?; let mut scuba = args::get_scuba_sample_builder(fb, &matches)?;
scuba.add_common_server_data(); scuba.add_common_server_data();
@ -82,7 +83,7 @@ async fn do_main<'a>(
let blobstore_options = cmdlib::args::parse_blobstore_options(&matches); let blobstore_options = cmdlib::args::parse_blobstore_options(&matches);
let caching = cmdlib::args::init_cachelib(fb, &matches, None); let caching = cmdlib::args::init_cachelib(fb, &matches, None);
let RepoConfigs { repos, common } = args::load_repo_configs(&matches)?; let RepoConfigs { repos, common } = args::load_repo_configs(config_store, &matches)?;
let censored_scuba_params = common.censored_scuba_params; let censored_scuba_params = common.censored_scuba_params;
let location = match matches.subcommand() { let location = match matches.subcommand() {

View File

@ -660,9 +660,10 @@ fn run(ctx: CoreContext, matches: ArgMatches<'static>) -> BoxFuture<(), Error> {
let mysql_options = args::parse_mysql_options(&matches); let mysql_options = args::parse_mysql_options(&matches);
let readonly_storage = args::parse_readonly_storage(&matches); let readonly_storage = args::parse_readonly_storage(&matches);
let config_store = try_boxfuture!(args::init_config_store(ctx.fb, ctx.logger(), &matches));
let repo_id = args::get_repo_id(&matches).expect("need repo id"); let repo_id = args::get_repo_id(config_store, &matches).expect("need repo id");
let repo_config = args::get_config(&matches); let repo_config = args::get_config(config_store, &matches);
let (repo_name, repo_config) = try_boxfuture!(repo_config); let (repo_name, repo_config) = try_boxfuture!(repo_config);
let base_retry_delay_ms = args::get_u64_opt(&matches, "base-retry-delay-ms").unwrap_or(1000); let base_retry_delay_ms = args::get_u64_opt(&matches, "base-retry-delay-ms").unwrap_or(1000);
@ -792,7 +793,7 @@ fn run(ctx: CoreContext, matches: ArgMatches<'static>) -> BoxFuture<(), Error> {
ok(()).boxify() ok(()).boxify()
}; };
let bookmarks = args::open_sql::<SqlBookmarksBuilder>(ctx.fb, &matches); let bookmarks = args::open_sql::<SqlBookmarksBuilder>(ctx.fb, config_store, &matches);
myrouter_ready_fut myrouter_ready_fut
.join(bookmarks) .join(bookmarks)
@ -884,7 +885,7 @@ fn run(ctx: CoreContext, matches: ArgMatches<'static>) -> BoxFuture<(), Error> {
); );
} }
let loop_forever = sub_m.is_present("loop-forever"); let loop_forever = sub_m.is_present("loop-forever");
let mutable_counters = args::open_sql::<SqlMutableCounters>(ctx.fb, &matches); let mutable_counters = args::open_sql::<SqlMutableCounters>(ctx.fb, config_store, &matches);
let exit_path = sub_m let exit_path = sub_m
.value_of("exit-file") .value_of("exit-file")
.map(|name| Path::new(name).to_path_buf()); .map(|name| Path::new(name).to_path_buf());
@ -1073,18 +1074,15 @@ fn get_repo_sqldb_address<'a>(
matches: &ArgMatches<'a>, matches: &ArgMatches<'a>,
repo_name: &HgsqlName, repo_name: &HgsqlName,
) -> Result<Option<String>, Error> { ) -> Result<Option<String>, Error> {
let config_store = args::init_config_store(ctx.fb, ctx.logger(), &matches)?;
if let Some(db_addr) = matches.value_of("repo-lock-db-address") { if let Some(db_addr) = matches.value_of("repo-lock-db-address") {
return Ok(Some(db_addr.to_string())); return Ok(Some(db_addr.to_string()));
} }
if !matches.is_present("lock-on-failure") { if !matches.is_present("lock-on-failure") {
return Ok(None); return Ok(None);
} }
let handle = args::get_config_handle(
ctx.fb,
ctx.logger().clone(),
Some(CONFIGERATOR_HGSERVER_PATH),
1,
)?;
let handle =
args::get_config_handle(config_store, ctx.logger(), Some(CONFIGERATOR_HGSERVER_PATH))?;
let config: Arc<ServerConfig> = handle.get(); let config: Arc<ServerConfig> = handle.get();
match config.sql_confs.get(AsRef::<str>::as_ref(repo_name)) { match config.sql_confs.get(AsRef::<str>::as_ref(repo_name)) {
Some(sql_conf) => Ok(Some(sql_conf.db_tier.clone())), Some(sql_conf) => Ok(Some(sql_conf.db_tier.clone())),
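
`open_sql` (and later `open_source_sql`) take the store as a new second argument but still return an old-style 0.1 future, so the surrounding `.compat()` and `.join` plumbing is unchanged. A sketch, assuming `SqlMutableCounters` is exported by the `mutable_counters` crate and the future's error type is `anyhow::Error`:

    use anyhow::Error;
    use cached_config::ConfigStore;
    use clap::ArgMatches;
    use cmdlib::args;
    use fbinit::FacebookInit;
    use futures::compat::Future01CompatExt;
    use mutable_counters::SqlMutableCounters;

    async fn open_counters(
        fb: FacebookInit,
        config_store: &ConfigStore,
        matches: &ArgMatches<'_>,
    ) -> Result<SqlMutableCounters, Error> {
        // Only the signature changed; the returned 0.1-style future is
        // bridged with compat(), as at the call sites in this diff.
        args::open_sql::<SqlMutableCounters>(fb, config_store, matches)
            .compat()
            .await
    }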

View File

@ -769,6 +769,7 @@ async fn get_pushredirected_vars(
matches: &ArgMatches<'_>, matches: &ArgMatches<'_>,
live_commit_sync_config: CfgrLiveCommitSyncConfig, live_commit_sync_config: CfgrLiveCommitSyncConfig,
) -> Result<(BlobRepo, RepoImportSetting, Syncers<SqlSyncedCommitMapping>), Error> { ) -> Result<(BlobRepo, RepoImportSetting, Syncers<SqlSyncedCommitMapping>), Error> {
let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
let large_repo_id = large_repo_config.repoid; let large_repo_id = large_repo_config.repoid;
let large_repo = args::open_repo_with_repo_id(ctx.fb, &ctx.logger(), large_repo_id, &matches) let large_repo = args::open_repo_with_repo_id(ctx.fb, &ctx.logger(), large_repo_id, &matches)
.compat() .compat()
@ -790,7 +791,7 @@ async fn get_pushredirected_vars(
large_repo.name() large_repo.name()
)); ));
} }
let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(ctx.fb, &matches) let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(ctx.fb, config_store, &matches)
.compat() .compat()
.await?; .await?;
let syncers = create_commit_syncers( let syncers = create_commit_syncers(
@ -859,7 +860,8 @@ async fn repo_import(
importing_bookmark, importing_bookmark,
dest_bookmark, dest_bookmark,
}; };
let (_, mut repo_config) = args::get_config_by_repoid(&matches, repo.get_repoid())?; let (_, mut repo_config) =
args::get_config_by_repoid(config_store, &matches, repo.get_repoid())?;
let mut call_sign = repo_config.phabricator_callsign.clone(); let mut call_sign = repo_config.phabricator_callsign.clone();
if !recovery_fields.phab_check_disabled && call_sign.is_none() { if !recovery_fields.phab_check_disabled && call_sign.is_none() {
return Err(format_err!( return Err(format_err!(
@ -876,7 +878,7 @@ async fn repo_import(
}; };
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?; let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?;
let configs = args::load_repo_configs(&matches)?; let configs = args::load_repo_configs(config_store, &matches)?;
let mysql_options = args::parse_mysql_options(&matches); let mysql_options = args::parse_mysql_options(&matches);
let readonly_storage = args::parse_readonly_storage(&matches); let readonly_storage = args::parse_readonly_storage(&matches);
@ -946,7 +948,7 @@ async fn repo_import(
Ok(Some(mutable_path)) Ok(Some(mutable_path))
}); });
let mutable_counters = args::open_sql::<SqlMutableCounters>(ctx.fb, &matches) let mutable_counters = args::open_sql::<SqlMutableCounters>(ctx.fb, config_store, &matches)
.compat() .compat()
.await?; .await?;
@ -1130,11 +1132,13 @@ async fn check_additional_setup_steps(
dest_bookmark dest_bookmark
); );
let config_store = args::init_config_store(fb, ctx.logger(), &matches)?;
let repo_import_setting = RepoImportSetting { let repo_import_setting = RepoImportSetting {
importing_bookmark, importing_bookmark,
dest_bookmark, dest_bookmark,
}; };
let (_, repo_config) = args::get_config_by_repoid(&matches, repo.get_repoid())?; let (_, repo_config) = args::get_config_by_repoid(config_store, &matches, repo.get_repoid())?;
let call_sign = repo_config.phabricator_callsign; let call_sign = repo_config.phabricator_callsign;
let phab_check_disabled = sub_arg_matches.is_present(ARG_PHAB_CHECK_DISABLED); let phab_check_disabled = sub_arg_matches.is_present(ARG_PHAB_CHECK_DISABLED);
@ -1147,9 +1151,8 @@ async fn check_additional_setup_steps(
)); ));
} }
let config_store = args::init_config_store(fb, ctx.logger(), &matches)?;
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?;
let configs = args::load_repo_configs(&matches)?;
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), config_store)?;
let configs = args::load_repo_configs(config_store, &matches)?;
let maybe_large_repo_config = get_large_repo_config_if_pushredirected( let maybe_large_repo_config = get_large_repo_config_if_pushredirected(
&ctx, &ctx,
&repo, &repo,
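
A subtlety in this hunk: the function used to build its own store and pass `&config_store`; now the store is initialized once near the top of the caller (the line added earlier in this hunk) and the returned reference flows through, which is why the borrow disappears at this call site. Sketched under the assumption that `init_config_store` yields `&ConfigStore`:

    use anyhow::Result;
    use cached_config::ConfigStore;
    use context::CoreContext;
    use live_commit_sync_config::CfgrLiveCommitSyncConfig;

    fn live_sync_config(
        ctx: &CoreContext,
        config_store: &ConfigStore,
    ) -> Result<CfgrLiveCommitSyncConfig> {
        // The store already arrives as a reference, so no extra `&`.
        CfgrLiveCommitSyncConfig::new(ctx.logger(), config_store)
    }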

View File

@ -55,7 +55,7 @@ fn main(fb: FacebookInit) -> Result<()> {
info!(root_log, "Starting up"); info!(root_log, "Starting up");
let config = args::load_repo_configs(&matches)?; let config = args::load_repo_configs(config_store, &matches)?;
let acceptor = { let acceptor = {
let cert = matches.value_of("cert").unwrap().to_string(); let cert = matches.value_of("cert").unwrap().to_string();
let private_key = matches.value_of("private_key").unwrap().to_string(); let private_key = matches.value_of("private_key").unwrap().to_string();

View File

@ -60,49 +60,52 @@ fn main(fb: FacebookInit) -> Result<()> {
let ctx = CoreContext::test_mock(fb); let ctx = CoreContext::test_mock(fb);
let matches = setup_app().get_matches(); let matches = setup_app().get_matches();
args::init_config_store(fb, None, &matches)?; let config_store = args::init_config_store(fb, None, &matches)?;
let repo_id = args::get_repo_id(&matches).unwrap(); let repo_id = args::get_repo_id(config_store, &matches).unwrap();
let fut = args::open_sql::<SqlBookmarksBuilder>(fb, &matches).and_then(move |builder| { let fut = args::open_sql::<SqlBookmarksBuilder>(fb, config_store, &matches).and_then(
move |builder| {
let bookmarks = builder.with_repo_id(repo_id); let bookmarks = builder.with_repo_id(repo_id);
let name = matches.value_of(BOOKMARK).unwrap().to_string(); let name = matches.value_of(BOOKMARK).unwrap().to_string();
let reason = match matches.is_present(BLOBIMPORT) { let reason = match matches.is_present(BLOBIMPORT) {
true => BookmarkUpdateReason::Blobimport, true => BookmarkUpdateReason::Blobimport,
false => BookmarkUpdateReason::TestMove, false => BookmarkUpdateReason::TestMove,
}; };
let bookmark = BookmarkName::new(name).unwrap(); let bookmark = BookmarkName::new(name).unwrap();
let mut txn = bookmarks.create_transaction(ctx); let mut txn = bookmarks.create_transaction(ctx);
match matches.subcommand() { match matches.subcommand() {
(CREATE, Some(sub_m)) => { (CREATE, Some(sub_m)) => {
txn.create( txn.create(
&bookmark, &bookmark,
ChangesetId::from_str(&sub_m.value_of(ID).unwrap().to_string()).unwrap(), ChangesetId::from_str(&sub_m.value_of(ID).unwrap().to_string()).unwrap(),
reason, reason,
None, None,
) )
.unwrap(); .unwrap();
} }
(UPDATE, Some(sub_m)) => { (UPDATE, Some(sub_m)) => {
txn.update( txn.update(
&bookmark, &bookmark,
ChangesetId::from_str(&sub_m.value_of(TO_ID).unwrap().to_string()).unwrap(), ChangesetId::from_str(&sub_m.value_of(TO_ID).unwrap().to_string()).unwrap(),
ChangesetId::from_str(&sub_m.value_of(FROM_ID).unwrap().to_string()) ChangesetId::from_str(&sub_m.value_of(FROM_ID).unwrap().to_string()).unwrap(),
.unwrap(),
reason, reason,
None, None,
) )
.unwrap(); .unwrap();
} }
_ => { _ => {
println!("{}", matches.usage()); println!("{}", matches.usage());
::std::process::exit(1); ::std::process::exit(1);
} }
} }
txn.commit().compat() txn.commit().compat()
}); },
);
tokio::run(fut.map(|_| ()).map_err(move |err| { tokio::run(fut.map(|_| ()).map_err(move |err| {
println!("{:?}", err); println!("{:?}", err);
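
The first changed line of this hunk shows that the logger argument of `init_config_store` is optional — this tool passes `None`. A sketch assuming the parameter accepts anything convertible to `Option<&Logger>` (the exact bound is not visible in the diff):

    use anyhow::Result;
    use clap::ArgMatches;
    use cmdlib::args;
    use fbinit::FacebookInit;

    fn store_without_logger(fb: FacebookInit, matches: &ArgMatches<'_>) -> Result<()> {
        // No logger is wired up this early, so pass None; config loading
        // works the same, just without store-side log output.
        let config_store = args::init_config_store(fb, None, matches)?;
        let repo_id = args::get_repo_id(config_store, matches)?;
        let _ = repo_id;
        Ok(())
    }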

View File

@ -26,6 +26,7 @@ scuba_ext = { path = "../common/scuba_ext" }
sql_construct = { path = "../common/sql_construct" } sql_construct = { path = "../common/sql_construct" }
sql_ext = { path = "../common/rust/sql_ext" } sql_ext = { path = "../common/rust/sql_ext" }
unbundle = { path = "../repo_client/unbundle" } unbundle = { path = "../repo_client/unbundle" }
cached_config = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
fbinit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" } fbinit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
futures_stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" } futures_stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
sql = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" } sql = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -7,6 +7,7 @@
use anyhow::Error; use anyhow::Error;
use bookmarks::BookmarkName; use bookmarks::BookmarkName;
use cached_config::ConfigStore;
use clap::ArgMatches; use clap::ArgMatches;
use cmdlib::args; use cmdlib::args;
use context::CoreContext; use context::CoreContext;
@ -87,12 +88,13 @@ queries! {
impl HgRecordingClient { impl HgRecordingClient {
pub async fn new( pub async fn new(
fb: FacebookInit, fb: FacebookInit,
config_store: &ConfigStore,
matches: &ArgMatches<'_>, matches: &ArgMatches<'_>,
) -> Result<HgRecordingClient, Error> { ) -> Result<HgRecordingClient, Error> {
let sql = args::open_sql::<HgRecordingConnection>(fb, matches) let sql = args::open_sql::<HgRecordingConnection>(fb, config_store, matches)
.compat() .compat()
.await?; .await?;
let repo_id = args::get_repo_id(matches)?; let repo_id = args::get_repo_id(config_store, matches)?;
Ok(HgRecordingClient { repo_id, sql }) Ok(HgRecordingClient { repo_id, sql })
} }
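
Types that read configuration in their constructors now advertise it in their signatures. A hypothetical call site for the updated `HgRecordingClient::new`; the wrapper function is illustrative:

    use anyhow::Error;
    use cached_config::ConfigStore;
    use clap::ArgMatches;
    use fbinit::FacebookInit;

    // HgRecordingClient is the type defined in this file; this only
    // shows the extra argument a caller now threads through.
    async fn build_client(
        fb: FacebookInit,
        config_store: &ConfigStore,
        matches: &ArgMatches<'_>,
    ) -> Result<HgRecordingClient, Error> {
        HgRecordingClient::new(fb, config_store, matches).await
    }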

View File

@ -73,12 +73,13 @@ async fn get_replay_stream<'a>(
repo: &'a BlobRepo, repo: &'a BlobRepo,
matches: &'a ArgMatches<'a>, matches: &'a ArgMatches<'a>,
) -> Result<impl Stream<Item = Result<ReplaySpec<'a>, Error>> + 'a, Error> { ) -> Result<impl Stream<Item = Result<ReplaySpec<'a>, Error>> + 'a, Error> {
let config_store = args::init_config_store(ctx.fb, ctx.logger(), matches)?;
match matches.subcommand() { match matches.subcommand() {
(SUBCOMMAND_HG_RECORDING, Some(sub)) => { (SUBCOMMAND_HG_RECORDING, Some(sub)) => {
let bundle_helper = sub.value_of(ARG_HG_BUNDLE_HELPER).unwrap(); let bundle_helper = sub.value_of(ARG_HG_BUNDLE_HELPER).unwrap();
let bundle_id: i64 = sub.value_of(ARG_HG_RECORDING_ID).unwrap().parse()?; let bundle_id: i64 = sub.value_of(ARG_HG_RECORDING_ID).unwrap().parse()?;
let client = HgRecordingClient::new(ctx.fb, matches).await?; let client = HgRecordingClient::new(ctx.fb, config_store, matches).await?;
let entry = client let entry = client
.next_entry_by_id(ctx, bundle_id - 1) .next_entry_by_id(ctx, bundle_id - 1)
@ -102,7 +103,7 @@ async fn get_replay_stream<'a>(
.transpose()? .transpose()?
.map(Duration::from_secs); .map(Duration::from_secs);
let client = HgRecordingClient::new(ctx.fb, matches).await?; let client = HgRecordingClient::new(ctx.fb, config_store, matches).await?;
let onto_rev = repo let onto_rev = repo
.get_bookmark(ctx.clone(), &onto) .get_bookmark(ctx.clone(), &onto)
@ -380,6 +381,7 @@ async fn do_main(
) -> Result<(), Error> { ) -> Result<(), Error> {
// TODO: Would want Scuba and such here. // TODO: Would want Scuba and such here.
let ctx = CoreContext::new_with_logger(fb, logger.clone()); let ctx = CoreContext::new_with_logger(fb, logger.clone());
let config_store = args::init_config_store(fb, logger, matches)?;
let unbundle_concurrency = matches let unbundle_concurrency = matches
.value_of(ARG_UNBUNDLE_CONCURRENCY) .value_of(ARG_UNBUNDLE_CONCURRENCY)
@ -392,8 +394,8 @@ async fn do_main(
let readonly_storage = args::parse_readonly_storage(&matches); let readonly_storage = args::parse_readonly_storage(&matches);
let caching = args::init_cachelib(fb, &matches, None); let caching = args::init_cachelib(fb, &matches, None);
let repo_id = args::get_repo_id(matches)?; let repo_id = args::get_repo_id(config_store, matches)?;
let (repo_name, repo_config) = args::get_config_by_repoid(&matches, repo_id)?; let (repo_name, repo_config) = args::get_config_by_repoid(config_store, &matches, repo_id)?;
info!( info!(
logger, logger,

View File

@ -318,6 +318,7 @@ pub async fn corpus<'a>(
matches: &'a ArgMatches<'a>, matches: &'a ArgMatches<'a>,
sub_m: &'a ArgMatches<'a>, sub_m: &'a ArgMatches<'a>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let config_store = args::init_config_store(fb, &logger, matches)?;
let output_dir = sub_m.value_of(OUTPUT_DIR_ARG).map(|s| s.to_string()); let output_dir = sub_m.value_of(OUTPUT_DIR_ARG).map(|s| s.to_string());
let corpus_sampler = Arc::new(CorpusSamplingHandler::<CorpusSample>::new( let corpus_sampler = Arc::new(CorpusSamplingHandler::<CorpusSample>::new(
output_dir.clone(), output_dir.clone(),
@ -333,7 +334,7 @@ pub async fn corpus<'a>(
) )
.await?; .await?;
let repo_name = args::get_repo_name(&matches)?; let repo_name = args::get_repo_name(config_store, &matches)?;
let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(100); let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(100);
let sample_offset = args::get_u64_opt(&sub_m, SAMPLE_OFFSET_ARG).unwrap_or(0); let sample_offset = args::get_u64_opt(&sub_m, SAMPLE_OFFSET_ARG).unwrap_or(0);
let progress_interval_secs = args::get_u64_opt(&sub_m, PROGRESS_INTERVAL_ARG); let progress_interval_secs = args::get_u64_opt(&sub_m, PROGRESS_INTERVAL_ARG);

View File

@ -313,6 +313,7 @@ pub async fn scrub_objects<'a>(
sub_m: &'a ArgMatches<'a>, sub_m: &'a ArgMatches<'a>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let scrub_sampler = Arc::new(WalkSampleMapping::<Node, ScrubSample>::new()); let scrub_sampler = Arc::new(WalkSampleMapping::<Node, ScrubSample>::new());
let config_store = args::init_config_store(fb, &logger, matches)?;
let (datasources, walk_params) = setup_common( let (datasources, walk_params) = setup_common(
SCRUB, SCRUB,
@ -324,7 +325,7 @@ pub async fn scrub_objects<'a>(
) )
.await?; .await?;
let repo_stats_key = args::get_repo_name(&matches)?; let repo_stats_key = args::get_repo_name(config_store, &matches)?;
let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(1); let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(1);
let sample_offset = args::get_u64_opt(&sub_m, SAMPLE_OFFSET_ARG).unwrap_or(0); let sample_offset = args::get_u64_opt(&sub_m, SAMPLE_OFFSET_ARG).unwrap_or(0);

View File

@ -644,9 +644,10 @@ pub fn setup_common<'a>(
sub_m: &'a ArgMatches<'a>, sub_m: &'a ArgMatches<'a>,
) -> impl Future<Output = Result<(RepoWalkDatasources, RepoWalkParams), Error>> + 'a { ) -> impl Future<Output = Result<(RepoWalkDatasources, RepoWalkParams), Error>> + 'a {
async move { async move {
let (_, config) = args::get_config(&matches)?; let config_store = args::init_config_store(fb, logger, matches)?;
let (_, config) = args::get_config(config_store, &matches)?;
let quiet = sub_m.is_present(QUIET_ARG); let quiet = sub_m.is_present(QUIET_ARG);
let common_config = cmdlib::args::load_common_config(&matches)?; let common_config = cmdlib::args::load_common_config(config_store, &matches)?;
let scheduled_max = args::get_usize_opt(&sub_m, SCHEDULED_MAX_ARG).unwrap_or(4096) as usize; let scheduled_max = args::get_usize_opt(&sub_m, SCHEDULED_MAX_ARG).unwrap_or(4096) as usize;
let inner_blobstore_id = args::get_u64_opt(&sub_m, INNER_BLOBSTORE_ID_ARG); let inner_blobstore_id = args::get_u64_opt(&sub_m, INNER_BLOBSTORE_ID_ARG);
let tail_secs = args::get_u64_opt(&sub_m, TAIL_INTERVAL_ARG); let tail_secs = args::get_u64_opt(&sub_m, TAIL_INTERVAL_ARG);
@ -767,7 +768,7 @@ pub fn setup_common<'a>(
let storage_id = matches.value_of(STORAGE_ID_ARG); let storage_id = matches.value_of(STORAGE_ID_ARG);
let storage_config = match storage_id { let storage_config = match storage_id {
Some(storage_id) => { Some(storage_id) => {
let mut configs = args::load_storage_configs(&matches)?; let mut configs = args::load_storage_configs(config_store, &matches)?;
configs.storage.remove(storage_id).ok_or_else(|| { configs.storage.remove(storage_id).ok_or_else(|| {
format_err!( format_err!(
"Storage id `{}` not found in {:?}", "Storage id `{}` not found in {:?}",
@ -782,7 +783,7 @@ pub fn setup_common<'a>(
let blobstore_options = args::parse_blobstore_options(&matches); let blobstore_options = args::parse_blobstore_options(&matches);
let scuba_table = sub_m.value_of(SCUBA_TABLE_ARG).map(|a| a.to_string()); let scuba_table = sub_m.value_of(SCUBA_TABLE_ARG).map(|a| a.to_string());
let repo_name = args::get_repo_name(&matches)?; let repo_name = args::get_repo_name(config_store, &matches)?;
let mut scuba_builder = ScubaSampleBuilder::with_opt_table(fb, scuba_table.clone()); let mut scuba_builder = ScubaSampleBuilder::with_opt_table(fb, scuba_table.clone());
scuba_builder.add_common_server_data(); scuba_builder.add_common_server_data();
scuba_builder.add(WALK_TYPE, walk_stats_key); scuba_builder.add(WALK_TYPE, walk_stats_key);
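
With this change the walker's `setup_common` resolves repo config, common config, storage configs, and the repo name against one explicit store. The storage-id branch above, restated as a standalone helper — assuming `StorageConfig` comes from `metaconfig_types`:

    use anyhow::{format_err, Result};
    use cached_config::ConfigStore;
    use clap::ArgMatches;
    use cmdlib::args;
    use metaconfig_types::StorageConfig;

    fn storage_config_by_id(
        config_store: &ConfigStore,
        matches: &ArgMatches<'_>,
        storage_id: &str,
    ) -> Result<StorageConfig> {
        // load_storage_configs borrows the store like every other loader.
        let mut configs = args::load_storage_configs(config_store, matches)?;
        configs
            .storage
            .remove(storage_id)
            .ok_or_else(|| format_err!("Storage id `{}` not found", storage_id))
    }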

View File

@ -276,6 +276,7 @@ pub async fn compression_benefit<'a>(
sub_m: &'a ArgMatches<'a>, sub_m: &'a ArgMatches<'a>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let sizing_sampler = Arc::new(WalkSampleMapping::<Node, SizingSample>::new()); let sizing_sampler = Arc::new(WalkSampleMapping::<Node, SizingSample>::new());
let config_store = args::init_config_store(fb, &logger, matches)?;
let (datasources, walk_params) = setup_common( let (datasources, walk_params) = setup_common(
COMPRESSION_BENEFIT, COMPRESSION_BENEFIT,
@ -287,7 +288,7 @@ pub async fn compression_benefit<'a>(
) )
.await?; .await?;
let repo_stats_key = args::get_repo_name(&matches)?; let repo_stats_key = args::get_repo_name(config_store, &matches)?;
let compression_level = args::get_i32_opt(&sub_m, COMPRESSION_LEVEL_ARG).unwrap_or(3); let compression_level = args::get_i32_opt(&sub_m, COMPRESSION_LEVEL_ARG).unwrap_or(3);
let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(100); let sample_rate = args::get_u64_opt(&sub_m, SAMPLE_RATE_ARG).unwrap_or(100);

View File

@ -620,9 +620,10 @@ pub async fn validate<'a>(
matches: &'a ArgMatches<'a>, matches: &'a ArgMatches<'a>,
sub_m: &'a ArgMatches<'a>, sub_m: &'a ArgMatches<'a>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let config_store = args::init_config_store(fb, &logger, matches)?;
let (datasources, walk_params) = let (datasources, walk_params) =
setup_common(VALIDATE, fb, &logger, None, matches, sub_m).await?; setup_common(VALIDATE, fb, &logger, None, matches, sub_m).await?;
let repo_stats_key = args::get_repo_name(&matches)?; let repo_stats_key = args::get_repo_name(config_store, &matches)?;
let mut include_check_types = parse_check_types(sub_m)?; let mut include_check_types = parse_check_types(sub_m)?;
include_check_types.retain(|t| walk_params.include_node_types.contains(&t.node_type())); include_check_types.retain(|t| walk_params.include_node_types.contains(&t.node_type()));