mononoke: remove create_commit_syncer_from_matches

Summary:
At the moment, CommitSyncConfig can be set in two ways:
1) Set in the normal mononoke config. That means the config can be updated
only after a service restart. This is the outdated way, and it won't be used
in prod.
2) Set in a separate config which can be updated on demand. This is what we
are planning to use in production.

create_commit_syncer_from_matches was used to build a CommitSyncer object via
the normal mononoke config (i.e. outdated option #1). According to the
comments, this was done so that we could read configs from disk instead of
configerator, but that doesn't make sense because configerator configs can
already be read from disk. So create_commit_syncer_from_matches doesn't look
particularly useful, and it also makes further refactorings harder. Let's
remove it.
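
For reference, this is the pattern callers are migrated to (a condensed
sketch assembled from the diffs below, not a verbatim excerpt; it assumes
fb, logger, matches, ctx and target_repo_id are already in scope):

    // Build the syncer args from CLI matches, then combine them with the
    // current commit sync config fetched through CfgrLiveCommitSyncConfig.
    let commit_syncer_args =
        create_commit_syncer_args_from_matches(fb, &logger, &matches).await?;
    let config_store = args::maybe_init_config_store(fb, &logger, &matches)
        .ok_or_else(|| format_err!("Failed initializing ConfigStore"))?;
    let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(&logger, &config_store)?;
    let commit_sync_config =
        live_commit_sync_config.get_current_commit_sync_config(&ctx, target_repo_id)?;
    let commit_syncer = commit_syncer_args.try_into_commit_syncer(&commit_sync_config)?;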

Reviewed By: ikostia

Differential Revision: D23811090

fbshipit-source-id: 114d88d9d9207c831d98dfa1cbb9e8ede5adeb1d
Stanislau Hlebik 2020-09-23 04:29:00 -07:00 committed by Facebook GitHub Bot
parent 063584843b
commit a47464f60d
9 changed files with 80 additions and 66 deletions

View File

@@ -14,7 +14,7 @@ use blobrepo::BlobRepo;
use blobrepo_factory::ReadOnlyStorage;
use clap::ArgMatches;
use cmdlib::{args, helpers::open_sql_with_config_and_mysql_options};
use cross_repo_sync::{CommitSyncer, CommitSyncerArgs};
use cross_repo_sync::CommitSyncerArgs;
use fbinit::FacebookInit;
use futures::compat::Future01CompatExt;
use futures_util::try_join;
@@ -23,34 +23,6 @@ use slog::Logger;
use sql_ext::facebook::MysqlOptions;
use synced_commit_mapping::SqlSyncedCommitMapping;
// Creates commit syncer from source to target
pub async fn create_commit_syncer_from_matches(
fb: FacebookInit,
logger: &Logger,
matches: &ArgMatches<'_>,
) -> Result<CommitSyncer<SqlSyncedCommitMapping>, Error> {
let (args, config): (CommitSyncerArgs<SqlSyncedCommitMapping>, CommitSyncConfig) =
create_commit_syncer_args_and_config_from_matches_impl(
fb, logger, matches, false, /*reverse*/
)
.await?;
args.try_into_commit_syncer(&config)
}
// Creates commit syncer from target to source
pub async fn create_reverse_commit_syncer_from_matches(
fb: FacebookInit,
logger: &Logger,
matches: &ArgMatches<'_>,
) -> Result<CommitSyncer<SqlSyncedCommitMapping>, Error> {
let (args, config): (CommitSyncerArgs<SqlSyncedCommitMapping>, CommitSyncConfig) =
create_commit_syncer_args_and_config_from_matches_impl(
fb, logger, matches, true, /*reverse*/
)
.await?;
args.try_into_commit_syncer(&config)
}
pub async fn create_commit_syncer_args_from_matches(
fb: FacebookInit,
logger: &Logger,

View File

@@ -32,7 +32,6 @@ scuba_ext = { path = "../../common/scuba_ext" }
sql_construct = { path = "../../common/sql_construct" }
sql_ext = { path = "../../common/rust/sql_ext" }
synced_commit_mapping = { path = "../synced_commit_mapping" }
cached_config = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
cloned = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
fbinit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
sql = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@@ -13,11 +13,10 @@ use backsyncer::{
};
use blobrepo_hg::BlobRepoHg;
use bookmarks::Freshness;
use cached_config::ConfigStore;
use clap::{Arg, SubCommand};
use cloned::cloned;
use cmdlib::{args, monitoring};
use cmdlib_x_repo::{create_commit_syncer_args_from_matches, create_commit_syncer_from_matches};
use cmdlib_x_repo::create_commit_syncer_args_from_matches;
use context::{CoreContext, SessionContainer};
use cross_repo_sync::{CommitSyncOutcome, CommitSyncer, CommitSyncerArgs};
use fbinit::FacebookInit;
@@ -103,17 +102,16 @@ async fn derive_target_hg_changesets(
pub async fn backsync_forever<M>(
ctx: CoreContext,
config_store: ConfigStore,
commit_syncer_args: CommitSyncerArgs<M>,
target_repo_dbs: TargetRepoDbs,
source_repo_name: String,
target_repo_name: String,
live_commit_sync_config: CfgrLiveCommitSyncConfig,
) -> Result<(), Error>
where
M: SyncedCommitMapping + Clone + 'static,
{
let target_repo_id = commit_syncer_args.get_target_repo_id();
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?;
loop {
// We only care about public pushes because draft pushes are not in the bookmark
@@ -281,16 +279,18 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
"syncing from repoid {:?} into repoid {:?}", source_repo_id, target_repo_id,
);
let config_store = args::maybe_init_config_store(fb, &logger, &matches)
.ok_or_else(|| format_err!("Failed initializing ConfigStore"))?;
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(&logger, &config_store)?;
match matches.subcommand() {
(ARG_MODE_BACKSYNC_ALL, _) => {
// NOTE: this does not use `CfgrLiveCommitSyncConfig`, as I want to allow
// for an opportunity to call this binary in non-forever mode with
// local fs-based configs
let commit_syncer =
runtime.block_on_std(create_commit_syncer_from_matches(fb, &logger, &matches))?;
let scuba_sample = ScubaSampleBuilder::with_discard();
let ctx = session_container.new_context(logger.clone(), scuba_sample);
let commit_sync_config =
live_commit_sync_config.get_current_commit_sync_config(&ctx, target_repo_id)?;
let commit_syncer = commit_syncer_args.try_into_commit_syncer(&commit_sync_config)?;
let db_config = target_repo_config.storage_config.metadata;
let target_repo_dbs = runtime.block_on_std(
open_backsyncer_dbs(
@@ -323,9 +323,6 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
.boxed(),
)?;
let config_store = args::maybe_init_config_store(fb, &logger, &matches)
.ok_or_else(|| format_err!("Failed initializing ConfigStore"))?;
let mut scuba_sample = ScubaSampleBuilder::new(fb, SCUBA_TABLE);
scuba_sample.add("source_repo", source_repo_id.id());
scuba_sample.add("source_repo_name", source_repo_name.clone());
@@ -336,11 +333,11 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
let ctx = session_container.new_context(logger.clone(), scuba_sample);
let f = backsync_forever(
ctx,
config_store,
commit_syncer_args,
target_repo_dbs,
source_repo_name,
target_repo_name,
live_commit_sync_config,
)
.boxed();
@@ -355,13 +352,11 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
runtime.block_on_std(f)?;
}
(ARG_MODE_BACKSYNC_COMMITS, Some(sub_m)) => {
// NOTE: this does not use `CfgrLiveCommitSyncConfig`, as I want to allow
// for an opportunity to call this binary in non-forever mode with
// local fs-based configs
let commit_syncer =
runtime.block_on_std(create_commit_syncer_from_matches(fb, &logger, &matches))?;
let ctx = session_container.new_context(logger, ScubaSampleBuilder::with_discard());
let commit_sync_config =
live_commit_sync_config.get_current_commit_sync_config(&ctx, target_repo_id)?;
let commit_syncer = commit_syncer_args.try_into_commit_syncer(&commit_sync_config)?;
let inputfile = sub_m
.value_of(ARG_INPUT_FILE)
.expect("input file is not set");

View File

@@ -21,6 +21,7 @@ cached_config = { git = "https://github.com/facebookexperimental/rust-shed.git",
fbinit = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
anyhow = "1.0"
clap = "2.33"
futures = { version = "0.3.5", features = ["async-await", "compat"] }
futures-old = { package = "futures", version = "0.1" }
slog = { version = "2.5", features = ["max_level_debug"] }

View File

@@ -10,24 +10,28 @@
use anyhow::{format_err, Error};
use bookmarks::{BookmarkName, Freshness};
use cached_config::ConfigStore;
use clap::ArgMatches;
use cmdlib::{args, helpers, monitoring};
use cmdlib_x_repo::{create_commit_syncer_from_matches, create_reverse_commit_syncer_from_matches};
use cmdlib_x_repo::{
create_commit_syncer_args_from_matches, create_reverse_commit_syncer_args_from_matches,
};
use context::{CoreContext, SessionContainer};
use cross_repo_sync::{
validation::{self, BookmarkDiff},
CommitSyncOutcome, CommitSyncer, Syncers,
CommitSyncOutcome, CommitSyncer, CommitSyncerArgs, Syncers,
};
use fbinit::FacebookInit;
use futures::{compat::Future01CompatExt, future};
use futures_old::Stream;
use live_commit_sync_config::CONFIGERATOR_PUSHREDIRECT_ENABLE;
use live_commit_sync_config::{CfgrLiveCommitSyncConfig, LiveCommitSyncConfig};
use mononoke_types::ChangesetId;
use pushredirect_enable::types::MononokePushRedirectEnable;
use scuba_ext::ScubaSampleBuilder;
use slog::{debug, error};
use slog::{debug, error, Logger};
use stats::prelude::*;
use std::{sync::Arc, time::Duration};
use synced_commit_mapping::SyncedCommitMapping;
use synced_commit_mapping::{SqlSyncedCommitMapping, SyncedCommitMapping};
const CONFIGERATOR_TIMEOUT: Duration = Duration::from_millis(25);
const CONFIGERATOR_POLL_INTERVAL: Duration = Duration::from_secs(1);
@@ -49,9 +53,13 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
.build();
let matches = app.get_matches();
let (_, logger, mut runtime) = args::init_mononoke(fb, &matches, None)?;
let ctx = create_core_context(fb, logger.clone());
let large_to_small_commit_syncer_args = runtime.block_on_std(
create_commit_syncer_args_from_matches(ctx.fb, &logger, &matches),
)?;
let large_to_small_commit_syncer =
runtime.block_on_std(create_commit_syncer_from_matches(fb, &logger, &matches))?;
get_commit_syncer(&ctx, &logger, &matches, large_to_small_commit_syncer_args)?;
if large_to_small_commit_syncer.get_source_repo().get_repoid()
!= large_to_small_commit_syncer.get_large_repo().get_repoid()
@@ -62,18 +70,17 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
// Backsyncer works in large -> small direction, however
// for bookmarks validator it's simpler to have commit syncer in small -> large direction
// Hence here we are creating a reverse syncer
let small_to_large_commit_syncer = runtime.block_on_std(
create_reverse_commit_syncer_from_matches(fb, &logger, &matches),
let small_to_large_commit_syncer_args = runtime.block_on_std(
create_reverse_commit_syncer_args_from_matches(ctx.fb, &logger, &matches),
)?;
let small_to_large_commit_syncer =
get_commit_syncer(&ctx, &logger, &matches, small_to_large_commit_syncer_args)?;
let syncers = Syncers {
large_to_small: large_to_small_commit_syncer,
small_to_large: small_to_large_commit_syncer,
};
let session_container = SessionContainer::new_with_defaults(fb);
let scuba_sample = ScubaSampleBuilder::with_discard();
let ctx = session_container.new_context(logger.clone(), scuba_sample);
helpers::block_execute(
loop_forever(ctx, syncers),
fb,
@@ -84,6 +91,27 @@ fn main(fb: FacebookInit) -> Result<(), Error> {
)
}
fn create_core_context(fb: FacebookInit, logger: Logger) -> CoreContext {
let session_container = SessionContainer::new_with_defaults(fb);
let scuba_sample = ScubaSampleBuilder::with_discard();
session_container.new_context(logger, scuba_sample)
}
fn get_commit_syncer<'a>(
ctx: &CoreContext,
logger: &Logger,
matches: &ArgMatches<'a>,
commit_syncer_args: CommitSyncerArgs<SqlSyncedCommitMapping>,
) -> Result<CommitSyncer<SqlSyncedCommitMapping>, Error> {
let target_repo_id = args::get_target_repo_id(ctx.fb, &matches)?;
let config_store = args::maybe_init_config_store(ctx.fb, &logger, &matches)
.ok_or_else(|| format_err!("Failed initializing ConfigStore"))?;
let live_commit_sync_config = CfgrLiveCommitSyncConfig::new(&logger, &config_store)?;
let commit_sync_config =
live_commit_sync_config.get_current_commit_sync_config(&ctx, target_repo_id)?;
commit_syncer_args.try_into_commit_syncer(&commit_sync_config)
}
async fn loop_forever<M: SyncedCommitMapping + Clone + 'static>(
ctx: CoreContext,
syncers: Syncers<M>,

View File

@@ -25,7 +25,7 @@ use bookmarks::{BookmarkName, BookmarkUpdateLog, Freshness};
use cached_config::ConfigStore;
use clap::{App, ArgMatches};
use cmdlib::{args, monitoring};
use cmdlib_x_repo::{create_commit_syncer_args_from_matches, create_commit_syncer_from_matches};
use cmdlib_x_repo::create_commit_syncer_args_from_matches;
use context::CoreContext;
use cross_repo_sync::{
types::{Source, Target},
@@ -167,7 +167,7 @@ async fn run_in_tailing_mode<
}
TailingArgs::LoopForever(commit_syncer_args, config_store) => {
let live_commit_sync_config =
CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?;
Arc::new(CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?);
let source_repo_id = commit_syncer_args.get_source_repo().get_repoid();
loop {
@@ -431,10 +431,16 @@ async fn run(
try_join3(source_repo, target_repo, mutable_counters).await?;
let commit_syncer_args = create_commit_syncer_args_from_matches(fb, &logger, &matches).await?;
// NOTE: this does not use `CfgrLiveCommitSyncConfig`, as I want to allow
// for an opportunity to call this binary in "once" mode with
// local fs-based configs
let commit_syncer = create_commit_syncer_from_matches(fb, &logger, &matches).await?;
let config_store = args::maybe_init_config_store(ctx.fb, &logger, &matches)
.ok_or_else(|| format_err!("Failed initializing ConfigStore"))?;
let live_commit_sync_config = Arc::new(CfgrLiveCommitSyncConfig::new(&logger, &config_store)?);
let commit_sync_config =
live_commit_sync_config.get_current_commit_sync_config(&ctx, source_repo.get_repoid())?;
let commit_syncer = commit_syncer_args
.clone()
.try_into_commit_syncer(&commit_sync_config)?;
let source_skiplist =
get_skiplist_index(&ctx, &source_repo_config, &source_repo).map_ok(Source);

View File

@@ -12,6 +12,8 @@ function verify_wc() {
large_repo_commit="$1"
"$MONONOKE_ADMIN" "${COMMON_ARGS[@]}" --log-level ERROR \
--mononoke-config-path "$TESTTMP"/mononoke-config \
--test-instance \
--local-configerator-path="$TESTTMP/configerator" \
--source-repo-id="$REPOIDLARGE" --target-repo-id="$REPOIDSMALL" \
crossrepo verify-wc "$large_repo_commit"
}
@@ -165,6 +167,8 @@ function backsync_large_to_small() {
"$BACKSYNCER" "${COMMON_ARGS[@]}" --debug --source-repo-id "$REPOIDLARGE" \
--mononoke-config-path "$TESTTMP/mononoke-config" \
--target-repo-id "$REPOIDSMALL" \
--test-instance \
--local-configerator-path="$TESTTMP/configerator" \
backsync-all
}

View File

@@ -154,6 +154,8 @@ function mononoke_x_repo_sync() {
GLOG_minloglevel=5 "$MONONOKE_X_REPO_SYNC" \
"${COMMON_ARGS[@]}" \
--mononoke-config-path "$TESTTMP/mononoke-config" \
--test-instance \
--local-configerator-path="$TESTTMP/configerator" \
--source-repo-id "$source_repo_id" \
--target-repo-id "$target_repo_id" \
"$@"

View File

@@ -11,6 +11,7 @@ setup configuration
$ REPOID=0 REPONAME=meg-mon setup_common_config $REPOTYPE
$ REPOID=1 REPONAME=fbs-mon setup_common_config $REPOTYPE
$ setup_commitsyncmap
$ setup_configerator_configs
$ ls $TESTTMP/monsql/sqlite_dbs
ls: cannot access *: No such file or directory (glob)
[2]
@@ -70,6 +71,8 @@ start mononoke server
run the sync, expected to fail, as parent of the synced commit is not present in the mapping
$ mononoke_x_repo_sync 1 0 once --target-bookmark master_bookamrk --commit fbsource_master |& grep -v "using repo"
* Initializing CfgrLiveCommitSyncConfig (glob)
* Done initializing CfgrLiveCommitSyncConfig (glob)
* changeset resolved as: ChangesetId(Blake2(*)) (glob)
* Checking if * is already synced 1->0 (glob)
* syncing without pushrebase (glob)
@@ -83,6 +86,8 @@ insert sync mapping entry
run the sync again
$ mononoke_x_repo_sync 1 0 once --target-bookmark bookmarktomerge --commit "$TOMERGE" |& grep -v "using repo"
* Initializing CfgrLiveCommitSyncConfig (glob)
* Done initializing CfgrLiveCommitSyncConfig (glob)
* changeset resolved as: ChangesetId(Blake2(*)) (glob)
* Checking if 6d7f84d613e4cccb4ec27259b7b59335573cdd65ee5dc78887056a5eeb6e6a47 is already synced 1->0 (glob)
* syncing without pushrebase (glob)
@@ -90,6 +95,8 @@ run the sync again
* changeset 6d7f84d613e4cccb4ec27259b7b59335573cdd65ee5dc78887056a5eeb6e6a47 synced as fa8f65693524f78f5e0a40099d10acdc3001d6d472c62baabf03231e51b109c7 in * (glob)
* successful sync (glob)
$ mononoke_x_repo_sync 1 0 once --target-bookmark master_bookmark --commit fbsource_master |& grep -v "using repo"
* Initializing CfgrLiveCommitSyncConfig (glob)
* Done initializing CfgrLiveCommitSyncConfig (glob)
* changeset resolved as: ChangesetId(Blake2(*)) (glob)
* Checking if * is already synced 1->0 (glob)
* syncing via pushrebase (glob)