diff --git a/eden/mononoke/segmented_changelog/Cargo.toml b/eden/mononoke/segmented_changelog/Cargo.toml
index 902768fb0a..452fa7fa95 100644
--- a/eden/mononoke/segmented_changelog/Cargo.toml
+++ b/eden/mononoke/segmented_changelog/Cargo.toml
@@ -52,7 +52,6 @@ sql_construct = { version = "0.1.0", path = "../common/sql_construct" }
 sql_ext = { version = "0.1.0", path = "../common/rust/sql_ext" }
 stats = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
 tokio = { version = "1.29.1", features = ["full", "test-util", "tracing"] }
-tunables = { version = "0.1.0", path = "../tunables" }
 
 [dev-dependencies]
 fbinit = { version = "0.1.2", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
diff --git a/eden/mononoke/segmented_changelog/TARGETS b/eden/mononoke/segmented_changelog/TARGETS
index 05cf8e9d75..c52e4d4836 100644
--- a/eden/mononoke/segmented_changelog/TARGETS
+++ b/eden/mononoke/segmented_changelog/TARGETS
@@ -105,7 +105,6 @@ rust_library(
         "//eden/mononoke/repo_attributes/repo_identity:repo_identity",
         "//eden/mononoke/revset:revset",
         "//eden/mononoke/server/context:context",
-        "//eden/mononoke/tunables:tunables",
         "//eden/scm/lib/mincode:mincode",
     ],
 )
diff --git a/eden/mononoke/segmented_changelog/src/on_demand.rs b/eden/mononoke/segmented_changelog/src/on_demand.rs
index 0f21ffcb3a..cd868cfa20 100644
--- a/eden/mononoke/segmented_changelog/src/on_demand.rs
+++ b/eden/mononoke/segmented_changelog/src/on_demand.rs
@@ -259,9 +259,11 @@ impl OnDemandUpdateSegmentedChangelog {
     ) -> Result {
         let changeset_fetcher = self.changeset_fetcher.clone();
         let id_map = self.namedag.read().await.map().clone_idmap();
-        let max_commits = tunables::tunables()
-            .segmented_changelog_client_max_commits_to_traverse()
-            .unwrap_or_default();
+        let max_commits = justknobs::get_as::<u64>(
+            "scm/mononoke:segmented_changelog_client_max_commits_to_traverse",
+            None,
+        )
+        .unwrap_or_default();
         for cs_id in heads {
             let ancestors =
                 AncestorsNodeStream::new(ctx.clone(), &changeset_fetcher, *cs_id).compat();
diff --git a/eden/mononoke/segmented_changelog/src/tests.rs b/eden/mononoke/segmented_changelog/src/tests.rs
index 62bd9356fd..4776b1f3bb 100644
--- a/eden/mononoke/segmented_changelog/src/tests.rs
+++ b/eden/mononoke/segmented_changelog/src/tests.rs
@@ -39,6 +39,7 @@ use futures::future::FutureExt;
 use futures::stream;
 use futures::StreamExt;
 use justknobs::test_helpers::override_just_knobs;
+use justknobs::test_helpers::with_just_knobs_async;
 use justknobs::test_helpers::JustKnobsInMemory;
 use justknobs::test_helpers::KnobVal;
 use maplit::hashmap;
@@ -54,7 +55,6 @@ use sql_construct::SqlConstruct;
 use sql_ext::replication::NoReplicaLagMonitor;
 use tests_utils::resolve_cs_id;
 use tests_utils::CreateCommitContext;
-use tunables::with_tunables_async;
 
 use crate::builder::SegmentedChangelogSqlConnections;
 use crate::iddag::IdDagSaveStore;
@@ -1128,29 +1128,32 @@ async fn test_mismatched_heads(fb: FacebookInit) -> Result<()> {
 
     // should fail with the small limit for traversing from client heads
     let h2 = resolve_cs_id(&ctx, &blobrepo, "16839021e338500b3cf7c9b871c8a07351697d68").await?;
-    let tunables = tunables::MononokeTunables::default();
-    tunables.update_ints(&hashmap! {
-        "segmented_changelog_client_max_commits_to_traverse".to_string() => 2,
-    });
     let f = dag.changeset_id_to_location(&ctx, vec![h1, h2], h1_parent);
-    let err = with_tunables_async(tunables, f.boxed())
-        .await
-        .err()
-        .unwrap();
+    let err = with_just_knobs_async(
+        JustKnobsInMemory::new(hashmap! {
+            "scm/mononoke:segmented_changelog_client_max_commits_to_traverse".to_string() => KnobVal::Int(2),
+        }),
+        f.boxed(),
+    )
+    .await
+    .err()
+    .unwrap();
     assert!(err.is::<MismatchedHeadsError>());
 
     // should succeed as the client head not far from the commits in SC IdMap
     let h2 = resolve_cs_id(&ctx, &blobrepo, "16839021e338500b3cf7c9b871c8a07351697d68").await?;
-    let tunables = tunables::MononokeTunables::default();
-    tunables.update_ints(&hashmap! {
-        "segmented_changelog_client_max_commits_to_traverse".to_string() => 100,
-    });
     let f = dag.changeset_id_to_location(&ctx, vec![h1, h2], h1_parent);
     assert_eq!(
-        with_tunables_async(tunables, f.boxed()).await?,
+        with_just_knobs_async(
+            JustKnobsInMemory::new(hashmap! {
+                "scm/mononoke:segmented_changelog_client_max_commits_to_traverse".to_string() => KnobVal::Int(100),
+            }),
+            f.boxed(),
+        )
+        .await?,
         Some(Location::new(h1, 1))
     );
 
diff --git a/eden/mononoke/tests/integration/test-edenapi-server-pull-different-branch.t b/eden/mononoke/tests/integration/test-edenapi-server-pull-different-branch.t
index f578401cf1..94e2138dcf 100644
--- a/eden/mononoke/tests/integration/test-edenapi-server-pull-different-branch.t
+++ b/eden/mononoke/tests/integration/test-edenapi-server-pull-different-branch.t
@@ -99,10 +99,13 @@ I..P are pulled via non-lazy fallback pull path. They can be resolved locally:
   ~
 
 Allow server to build up temporary segments on demand:
-
-  $ merge_tunables <<'EOS'
-  > {"ints": {"segmented_changelog_client_max_commits_to_traverse": 100}}
-  > EOS
+  $ merge_just_knobs <<EOF
+  > {
+  >   "ints": {
+  >     "scm/mononoke:segmented_changelog_client_max_commits_to_traverse": 100
+  >   }
+  > }
+  > EOF
 
 Pulling branch1 as main branch now uses fastpath:
 
diff --git a/eden/mononoke/tests/integration/test-edenapi-server-pull.t b/eden/mononoke/tests/integration/test-edenapi-server-pull.t
index 2633fb082b..c9ccf38191 100644
--- a/eden/mononoke/tests/integration/test-edenapi-server-pull.t
+++ b/eden/mononoke/tests/integration/test-edenapi-server-pull.t
@@ -29,7 +29,7 @@ Enable Segmented Changelog
   $ mononoke
   $ wait_for_mononoke
 
-Lazy clone the repo from mononoke 
+Lazy clone the repo from mononoke
   $ cd "$TESTTMP"
   $ setconfig remotenames.selectivepull=True remotenames.selectivepulldefault=master_bookmark
   $ setconfig pull.httpcommitgraph2=1 pull.httphashprefix=1
@@ -78,11 +78,11 @@ Check that bookmark moved correctly
 
   {"master": None, "master_bookmark": "26805aba1e600a82e93661149f2313866a221a7b"}
 
-  $ merge_tunables <<EOF
+  $ merge_just_knobs <<EOF
   > {
-  >   "ints": {
-  >     "segmented_changelog_client_max_commits_to_traverse": 100
-  >   }
+  >   "ints": {
+  >     "scm/mononoke:segmented_changelog_client_max_commits_to_traverse": 100
+  >   }
   > }
   > EOF
 
@@ -92,7 +92,7 @@ Pull should succeed and local bookmark should be moved back.
   DEBUG pull::fastpath: master_bookmark: c2f72b3cb5e9ea5ce6b764fc5b4f7c7b23208217 => 26805aba1e600a82e93661149f2313866a221a7b
   imported commit graph for 0 commits (0 segments)
 
-Check that segmented changelog IdMap in DB didn't change. 
+Check that segmented changelog IdMap in DB didn't change.
$ sqlite3 "$TESTTMP/monsql/sqlite_dbs" "select version, vertex, hex(cs_id) from segmented_changelog_idmap" 1|0|9FEB8DDD3E8EDDCFA3A4913B57DF7842BEDF84B8EA3B7B3FCB14C6424AA81FEC 1|1|459F16AE564C501CB408C1E5B60FC98A1E8B8E97B9409C7520658BFA1577FB66 diff --git a/eden/mononoke/tunables/src/lib.rs b/eden/mononoke/tunables/src/lib.rs index f69fdc8ce9..dccb4c567b 100644 --- a/eden/mononoke/tunables/src/lib.rs +++ b/eden/mononoke/tunables/src/lib.rs @@ -215,9 +215,6 @@ pub struct MononokeTunables { megarepo_api_dont_set_file_mutable_renames: TunableBool, megarepo_api_dont_set_directory_mutable_renames: TunableBool, - // How many commits to walk back from the client heads before failing to rebuild SC - segmented_changelog_client_max_commits_to_traverse: TunableI64, - // What timeout to use when doing filenode lookup. // Usually filenode lookup is used while generating hg changesets filenode_lookup_timeout_ms: TunableI64,