add mononoke_new_commit logging for backsyncer

Summary:
I propose that instead of logging from the pushredirection code we log from
the backsyncer code. That way it won't matter how the commits made it to the
large repo: they'll be logged in the small repo regardless.

Differential Revision: D45401754

fbshipit-source-id: 471cf6c3a4afd3da2f6a01729b7443f506e0d092
Author: Mateusz Kwapich (committed by Facebook GitHub Bot)
Date: 2023-05-02 08:30:52 -07:00
Parent: 87ee7a2ef7
Commit: ae58899743
8 changed files with 104 additions and 23 deletions
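To make the shape of the change easier to see before diving into the hunks: logging moves out of the pushredirection call sites and into the backsyncer's bookmark-update path, gated by a new tunable. Below is a minimal, self-contained Rust sketch of that ordering only; every name in it (Changeset, backsync_bookmark_move, and the stand-in helpers) is hypothetical, not the actual Mononoke API:

    // Minimal sketch of the ordering the diff introduces; all names here
    // are hypothetical stand-ins, not the actual Mononoke API.

    struct Changeset(u64);

    // Stand-in for repo_update_logger::find_draft_ancestors.
    fn find_draft_ancestors(to_cs_id: u64) -> Result<Vec<Changeset>, String> {
        Ok(vec![Changeset(to_cs_id)])
    }

    // Stand-in for the tunables read: None until the killswitch is set.
    fn log_backsynced_commits_from_backsyncer() -> Option<bool> {
        Some(true)
    }

    fn backsync_bookmark_move(to_cs_id: Option<u64>) -> Result<(), String> {
        // 1. Collect the commits to log *before* committing the bookmark
        //    move, falling back to an empty list if the lookup fails.
        let commits_to_log = match to_cs_id {
            Some(cs_id) => find_draft_ancestors(cs_id).unwrap_or_default(),
            None => vec![],
        };

        // 2. Commit the bookmark transaction (elided in this sketch).

        // 3. Log only after the transaction succeeded, and only when the
        //    rollout tunable is enabled.
        if log_backsynced_commits_from_backsyncer().unwrap_or(false) {
            println!("logging {} backsynced commit(s)", commits_to_log.len());
        }
        Ok(())
    }

    fn main() {
        backsync_bookmark_move(Some(42)).unwrap();
    }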


@@ -29,6 +29,7 @@ phases = { version = "0.1.0", path = "../../phases" }
 repo_blobstore = { version = "0.1.0", path = "../../blobrepo/repo_blobstore" }
 repo_derived_data = { version = "0.1.0", path = "../../repo_attributes/repo_derived_data" }
 repo_identity = { version = "0.1.0", path = "../../repo_attributes/repo_identity" }
+repo_update_logger = { version = "0.1.0", path = "../../features/repo_update_logger" }
 revset = { version = "0.1.0", path = "../../revset" }
 skiplist = { version = "0.1.0", path = "../../reachabilityindex/skiplist" }
 slog = { version = "2.7", features = ["max_level_trace", "nested-values"] }


@@ -35,6 +35,7 @@ use anyhow::Error;
 use blobstore::Loadable;
 use bonsai_globalrev_mapping::BonsaiGlobalrevMappingEntry;
 use bonsai_hg_mapping::BonsaiHgMapping;
+use bookmarks::BookmarkKind;
 use bookmarks::BookmarkTransactionError;
 use bookmarks::BookmarkUpdateLog;
 use bookmarks::BookmarkUpdateLogArc;
@@ -76,6 +77,8 @@ use repo_blobstore::RepoBlobstoreRef;
 use repo_derived_data::RepoDerivedData;
 use repo_identity::RepoIdentity;
 use repo_identity::RepoIdentityRef;
+use repo_update_logger::find_draft_ancestors;
+use repo_update_logger::log_new_bonsai_changesets;
 use revset::AncestorsNodeStream;
 use revset::DifferenceOfUnionsOfAncestorsNodeStream;
 use skiplist::SkiplistIndex;
@@ -467,6 +470,25 @@ where
     } else {
         vec![]
     };
+    let commits_to_log = async {
+        match to_cs_id {
+            Some(to_cs_id) => {
+                let res = find_draft_ancestors(&ctx, target_repo, to_cs_id).await;
+                match res {
+                    Ok(bcss) => bcss,
+                    Err(err) => {
+                        ctx.scuba().clone().log_with_msg(
+                            "Failed to find draft ancestors for logging",
+                            Some(format!("{}", err)),
+                        );
+                        vec![]
+                    }
+                }
+            }
+            None => vec![],
+        }
+    }
+    .await;
     let mut bookmark_txn = target_repo_dbs.bookmarks.create_transaction(ctx.clone());
     debug!(
@@ -539,7 +561,22 @@ where
         }
     });
-    return bookmark_txn.commit_with_hook(txn_hook).await;
+    let res = bookmark_txn.commit_with_hook(txn_hook).await?;
+    if tunables::tunables()
+        .log_backsynced_commits_from_backsyncer()
+        .unwrap_or(false)
+    {
+        log_new_bonsai_changesets(
+            &ctx,
+            target_repo,
+            &bookmark,
+            BookmarkKind::Publishing,
+            commits_to_log,
+        )
+        .await;
+    }
+    return Ok(res);
 } else {
     debug!(
         ctx.logger(),
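One detail worth noting in the hunks above: if find_draft_ancestors fails, the error is written to scuba and then swallowed (an empty list is logged instead), so a logging problem can never fail the backsync itself. A self-contained sketch of that degrade-to-empty pattern, with a hypothetical report callback in place of the scuba builder:

    // Degrade-to-empty error handling, as in the backsyncer hunk: report
    // the failure, then proceed with nothing to log.
    fn ancestors_or_empty(res: Result<Vec<u64>, String>, report: impl Fn(&str)) -> Vec<u64> {
        match res {
            Ok(bcss) => bcss,
            Err(err) => {
                // Stand-in for ctx.scuba().clone().log_with_msg(..).
                report(&format!("Failed to find draft ancestors for logging: {}", err));
                vec![]
            }
        }
    }

    fn main() {
        let got = ancestors_or_empty(Err("boom".to_string()), |msg| eprintln!("{}", msg));
        assert!(got.is_empty());
    }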


@@ -103,21 +103,26 @@ impl RepoContext {
             redirector.convert_pushrebased_changesets(ctx, rebased_changesets)
         )?;
-        for pair in rebased_changesets.iter() {
-            let info = changesets_to_log
-                .get_mut(&pair.id_old)
-                .with_context(|| format!("Missing commit info for {}", pair.id_old))?;
-            info.update_changeset_id(pair.id_old, pair.id_new)?;
-        }
+        if !tunables::tunables()
+            .log_backsynced_commits_from_backsyncer()
+            .unwrap_or(false)
+        {
+            for pair in rebased_changesets.iter() {
+                let info = changesets_to_log
+                    .get_mut(&pair.id_old)
+                    .with_context(|| format!("Missing commit info for {}", pair.id_old))?;
+                info.update_changeset_id(pair.id_old, pair.id_new)?;
+            }
-        // Also log commits on small repo
-        log_new_commits(
-            ctx,
-            redirector.small_repo.as_ref(),
-            Some((&bookmark, BookmarkKind::Publishing)),
-            changesets_to_log.0.into_values().collect(),
-        )
-        .await;
+            // Also log commits on small repo
+            log_new_commits(
+                ctx,
+                redirector.small_repo.as_ref(),
+                Some((&bookmark, BookmarkKind::Publishing)),
+                changesets_to_log.0.into_values().collect(),
+            )
+            .await;
+        }
         Ok(Small(PushrebaseOutcome {
             old_bookmark_value,
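The condition here is deliberately the negation of the one added in the backsyncer: for any value of the tunable, exactly one of the two call sites logs, so commits in the small repo are logged exactly once during the rollout. A tiny stand-alone illustration of that invariant (tunable is a hypothetical stand-in for the real tunables read):

    // Stand-in for the rollout tunable; flip it to exercise both sides.
    fn tunable() -> bool {
        true
    }

    fn main() {
        let log_from_backsyncer = tunable(); // new path in the backsyncer
        let log_from_pushredirection = !tunable(); // old path, now gated
        // Exactly one of the two paths logs for any value of the tunable,
        // so small-repo commits are logged once, never twice or zero times.
        assert!(log_from_backsyncer ^ log_from_pushredirection);
    }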


@@ -584,14 +584,19 @@ impl<R: Repo> PushRedirector<R> {
             )
         })?;
-        // Also log commits on small repo
-        log_new_commits(
-            ctx,
-            self.small_repo.as_ref(),
-            Some((&onto, BookmarkKind::Publishing)),
-            changesets_to_log.into_values().collect(),
-        )
-        .await;
+        if !tunables::tunables()
+            .log_backsynced_commits_from_backsyncer()
+            .unwrap_or(false)
+        {
+            // Also log commits on small repo
+            log_new_commits(
+                ctx,
+                self.small_repo.as_ref(),
+                Some((&onto, BookmarkKind::Publishing)),
+                changesets_to_log.into_values().collect(),
+            )
+            .await;
+        }
         Ok(UnbundlePushRebaseResponse {
             commonheads,


@@ -9,6 +9,13 @@
   $ . "${TEST_FIXTURES}/library-push-redirector.sh"
   $ setup_configerator_configs
+  $ merge_tunables <<EOF
+  > {
+  >   "killswitches": {
+  >     "log_backsynced_commits_from_backsyncer": true
+  >   }
+  > }
+  > EOF
   $ cat > "$PUSHREDIRECT_CONF/enable" <<EOF
   > {
   >   "per_repo": {
@@ -109,3 +116,6 @@ Config change
   ["large-mon","*","master_bookmark",true] (glob)
   ["large-mon","*","master_bookmark",true] (glob)
   ["large-mon","*","master_bookmark",true] (glob)
+  ["small-mon","*","master_bookmark",true] (glob)
+  ["small-mon","*","master_bookmark",true] (glob)
+  ["small-mon","*","master_bookmark",true] (glob)


@@ -9,6 +9,13 @@
   $ export BOOKMARK_SCRIBE_CATEGORY=mononoke_bookmark
   $ setup_configerator_configs
+  $ merge_tunables <<EOF
+  > {
+  >   "killswitches": {
+  >     "log_backsynced_commits_from_backsyncer": true
+  >   }
+  > }
+  > EOF
   $ cat > "$PUSHREDIRECT_CONF/enable" <<EOF
   > {
   >   "per_repo": {
@@ -85,6 +92,9 @@ although the second one became non-merge commit
 Make sure we have directory from the first move, but not from the second
   $ ls
   file.txt
@@ -95,3 +105,6 @@ Make sure we have directory from the first move, but not from the second
   ["large-mon","2e4090631ddfa0b3a2fe26c5d2560c615ebc2b77533e6a2039afcfbd3424c3ac","master_bookmark",true]
   ["large-mon","3c4cdb0a6b145deb53f79178ba48c6fd1316058982bf6ab618402b91901de75e","master_bookmark",true]
   ["large-mon","71634714290726837d0f66eff98add40cd621b0aa745b5bfc587cbc89b2fc94f","master_bookmark",true]
+  ["small-mon","77970017bd96dfbfd8b2cf217420bd45c55b6f5ad0073d19db9025e30367ab9f","master_bookmark",true]
+  ["small-mon","a353e74b22a1347ee50cd20a8b9a916f213f7c8e4148dce66c5c2a5c273abf5c","master_bookmark",true]
+  ["small-mon","dfbd8e50164e761bbf2f8ecedbc9e8cf8641cb6e4679fb0487f48311c01ab0a5","master_bookmark",true]


@@ -10,6 +10,13 @@
   $ . "${TEST_FIXTURES}/library-push-redirector.sh"
   $ setup_configerator_configs
+  $ merge_tunables <<EOF
+  > {
+  >   "killswitches": {
+  >     "log_backsynced_commits_from_backsyncer": true
+  >   }
+  > }
+  > EOF
   $ cat > "$PUSHREDIRECT_CONF/enable" <<EOF
   > {
   >   "per_repo": {


@@ -391,6 +391,9 @@ pub struct MononokeTunables {
     // Disable write restrictions for untrusted clients (e.g. off VPN). Intended
     // to be used in case of emergency.
    disable_client_security_check: TunableBool,
+    // Temporary for rollout of logging change
+    log_backsynced_commits_from_backsyncer: TunableBool,
 }

 fn log_tunables(tunables: &TunablesStruct) -> String {
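A TunableBool getter returns an Option, which is why every call site above spells unwrap_or(false): the new path stays off until the killswitch JSON from the tests (or its production equivalent) flips it to true. A self-contained sketch of that read-with-default pattern; the struct below is a stand-in, not the real MononokeTunables:

    // Stand-in for MononokeTunables: a TunableBool reads as Option<bool>.
    struct Tunables {
        log_backsynced_commits_from_backsyncer: Option<bool>,
    }

    impl Tunables {
        fn log_backsynced_commits_from_backsyncer(&self) -> Option<bool> {
            self.log_backsynced_commits_from_backsyncer
        }
    }

    fn main() {
        // Unset until the killswitch JSON flips it, so the new logging
        // path defaults to off.
        let t = Tunables {
            log_backsynced_commits_from_backsyncer: None,
        };
        assert!(!t.log_backsynced_commits_from_backsyncer().unwrap_or(false));
    }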