Support logging commit cloud commits uploaded via edenapi to scribe

Summary:
markbt noticed some commit cloud commits weren't being logged to the scribe category. This is because the EdenApi path to create changesets doesn't do the logging.

This diff makes sure we also do the logging in that case.

Ideally, we'd have a unified place to create changesets, whether from hg changesets, git, or raw data, but that needs a lot more refactoring and is riskier.

Reviewed By: markbt

Differential Revision: D34146757

fbshipit-source-id: e82f14e1be6c598f89722b68c20cd6fc572633e0
This commit is contained in:
Yan Soares Couto 2022-02-21 10:19:00 -08:00 committed by Facebook GitHub Bot
parent 5044acfe04
commit ebaf85e1d8
6 changed files with 15 additions and 1 deletions

View File

@ -458,6 +458,7 @@ impl UploadChangesets {
sub_entries: entries.compat().boxed(),
cs_metadata,
create_bonsai_changeset_hook: Some(create_and_verify_bonsai.clone()),
scribe_category: None,
};
let cshandle =
create_changeset.create(ctx.clone(), &blobrepo, scuba_logger.clone());

View File

@ -10,6 +10,7 @@ use crate::repo_commit::*;
use crate::ErrorKind;
use ::manifest::Entry;
use anyhow::{anyhow, format_err, Context, Error, Result};
use blobrepo::scribe::log_commit_to_scribe;
use blobrepo::BlobRepo;
use blobstore::Loadable;
use bonsai_hg_mapping::{BonsaiHgMapping, BonsaiHgMappingArc, BonsaiHgMappingEntry};
@ -116,6 +117,8 @@ pub struct CreateChangeset {
pub sub_entries: BoxStream<'static, Result<(Entry<HgManifestId, HgFileNodeId>, RepoPath)>>,
pub cs_metadata: ChangesetMetadata,
pub create_bonsai_changeset_hook: Option<Arc<BonsaiChangesetHook>>,
/// Which category to log the changeset to, if any
pub scribe_category: Option<String>,
}
impl CreateChangeset {
@ -302,7 +305,7 @@ impl CreateChangeset {
let complete_changesets = repo.get_changesets_object();
let bonsai_hg_mapping = repo.bonsai_hg_mapping_arc().clone();
let _repo = repo.clone();
cloned!(repo);
let changeset_complete_fut = async move {
let ((hg_cs, bonsai_cs), _) = future::try_join(changeset, parents_complete).await?;
@ -327,6 +330,10 @@ impl CreateChangeset {
.await
.context("While inserting mapping")?;
if let Some(category) = self.scribe_category {
log_commit_to_scribe(&ctx, &category, &repo, &bonsai_cs, None).await;
}
Ok::<_, Error>((bonsai_cs, hg_cs))
}
.try_timed()

View File

@ -172,6 +172,7 @@ pub fn create_changeset_no_parents(
.boxed(),
cs_metadata,
create_bonsai_changeset_hook: None,
scribe_category: None,
};
create_changeset.create(
CoreContext::test_mock(fb),
@ -207,6 +208,7 @@ pub fn create_changeset_one_parent(
.boxed(),
cs_metadata,
create_bonsai_changeset_hook: None,
scribe_category: None,
};
create_changeset.create(
CoreContext::test_mock(fb),

View File

@ -368,6 +368,7 @@ impl HgRepoContext {
for (node, revlog_cs) in changesets {
uploaded_changesets = upload_changeset(
self.ctx().clone(),
self.config().infinitepush.commit_scribe_category.clone(),
self.blob_repo().clone(),
self.ctx().scuba().clone(),
node,

View File

@ -1250,6 +1250,7 @@ impl<'r> Bundle2Resolver<'r> {
for (node, revlog_cs) in chunk {
uploaded_changesets = upload_changeset(
self.ctx.clone(),
None, // No logging to scribe happens through this codepath
self.repo.clone(),
self.ctx.scuba().clone(),
*node,

View File

@ -268,6 +268,7 @@ fn get_parent(
pub async fn upload_changeset(
ctx: CoreContext,
scribe_category: Option<String>,
repo: BlobRepo,
scuba_logger: MononokeScubaSampleBuilder,
node: HgChangesetId,
@ -305,6 +306,7 @@ pub async fn upload_changeset(
// XXX pass content blobs to CreateChangeset here
cs_metadata,
create_bonsai_changeset_hook,
scribe_category,
};
let scheduled_uploading = create_changeset.create(ctx, &repo, scuba_logger);