mononoke/pushrebase_hooks: add a hook that saves prepushrebase changeset id

Summary:
Knowing the prepushrebase changeset id is required for retroactive_review.
retroactive_review checks landed commits, but the verify_integrity hook runs on a commit before it lands. As a result, the landed commit has no straightforward connection to the original one, and retroactive_review cannot tell whether verify_integrity has seen it.
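
For illustration, a hedged sketch of the consumer side (hypothetical code, not part of this diff; it reuses the test-only helper get_prepushrebase_ids from this crate, while a production caller such as retroactive_review would presumably issue an equivalent query through its own layer):

    use anyhow::Result;
    use mononoke_types::{ChangesetId, RepositoryId};
    use pushrebase_mutation_mapping::get_prepushrebase_ids;
    use sql::Connection;

    // Given a landed (successor) commit, recover the prepushrebase
    // (predecessor) commit(s) that verify_integrity actually ran on.
    async fn prepushrebase_commits(
        connection: &Connection,
        repo_id: RepositoryId,
        landed: ChangesetId,
    ) -> Result<Vec<ChangesetId>> {
        get_prepushrebase_ids(connection, repo_id, landed).await
    }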

Reviewed By: krallin

Differential Revision: D27911317

fbshipit-source-id: f7bb0cfbd54fa6ad2ed27fb9d4d67b9f087879f1
Author: Ilia Medianikov (committed by Facebook GitHub Bot)
Date: 2021-04-27 03:51:52 -07:00
Parent: 93b8cf116b
Commit: 449fd2fd02
18 changed files with 545 additions and 0 deletions

View File

@@ -399,6 +399,7 @@ members = [
"phases",
"pushrebase",
"pushrebase/pushrebase_hook",
"pushrebase_mutation_mapping",
"quiet_stream",
"reachabilityindex",
"reachabilityindex/common",

View File

@@ -50,6 +50,7 @@ metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
newfilenodes = { version = "0.1.0", path = "../../newfilenodes" }
phases = { version = "0.1.0", path = "../../phases" }
pushrebase_mutation_mapping = { version = "0.1.0", path = "../../pushrebase_mutation_mapping" }
rand = { version = "0.7", features = ["small_rng"] }
rand_distr = "0.2"
rand_xorshift = "0.2"

View File

@@ -48,6 +48,9 @@ use mononoke_types::{
};
use newfilenodes::NewFilenodesBuilder;
use phases::{ArcSqlPhasesFactory, SqlPhasesFactory};
use pushrebase_mutation_mapping::{
ArcPushrebaseMutationMapping, SqlPushrebaseMutationMappingConnection,
};
use rand::Rng;
use rand_distr::Distribution;
use rendezvous::RendezVousOptions;
@@ -209,6 +212,16 @@ impl BenchmarkRepoFactory {
Ok(Arc::new(SqlBonsaiGlobalrevMapping::with_sqlite_in_memory()?))
}
pub fn pushrebase_mutation_mapping(
&self,
repo_identity: &ArcRepoIdentity,
) -> Result<ArcPushrebaseMutationMapping> {
Ok(Arc::new(
SqlPushrebaseMutationMappingConnection::with_sqlite_in_memory()?
.with_repo_id(repo_identity.id()),
))
}
pub fn repo_bonsai_svnrev_mapping(
&self,
repo_identity: &ArcRepoIdentity,

View File

@@ -30,6 +30,7 @@ mercurial_mutation = { version = "0.1.0", path = "../mercurial/mutation" }
metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }
mononoke_types = { version = "0.1.0", path = "../mononoke_types" }
phases = { version = "0.1.0", path = "../phases" }
pushrebase_mutation_mapping = { version = "0.1.0", path = "../pushrebase_mutation_mapping" }
repo_blobstore = { version = "0.1.0", path = "repo_blobstore" }
repo_derived_data = { version = "0.1.0", path = "../repo_attributes/repo_derived_data" }
repo_identity = { version = "0.1.0", path = "../repo_attributes/repo_identity" }

View File

@@ -36,6 +36,7 @@ use mononoke_types::{
BlobstoreValue, BonsaiChangeset, ChangesetId, Generation, Globalrev, MononokeId, RepositoryId,
};
use phases::{HeadsFetcher, Phases, SqlPhasesFactory};
use pushrebase_mutation_mapping::{ArcPushrebaseMutationMapping, PushrebaseMutationMapping};
use repo_blobstore::{RepoBlobstore, RepoBlobstoreArgs};
use repo_derived_data::RepoDerivedData;
use repo_identity::RepoIdentity;
@@ -93,6 +94,9 @@ pub struct BlobRepoInner {
#[facet]
pub bonsai_globalrev_mapping: dyn BonsaiGlobalrevMapping,
#[facet]
pub pushrebase_mutation_mapping: dyn PushrebaseMutationMapping,
#[facet]
pub repo_bonsai_svnrev_mapping: RepoBonsaiSvnrevMapping,
@@ -153,6 +157,7 @@ impl BlobRepo {
bonsai_git_mapping: ArcBonsaiGitMapping,
bonsai_globalrev_mapping: ArcBonsaiGlobalrevMapping,
bonsai_svnrev_mapping: RepoBonsaiSvnrevMapping,
pushrebase_mutation_mapping: ArcPushrebaseMutationMapping,
derived_data_lease: Arc<dyn LeaseOps>,
filestore_config: FilestoreConfig,
phases_factory: SqlPhasesFactory,
@@ -183,6 +188,7 @@ impl BlobRepo {
bonsai_git_mapping,
bonsai_globalrev_mapping,
repo_bonsai_svnrev_mapping,
pushrebase_mutation_mapping,
repoid,
filestore_config,
sql_phases_factory,
@@ -342,6 +348,10 @@ impl BlobRepo {
self.inner.repo_bonsai_svnrev_mapping.as_ref()
}
pub fn pushrebase_mutation_mapping(&self) -> &ArcPushrebaseMutationMapping {
&self.inner.pushrebase_mutation_mapping
}
pub async fn get_bonsai_from_globalrev(
&self,
ctx: &CoreContext,

View File

@@ -229,5 +229,10 @@ pub fn get_pushrebase_hooks(
pushrebase_hooks.push(hook);
}
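// Save the prepushrebase -> landed changeset mapping; the facet returns no
// hook when the mapping is disabled (e.g. via tunable).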
if let Some(hook) = repo.pushrebase_mutation_mapping().get_hook() {
pushrebase_hooks.push(hook);
}
Ok(pushrebase_hooks)
}

View File

@@ -0,0 +1,58 @@
[package]
name = "pushrebase_mutation_mapping"
version = "0.1.0"
authors = ["Facebook"]
edition = "2018"
license = "GPLv2+"
[dependencies]
anyhow = "1.0"
async-trait = "0.1.45"
bookmarks = { version = "0.1.0", path = "../bookmarks" }
context = { version = "0.1.0", path = "../server/context" }
facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
mononoke_types = { version = "0.1.0", path = "../mononoke_types" }
pushrebase_hook = { version = "0.1.0", path = "../pushrebase/pushrebase_hook" }
sql = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
sql_construct = { version = "0.1.0", path = "../common/sql_construct" }
sql_ext = { version = "0.1.0", path = "../common/rust/sql_ext" }
tunables = { version = "0.1.0", path = "../tunables" }
[dev-dependencies]
blobrepo = { version = "0.1.0", path = "../blobrepo" }
blobstore = { version = "0.1.0", path = "../blobstore" }
borrowed = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
fbinit-tokio = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
maplit = "1.0"
mononoke_types-mocks = { version = "0.1.0", path = "../mononoke_types/mocks" }
pushrebase = { version = "0.1.0", path = "../pushrebase" }
test_repo_factory = { version = "0.1.0", path = "../repo_factory/test_repo_factory" }
tests_utils = { version = "0.1.0", path = "../tests/utils" }
[patch.crates-io]
addr2line = { git = "https://github.com/gimli-rs/addr2line.git", rev = "0b6b6018b5b252a18e628fba03885f7d21844b3c" }
async-compression = { git = "https://github.com/ahornby/async-compression", rev = "c7fe7f9512e742f0c0097a19de0fca89da140b31" }
bytecount = { git = "https://github.com/llogiq/bytecount", rev = "469eaf8395c99397cd64d059737a9054aa014088" }
chashmap = { git = "https://gitlab.redox-os.org/ahornby/chashmap", rev = "901ace2ca3cdbc2095adb1af111d211e254e2aae" }
const-random = { git = "https://github.com/fbsource/const-random", rev = "374c5b46427fe2ffbf6acbd9c1687e0f1a809f95" }
curl = { git = "https://github.com/kulshrax/curl-rust", rev = "2a15bbd8dbbd54734313fa703a64db7ce6ddaff0" }
curl-sys = { git = "https://github.com/kulshrax/curl-rust", rev = "2a15bbd8dbbd54734313fa703a64db7ce6ddaff0" }
enumset = { git = "https://github.com/danobi/enumset", rev = "4c01c583c27a725948fededbfb3461c572a669a4" }
gotham-02 = { package = "gotham", git = "https://github.com/krallin/gotham-02.git", rev = "1eb3b976c31e7e4334b188f3abfa5cc2e5cae033" }
gotham_derive-02 = { package = "gotham_derive", git = "https://github.com/krallin/gotham-02.git", rev = "1eb3b976c31e7e4334b188f3abfa5cc2e5cae033" }
hyper-02 = { package = "hyper", version = "0.13.10", git = "https://github.com/krallin/hyper-02.git", rev = "9c9453c5ae3e92de4ba30edd81bb45371b8fa8d6" }
lru-disk-cache = { git = "https://github.com/mozilla/sccache", rev = "033ebaae69beeb0ac04e8c35d6ff1103487bd9a3" }
openssl = { git = "https://github.com/sfackler/rust-openssl", rev = "68fc8ba890d77986b06ca5ce29d1089285fbbcf9" }
openssl-sys = { git = "https://github.com/sfackler/rust-openssl", rev = "68fc8ba890d77986b06ca5ce29d1089285fbbcf9" }
petgraph = { git = "https://github.com/jkeljo/petgraph", rev = "e3e9dd8632d23973fdc0b42c1117d5e5fc5fa384" }
prost = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
prost-build = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
prost-derive = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
prost-types = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
r2d2_sqlite = { git = "https://github.com/jsgf/r2d2-sqlite.git", rev = "6d77a828ca0a3c507a3f58561532a1b6c66c7918" }
rustfilt = { git = "https://github.com/jsgf/rustfilt.git", rev = "8141fa7f1caee562ee8daffb2ddeca3d1f0d36e5" }
tokio-02 = { package = "tokio", version = "0.2.25", git = "https://github.com/krallin/tokio.git", rev = "818f943db36e32b7c928351c32fe69ac913409f9" }
tokio-compat = { package = "tokio-compat", version = "0.1.6", git = "https://github.com/krallin/tokio-compat.git", rev = "c2330c2a0141353e467cc5861988daa2eae7cb54" }
tokio-core = { git = "https://github.com/bolinfest/tokio-core", rev = "5f37aa3c627d56ee49154bc851d6930f5ab4398f" }
toml = { git = "https://github.com/jsgf/toml-rs", branch = "dotted-table-0.5.7" }

View File

@@ -0,0 +1,15 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
CREATE TABLE pushrebase_mutation_mapping (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
repo_id INTEGER NOT NULL,
predecessor_bcs_id BINARY(32) NOT NULL,
successor_bcs_id BINARY(32) NOT NULL
);
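-- Lookups are by (repo_id, successor_bcs_id): given a landed (successor)
-- commit, find the prepushrebase (predecessor) commits.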
CREATE INDEX repo_successor_key ON pushrebase_mutation_mapping (repo_id, successor_bcs_id);

View File

@@ -0,0 +1,46 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
mod save_mapping_pushrebase_hook;
mod sql_queries;
#[cfg(test)]
mod test;
use mononoke_types::{ChangesetId, RepositoryId};
use pushrebase_hook::PushrebaseHook;
pub use sql_queries::{
add_pushrebase_mapping, get_prepushrebase_ids, SqlPushrebaseMutationMapping,
SqlPushrebaseMutationMappingConnection,
};
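/// A single mapping record for one repository: the prepushrebase (predecessor)
/// changeset id and the landed (successor) changeset id.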
pub struct PushrebaseMutationMappingEntry {
repo_id: RepositoryId,
predecessor_bcs_id: ChangesetId,
successor_bcs_id: ChangesetId,
}
impl PushrebaseMutationMappingEntry {
fn new(
repo_id: RepositoryId,
predecessor_bcs_id: ChangesetId,
successor_bcs_id: ChangesetId,
) -> Self {
Self {
repo_id,
predecessor_bcs_id,
successor_bcs_id,
}
}
}
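/// Repo facet that hands out the pushrebase hook which saves the mapping.
/// Implementations may return `None` when the hook is disabled.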
#[facet::facet]
pub trait PushrebaseMutationMapping: Send + Sync {
fn get_hook(&self) -> Option<Box<dyn PushrebaseHook>>;
}

View File

@@ -0,0 +1,90 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#[cfg(test)]
mod test;
use anyhow::Result;
use async_trait::async_trait;
use bookmarks::BookmarkTransactionError;
use context::CoreContext;
use mononoke_types::{BonsaiChangesetMut, ChangesetId, RepositoryId};
use pushrebase_hook::{
PushrebaseCommitHook, PushrebaseHook, PushrebaseTransactionHook, RebasedChangesets,
};
use sql::Transaction;
use crate::sql_queries::add_pushrebase_mapping;
use crate::PushrebaseMutationMappingEntry;
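/// Pushrebase hook that records the prepushrebase (predecessor) to landed
/// (successor) changeset mapping as part of the bookmark-move transaction.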
pub struct SaveMappingPushrebaseHook {
repository_id: RepositoryId,
}
impl SaveMappingPushrebaseHook {
pub fn new(repository_id: RepositoryId) -> Box<dyn PushrebaseHook> {
Box::new(Self { repository_id })
}
}
#[async_trait]
impl PushrebaseHook for SaveMappingPushrebaseHook {
async fn prepushrebase(&self) -> Result<Box<dyn PushrebaseCommitHook>> {
Ok(Box::new(SaveMappingCommitHook {
repository_id: self.repository_id,
}))
}
}
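/// Commit-stage hook. It leaves the rebased changeset untouched and only
/// carries the repository id through to the transaction hook.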
pub struct SaveMappingCommitHook {
repository_id: RepositoryId,
}
#[async_trait]
impl PushrebaseCommitHook for SaveMappingCommitHook {
fn post_rebase_changeset(
&mut self,
_bcs_old: ChangesetId,
_bcs_new: &mut BonsaiChangesetMut,
) -> Result<()> {
Ok(())
}
async fn into_transaction_hook(
self: Box<Self>,
_ctx: &CoreContext,
rebased: &RebasedChangesets,
) -> Result<Box<dyn PushrebaseTransactionHook>> {
let entries = rebased
.iter()
.map(|(predecessor_bcs_id, (successor_bcs_id, _))| {
PushrebaseMutationMappingEntry::new(
self.repository_id,
*predecessor_bcs_id,
*successor_bcs_id,
)
})
.collect();
Ok(Box::new(SaveMappingTransactionHook { entries }))
}
}
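/// Transaction-stage hook: appends one mapping entry per rebased changeset
/// to the transaction that moves the bookmark.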
struct SaveMappingTransactionHook {
entries: Vec<PushrebaseMutationMappingEntry>,
}
#[async_trait]
impl PushrebaseTransactionHook for SaveMappingTransactionHook {
async fn populate_transaction(
&self,
_ctx: &CoreContext,
txn: Transaction,
) -> Result<Transaction, BookmarkTransactionError> {
let txn = add_pushrebase_mapping(txn, &self.entries[..]).await?;
Ok(txn)
}
}

View File

@@ -0,0 +1,83 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Result;
use blobrepo::BlobRepo;
use blobstore::Loadable;
use borrowed::borrowed;
use context::CoreContext;
use fbinit::FacebookInit;
use maplit::hashset;
use mononoke_types_mocks::repo;
use pushrebase::do_pushrebase_bonsai;
use test_repo_factory::TestRepoFactory;
use tests_utils::{bookmark, CreateCommitContext};
use super::SaveMappingPushrebaseHook;
use crate::get_prepushrebase_ids;
#[fbinit::test]
async fn pushrebase_saves_mapping(fb: FacebookInit) -> Result<()> {
let ctx = CoreContext::test_mock(fb);
let mut repo_factory = TestRepoFactory::new()?;
let repo: BlobRepo = repo_factory.with_id(repo::REPO_ONE).build()?;
borrowed!(ctx, repo);
let root = CreateCommitContext::new_root(ctx, repo).commit().await?;
let master = bookmark(ctx, repo, "master").set_to(root).await?;
let main = bookmark(ctx, repo, "main").set_to(root).await?;
let cs = CreateCommitContext::new(ctx, repo, vec![root])
.commit()
.await?
.load(ctx, repo.blobstore())
.await?;
let hooks = [SaveMappingPushrebaseHook::new(repo.get_repoid())];
// Pushrebase the same commit onto different bookmarks that are pointing to
// the same commit (root).
do_pushrebase_bonsai(
ctx,
repo,
&Default::default(),
&master,
&hashset![cs.clone()],
None,
&hooks,
)
.await?;
let rebased = do_pushrebase_bonsai(
ctx,
repo,
&Default::default(),
&main,
&hashset![cs.clone()],
None,
&hooks,
)
.await?
.head;
let prepushrebase_ids = get_prepushrebase_ids(
&repo_factory.metadata_db().read_connection,
repo.get_repoid(),
rebased,
)
.await?;
assert_eq!(
prepushrebase_ids,
vec![cs.get_changeset_id(), cs.get_changeset_id()]
);
Ok(())
}

View File

@@ -0,0 +1,118 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Result;
use mononoke_types::{ChangesetId, RepositoryId};
use pushrebase_hook::PushrebaseHook;
use sql::{queries, Connection, Transaction};
use sql_construct::{SqlConstruct, SqlConstructFromMetadataDatabaseConfig};
use sql_ext::SqlConnections;
use tunables::tunables;
use crate::save_mapping_pushrebase_hook::SaveMappingPushrebaseHook;
use crate::{PushrebaseMutationMapping, PushrebaseMutationMappingEntry};
queries! {
read SelectPrepushrebaseIds(
repo_id: RepositoryId,
successor_bcs_id: ChangesetId,
) -> (ChangesetId,) {
"SELECT predecessor_bcs_id
FROM pushrebase_mutation_mapping
WHERE repo_id = {repo_id} AND successor_bcs_id = {successor_bcs_id}"
}
write InsertMappingEntries(values:(
repo_id: RepositoryId,
predecessor_bcs_id: ChangesetId,
successor_bcs_id: ChangesetId,
)) {
insert_or_ignore,
"{insert_or_ignore}
INTO pushrebase_mutation_mapping
(repo_id, predecessor_bcs_id, successor_bcs_id)
VALUES {values}"
}
}
pub async fn add_pushrebase_mapping(
transaction: Transaction,
entries: &[PushrebaseMutationMappingEntry],
) -> Result<Transaction> {
let entries: Vec<_> = entries
.iter()
.map(
|PushrebaseMutationMappingEntry {
repo_id,
predecessor_bcs_id,
successor_bcs_id,
}| (repo_id, predecessor_bcs_id, successor_bcs_id),
)
.collect();
let (transaction, _) =
InsertMappingEntries::query_with_transaction(transaction, &entries).await?;
Ok(transaction)
}
// This is only used in tests, so there is no need to keep a SQL connection
// in the mapping; we can just pass the connection into the function.
pub async fn get_prepushrebase_ids(
connection: &Connection,
repo_id: RepositoryId,
successor_bcs_id: ChangesetId,
) -> Result<Vec<ChangesetId>> {
let rows = SelectPrepushrebaseIds::query(connection, &repo_id, &successor_bcs_id).await?;
Ok(rows.into_iter().map(|r| r.0).collect())
}
pub struct SqlPushrebaseMutationMapping {
repo_id: RepositoryId,
}
impl SqlPushrebaseMutationMapping {
pub fn new(repo_id: RepositoryId, _sql_conn: SqlPushrebaseMutationMappingConnection) -> Self {
Self { repo_id }
}
}
pub struct SqlPushrebaseMutationMappingConnection {}
impl SqlPushrebaseMutationMappingConnection {
pub fn with_repo_id(self, repo_id: RepositoryId) -> SqlPushrebaseMutationMapping {
SqlPushrebaseMutationMapping::new(repo_id, self)
}
}
impl SqlConstruct for SqlPushrebaseMutationMappingConnection {
const LABEL: &'static str = "pushrebase_mutation_mapping";
const CREATION_QUERY: &'static str =
include_str!("../schemas/sqlite-pushrebase-mutation-mapping.sql");
// We don't need to store the connections because we never use them after
// construction, but implementing SqlConstruct gets our SQL tables created in tests.
fn from_sql_connections(_connections: SqlConnections) -> Self {
Self {}
}
}
impl SqlConstructFromMetadataDatabaseConfig for SqlPushrebaseMutationMappingConnection {}
impl PushrebaseMutationMapping for SqlPushrebaseMutationMapping {
fn get_hook(&self) -> Option<Box<dyn PushrebaseHook>> {
if tunables().get_disable_save_mapping_pushrebase_hook() {
None
} else {
Some(SaveMappingPushrebaseHook::new(self.repo_id))
}
}
}

View File

@@ -0,0 +1,63 @@
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::Result;
use fbinit::FacebookInit;
use mononoke_types_mocks::{changesetid, repo};
use sql::Connection;
use sql_construct::SqlConstruct;
use sql_ext::open_sqlite_in_memory;
use crate::{
add_pushrebase_mapping, get_prepushrebase_ids, PushrebaseMutationMappingEntry,
SqlPushrebaseMutationMappingConnection,
};
#[fbinit::test]
async fn test_add_and_get(_fb: FacebookInit) -> Result<()> {
let conn = open_sqlite_in_memory()?;
conn.execute_batch(SqlPushrebaseMutationMappingConnection::CREATION_QUERY)?;
let conn = Connection::with_sqlite(conn);
let entries = vec![
PushrebaseMutationMappingEntry::new(
repo::REPO_ZERO,
changesetid::ONES_CSID,
changesetid::TWOS_CSID,
),
PushrebaseMutationMappingEntry::new(
repo::REPO_ONE,
changesetid::ONES_CSID,
changesetid::TWOS_CSID,
),
PushrebaseMutationMappingEntry::new(
repo::REPO_ONE,
changesetid::TWOS_CSID,
changesetid::TWOS_CSID,
),
PushrebaseMutationMappingEntry::new(
repo::REPO_ONE,
changesetid::ONES_CSID,
changesetid::THREES_CSID,
),
];
let txn = conn.start_transaction().await?;
let txn = add_pushrebase_mapping(txn, &entries).await?;
txn.commit().await?;
let mut prepushrebase_ids =
get_prepushrebase_ids(&conn, repo::REPO_ONE, changesetid::TWOS_CSID).await?;
prepushrebase_ids.sort();
assert_eq!(
prepushrebase_ids,
vec![changesetid::ONES_CSID, changesetid::TWOS_CSID]
);
Ok(())
}

View File

@@ -31,6 +31,7 @@ metaconfig_types = { version = "0.1.0", path = "../metaconfig/types" }
newfilenodes = { version = "0.1.0", path = "../newfilenodes" }
parking_lot = "0.10.2"
phases = { version = "0.1.0", path = "../phases" }
pushrebase_mutation_mapping = { version = "0.1.0", path = "../pushrebase_mutation_mapping" }
readonlyblob = { version = "0.1.0", path = "../blobstore/readonlyblob" }
redactedblobstore = { version = "0.1.0", path = "../blobstore/redactedblobstore" }
repo_blobstore = { version = "0.1.0", path = "../blobrepo/repo_blobstore" }

View File

@@ -50,6 +50,9 @@ use metaconfig_types::{
use newfilenodes::NewFilenodesBuilder;
use parking_lot::Mutex;
use phases::{ArcSqlPhasesFactory, SqlPhasesFactory};
use pushrebase_mutation_mapping::{
ArcPushrebaseMutationMapping, SqlPushrebaseMutationMappingConnection,
};
use readonlyblob::ReadOnlyBlobstore;
use redactedblobstore::{RedactedMetadata, SqlRedactedContentStore};
use repo_blobstore::{ArcRepoBlobstore, RepoBlobstoreArgs};
@@ -320,6 +323,9 @@ pub enum RepoFactoryError {
#[error("Error opening bonsai-svnrev mapping")]
BonsaiSvnrevMapping,
#[error("Error opening pushrebase mutation mapping")]
PushrebaseMutationMapping,
#[error("Error opening filenodes")]
Filenodes,
@@ -488,6 +494,20 @@ impl RepoFactory {
}
}
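/// Construct the pushrebase mutation mapping, backed by the repo's metadata
/// database.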
pub async fn pushrebase_mutation_mapping(
&self,
repo_config: &ArcRepoConfig,
) -> Result<ArcPushrebaseMutationMapping> {
let sql_factory = self
.sql_factory(&repo_config.storage_config.metadata)
.await?;
let conn = sql_factory
.open::<SqlPushrebaseMutationMappingConnection>()
.await
.context(RepoFactoryError::PushrebaseMutationMapping)?;
Ok(Arc::new(conn.with_repo_id(repo_config.repoid)))
}
pub async fn repo_bonsai_svnrev_mapping(
&self,
repo_config: &ArcRepoConfig,

View File

@@ -37,6 +37,7 @@ mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
mutable_counters = { version = "0.1.0", path = "../../mutable_counters" }
newfilenodes = { version = "0.1.0", path = "../../newfilenodes" }
phases = { version = "0.1.0", path = "../../phases" }
pushrebase_mutation_mapping = { version = "0.1.0", path = "../../pushrebase_mutation_mapping" }
redactedblobstore = { version = "0.1.0", path = "../../blobstore/redactedblobstore" }
rendezvous = { version = "0.1.0", path = "../../common/rendezvous" }
repo_blobstore = { version = "0.1.0", path = "../../blobrepo/repo_blobstore" }

View File

@@ -45,6 +45,9 @@ use mononoke_types::RepositoryId;
use mutable_counters::SqlMutableCounters;
use newfilenodes::NewFilenodesBuilder;
use phases::{ArcSqlPhasesFactory, SqlPhasesFactory};
use pushrebase_mutation_mapping::{
ArcPushrebaseMutationMapping, SqlPushrebaseMutationMappingConnection,
};
use redactedblobstore::RedactedMetadata;
use rendezvous::RendezVousOptions;
use repo_blobstore::{ArcRepoBlobstore, RepoBlobstoreArgs};
@@ -136,6 +139,7 @@ impl TestRepoFactory {
con.execute_batch(SqlBonsaiHgMappingBuilder::CREATION_QUERY)?;
con.execute_batch(SqlPhasesFactory::CREATION_QUERY)?;
con.execute_batch(SqlHgMutationStoreBuilder::CREATION_QUERY)?;
con.execute_batch(SqlPushrebaseMutationMappingConnection::CREATION_QUERY)?;
let metadata_db = SqlConnections::new_single(Connection::with_sqlite(con));
Ok(TestRepoFactory {
@@ -288,6 +292,18 @@ impl TestRepoFactory {
)))
}
/// Construct Pushrebase Mutation Mapping using the in-memory metadata
/// database.
pub fn pushrebase_mutation_mapping(
&self,
repo_identity: &ArcRepoIdentity,
) -> Result<ArcPushrebaseMutationMapping> {
Ok(Arc::new(
SqlPushrebaseMutationMappingConnection::from_sql_connections(self.metadata_db.clone())
.with_repo_id(repo_identity.id()),
))
}
/// Construct Repo Bonsai Svnrev Mapping using the in-memory metadata
/// database.
pub fn repo_bonsai_svnrev_mapping(

View File

@@ -155,6 +155,9 @@ pub struct MononokeTunables {
sql_connection_pool_stats_collection_interval_ms: AtomicI64,
bookmarks_cache_ttl_ms: AtomicI64,
// Disable running SaveMappingPushrebaseHook on every pushrebase
disable_save_mapping_pushrebase_hook: AtomicBool,
}
fn log_tunables(tunables: &TunablesStruct) -> String {