bonsai_*_mapping: remove auto_impl

Summary:
These mappings are nearly always stored as `Arc<dyn Mapping>`, so we can remove
the generics and switch to always expecting dynamic dispatch.

The generic versions of the `MemWrites` and `Caching` mappings can be converted
to always expect `Arc<dyn Mapping>` as the inner mapping (in practice they were
always given this, except in some unit tests).

The cache implementations no longer need to be generic over the inner cache type.

We can remove `Clone` from the mappings themselves to ensure we only ever clone
the `Arc`.

Differential Revision: D40357649

fbshipit-source-id: 988f96d9b68e827a4afedfeb785c9731783cc6b6
This commit is contained in:
Mark Juggurnauth-Thomas 2022-10-19 13:35:09 -07:00 committed by Facebook GitHub Bot
parent ea08914663
commit 2adc1b3450
18 changed files with 68 additions and 87 deletions

View File

@ -224,7 +224,7 @@ impl<'a> Blobimport<'a> {
let globalrevs_work = async {
if has_globalrev {
bulk_import_globalrevs(ctx, &globalrevs_store, changesets.iter()).await
bulk_import_globalrevs(ctx, globalrevs_store.as_ref(), changesets.iter()).await
} else {
Ok(())
}

View File

@ -17,7 +17,6 @@ use anyhow::Result;
use blobrepo::scribe::log_commit_to_scribe;
use blobrepo::BlobRepo;
use blobstore::Loadable;
use bonsai_hg_mapping::BonsaiHgMapping;
use bonsai_hg_mapping::BonsaiHgMappingArc;
use bonsai_hg_mapping::BonsaiHgMappingEntry;
use bonsai_hg_mapping::BonsaiHgMappingRef;

View File

@ -36,7 +36,6 @@ define_stats! {
adds: timeseries(Rate, Sum),
}
#[derive(Clone)]
pub struct SqlBonsaiGitMapping {
connections: SqlConnections,
repo_id: RepositoryId,

View File

@ -221,21 +221,17 @@ async fn test_add_with_transaction(fb: FacebookInit) -> Result<(), Error> {
// Inserting duplicates fails
let txn = conn.start_transaction().await?;
let res = {
let ctx = ctx.clone();
let mapping = mapping.clone();
async move {
let entry_conflict = BonsaiGitMappingEntry {
bcs_id: bonsai::TWOS_CSID,
git_sha1: THREES_GIT_SHA1,
};
mapping
.bulk_add_git_mapping_in_transaction(&ctx, &[entry_conflict], txn)
.await?
.commit()
.await?;
Result::<_, AddGitMappingErrorKind>::Ok(())
}
let res = async {
let entry_conflict = BonsaiGitMappingEntry {
bcs_id: bonsai::TWOS_CSID,
git_sha1: THREES_GIT_SHA1,
};
mapping
.bulk_add_git_mapping_in_transaction(&ctx, &[entry_conflict], txn)
.await?
.commit()
.await?;
Result::<_, AddGitMappingErrorKind>::Ok(())
}
.await;
assert_matches!(

View File

@ -16,7 +16,6 @@ abomonation = "0.7"
abomonation_derive = "0.5"
anyhow = "1.0.65"
async-trait = "0.1.56"
auto_impl = "0.4"
bonsai_globalrev_mapping_thrift = { version = "0.1.0", path = "if" }
bytes = { version = "1.1", features = ["serde"] }
cachelib = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }

View File

@ -7,6 +7,7 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use abomonation_derive::Abomonation;
use anyhow::anyhow;
@ -75,16 +76,19 @@ impl BonsaiGlobalrevMappingCacheEntry {
}
}
#[derive(Clone)]
pub struct CachingBonsaiGlobalrevMapping<T> {
pub struct CachingBonsaiGlobalrevMapping {
cachelib: CachelibHandler<BonsaiGlobalrevMappingCacheEntry>,
memcache: MemcacheHandler,
keygen: KeyGen,
inner: T,
inner: Arc<dyn BonsaiGlobalrevMapping>,
}
impl<T> CachingBonsaiGlobalrevMapping<T> {
pub fn new(fb: FacebookInit, inner: T, cachelib: VolatileLruCachePool) -> Self {
impl CachingBonsaiGlobalrevMapping {
pub fn new(
fb: FacebookInit,
inner: Arc<dyn BonsaiGlobalrevMapping>,
cachelib: VolatileLruCachePool,
) -> Self {
Self {
inner,
cachelib: cachelib.into(),
@ -95,7 +99,7 @@ impl<T> CachingBonsaiGlobalrevMapping<T> {
}
}
pub fn new_test(inner: T) -> Self {
pub fn new_test(inner: Arc<dyn BonsaiGlobalrevMapping>) -> Self {
Self {
inner,
cachelib: CachelibHandler::create_mock(),
@ -120,12 +124,9 @@ impl<T> CachingBonsaiGlobalrevMapping<T> {
}
#[async_trait]
impl<T> BonsaiGlobalrevMapping for CachingBonsaiGlobalrevMapping<T>
where
T: BonsaiGlobalrevMapping + Clone + Sync + Send + 'static,
{
impl BonsaiGlobalrevMapping for CachingBonsaiGlobalrevMapping {
fn repo_id(&self) -> RepositoryId {
self.inner.repo_id()
self.inner.as_ref().repo_id()
}
async fn bulk_import(
@ -133,7 +134,7 @@ where
ctx: &CoreContext,
entries: &[BonsaiGlobalrevMappingEntry],
) -> Result<(), Error> {
self.inner.bulk_import(ctx, entries).await
self.inner.as_ref().bulk_import(ctx, entries).await
}
async fn get(
@ -171,11 +172,14 @@ where
ctx: &CoreContext,
globalrev: Globalrev,
) -> Result<Option<Globalrev>, Error> {
self.inner.get_closest_globalrev(ctx, globalrev).await
self.inner
.as_ref()
.get_closest_globalrev(ctx, globalrev)
.await
}
async fn get_max(&self, ctx: &CoreContext) -> Result<Option<Globalrev>, Error> {
self.inner.get_max(ctx).await
self.inner.as_ref().get_max(ctx).await
}
}
@ -216,9 +220,9 @@ impl MemcacheEntity for BonsaiGlobalrevMappingCacheEntry {
}
}
type CacheRequest<'a, T> = (&'a CoreContext, &'a CachingBonsaiGlobalrevMapping<T>);
type CacheRequest<'a> = (&'a CoreContext, &'a CachingBonsaiGlobalrevMapping);
impl<T> EntityStore<BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_, T> {
impl EntityStore<BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_> {
fn cachelib(&self) -> &CachelibHandler<BonsaiGlobalrevMappingCacheEntry> {
let (_, mapping) = self;
&mapping.cachelib
@ -242,10 +246,7 @@ impl<T> EntityStore<BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_, T> {
}
#[async_trait]
impl<T> KeyedEntityStore<ChangesetId, BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_, T>
where
T: BonsaiGlobalrevMapping + Send + Sync + Clone + 'static,
{
impl KeyedEntityStore<ChangesetId, BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_> {
fn get_cache_key(&self, key: &ChangesetId) -> String {
let (_, mapping) = self;
format!("{}.bonsai.{}", mapping.repo_id(), key)
@ -260,6 +261,7 @@ where
let res = mapping
.inner
.as_ref()
.get(ctx, BonsaisOrGlobalrevs::Bonsai(keys.into_iter().collect()))
.await
.with_context(|| "Error fetching globalrevs from bonsais from SQL")?;
@ -278,10 +280,7 @@ where
}
#[async_trait]
impl<T> KeyedEntityStore<Globalrev, BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_, T>
where
T: BonsaiGlobalrevMapping + Send + Sync + Clone + 'static,
{
impl KeyedEntityStore<Globalrev, BonsaiGlobalrevMappingCacheEntry> for CacheRequest<'_> {
fn get_cache_key(&self, key: &Globalrev) -> String {
let (_, mapping) = self;
format!("{}.globalrev.{}", mapping.repo_id(), key.id())
@ -296,6 +295,7 @@ where
let res = mapping
.inner
.as_ref()
.get(
ctx,
BonsaisOrGlobalrevs::Globalrev(keys.into_iter().collect()),

View File

@ -10,7 +10,6 @@ mod sql;
use anyhow::Error;
use async_trait::async_trait;
use auto_impl::auto_impl;
use context::CoreContext;
use mononoke_types::ChangesetId;
use mononoke_types::Globalrev;
@ -75,7 +74,6 @@ impl From<Vec<Globalrev>> for BonsaisOrGlobalrevs {
#[facet::facet]
#[async_trait]
#[auto_impl(&, Arc, Box)]
pub trait BonsaiGlobalrevMapping: Send + Sync {
fn repo_id(&self) -> RepositoryId;

View File

@ -77,7 +77,6 @@ queries! {
}
}
#[derive(Clone)]
pub struct SqlBonsaiGlobalrevMapping {
connections: SqlConnections,
repo_id: RepositoryId,
@ -266,7 +265,7 @@ async fn select_mapping(
/// they are correct. Don't use this to assign new Globalrevs.
pub async fn bulk_import_globalrevs<'a>(
ctx: &'a CoreContext,
globalrevs_store: &'a impl BonsaiGlobalrevMapping,
globalrevs_store: &'a dyn BonsaiGlobalrevMapping,
changesets: impl IntoIterator<Item = &'a BonsaiChangeset>,
) -> Result<(), Error> {
let mut entries = vec![];

View File

@ -5,6 +5,8 @@
* GNU General Public License version 2.
*/
use std::sync::Arc;
use anyhow::Error;
use assert_matches::assert_matches;
use bonsai_globalrev_mapping::add_globalrevs;
@ -228,7 +230,8 @@ async fn test_closest_globalrev(fb: FacebookInit) -> Result<(), Error> {
#[fbinit::test]
async fn test_caching(fb: FacebookInit) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);
let mapping = SqlBonsaiGlobalrevMappingBuilder::with_sqlite_in_memory()?.build(REPO_ZERO);
let mapping =
Arc::new(SqlBonsaiGlobalrevMappingBuilder::with_sqlite_in_memory()?.build(REPO_ZERO));
let caching = CachingBonsaiGlobalrevMapping::new_test(mapping.clone());
let store = caching
.cachelib()

View File

@ -16,7 +16,6 @@ abomonation = "0.7"
abomonation_derive = "0.5"
anyhow = "1.0.65"
async-trait = "0.1.56"
auto_impl = "0.4"
bonsai_hg_mapping_entry_thrift = { version = "0.1.0", path = "if" }
bytes = { version = "1.1", features = ["serde"] }
cachelib = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }

View File

@ -131,7 +131,6 @@ impl From<HgChangesetId> for BonsaiOrHgChangesetId {
}
}
#[derive(Clone)]
pub struct CachingBonsaiHgMapping {
mapping: Arc<dyn BonsaiHgMapping>,
cache_pool: CachelibHandler<BonsaiHgMappingCacheEntry>,

View File

@ -10,7 +10,6 @@ use std::sync::Arc;
use anyhow::Error;
use anyhow::Result;
use async_trait::async_trait;
use auto_impl::auto_impl;
use context::CoreContext;
use context::PerfCounterType;
use fbinit::FacebookInit;
@ -95,7 +94,6 @@ impl From<Vec<HgChangesetId>> for BonsaiOrHgChangesetIds {
#[facet::facet]
#[async_trait]
#[auto_impl(&, Arc, Box)]
pub trait BonsaiHgMapping: Send + Sync {
fn repo_id(&self) -> RepositoryId;
@ -186,7 +184,6 @@ impl RendezVousConnection {
}
}
#[derive(Clone)]
pub struct SqlBonsaiHgMapping {
write_connection: Connection,
read_connection: RendezVousConnection,

View File

@ -30,17 +30,16 @@ type Cache = (
HashMap<HgChangesetId, ChangesetId>,
);
#[derive(Clone)]
pub struct MemWritesBonsaiHgMapping<T: BonsaiHgMapping + Clone + 'static> {
inner: T,
pub struct MemWritesBonsaiHgMapping {
inner: Arc<dyn BonsaiHgMapping>,
cache: Arc<Mutex<Cache>>,
no_access_to_inner: Arc<AtomicBool>,
readonly: Arc<AtomicBool>,
save_noop_writes: Arc<AtomicBool>,
}
impl<T: BonsaiHgMapping + Clone + 'static> MemWritesBonsaiHgMapping<T> {
pub fn new(inner: T) -> Self {
impl MemWritesBonsaiHgMapping {
pub fn new(inner: Arc<dyn BonsaiHgMapping>) -> Self {
Self {
inner,
cache: Default::default(),
@ -65,7 +64,7 @@ impl<T: BonsaiHgMapping + Clone + 'static> MemWritesBonsaiHgMapping<T> {
}
}
impl<T: BonsaiHgMapping + Clone + 'static> MemWritesBonsaiHgMapping<T> {
impl MemWritesBonsaiHgMapping {
async fn get_from_cache_and_inner<I, O>(
&self,
ctx: &CoreContext,
@ -101,7 +100,7 @@ impl<T: BonsaiHgMapping + Clone + 'static> MemWritesBonsaiHgMapping<T> {
}
#[async_trait]
impl<T: BonsaiHgMapping + Clone + 'static> BonsaiHgMapping for MemWritesBonsaiHgMapping<T> {
impl BonsaiHgMapping for MemWritesBonsaiHgMapping {
fn repo_id(&self) -> RepositoryId {
self.inner.repo_id()
}

View File

@ -16,7 +16,6 @@ abomonation = "0.7"
abomonation_derive = "0.5"
anyhow = "1.0.65"
async-trait = "0.1.56"
auto_impl = "0.4"
bonsai_svnrev_mapping_thrift = { version = "0.1.0", path = "if" }
bytes = { version = "1.1", features = ["serde"] }
cachelib = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }

View File

@ -7,6 +7,7 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use abomonation_derive::Abomonation;
use anyhow::anyhow;
@ -79,16 +80,20 @@ impl BonsaiSvnrevMappingCacheEntry {
}
}
}
#[derive(Clone)]
pub struct CachingBonsaiSvnrevMapping<T> {
pub struct CachingBonsaiSvnrevMapping {
cachelib: CachelibHandler<BonsaiSvnrevMappingCacheEntry>,
memcache: MemcacheHandler,
keygen: KeyGen,
inner: T,
inner: Arc<dyn BonsaiSvnrevMapping>,
}
impl<T> CachingBonsaiSvnrevMapping<T> {
pub fn new(fb: FacebookInit, inner: T, cachelib: VolatileLruCachePool) -> Self {
impl CachingBonsaiSvnrevMapping {
pub fn new(
fb: FacebookInit,
inner: Arc<dyn BonsaiSvnrevMapping>,
cachelib: VolatileLruCachePool,
) -> Self {
Self {
inner,
cachelib: cachelib.into(),
@ -99,7 +104,7 @@ impl<T> CachingBonsaiSvnrevMapping<T> {
}
}
pub fn new_test(inner: T) -> Self {
pub fn new_test(inner: Arc<dyn BonsaiSvnrevMapping>) -> Self {
Self {
inner,
cachelib: CachelibHandler::create_mock(),
@ -124,12 +129,9 @@ impl<T> CachingBonsaiSvnrevMapping<T> {
}
#[async_trait]
impl<T> BonsaiSvnrevMapping for CachingBonsaiSvnrevMapping<T>
where
T: BonsaiSvnrevMapping + Clone + Sync + Send + 'static,
{
impl BonsaiSvnrevMapping for CachingBonsaiSvnrevMapping {
fn repo_id(&self) -> RepositoryId {
self.inner.repo_id()
self.inner.as_ref().repo_id()
}
async fn bulk_import(
@ -137,7 +139,7 @@ where
ctx: &CoreContext,
entries: &[BonsaiSvnrevMappingEntry],
) -> Result<(), Error> {
self.inner.bulk_import(ctx, entries).await
self.inner.as_ref().bulk_import(ctx, entries).await
}
async fn get(
@ -208,9 +210,9 @@ impl MemcacheEntity for BonsaiSvnrevMappingCacheEntry {
}
}
type CacheRequest<'a, T> = (&'a CoreContext, &'a CachingBonsaiSvnrevMapping<T>);
type CacheRequest<'a> = (&'a CoreContext, &'a CachingBonsaiSvnrevMapping);
impl<T> EntityStore<BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_, T> {
impl EntityStore<BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_> {
fn cachelib(&self) -> &CachelibHandler<BonsaiSvnrevMappingCacheEntry> {
let (_, mapping) = self;
&mapping.cachelib
@ -234,10 +236,7 @@ impl<T> EntityStore<BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_, T> {
}
#[async_trait]
impl<T> KeyedEntityStore<ChangesetId, BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_, T>
where
T: BonsaiSvnrevMapping + Clone + Send + Sync + 'static,
{
impl KeyedEntityStore<ChangesetId, BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_> {
fn get_cache_key(&self, key: &ChangesetId) -> String {
let (_, mapping) = self;
format!("{}.bonsai.{}", mapping.repo_id(), key)
@ -252,6 +251,7 @@ where
let res = mapping
.inner
.as_ref()
.get(ctx, BonsaisOrSvnrevs::Bonsai(keys.into_iter().collect()))
.await
.with_context(|| "Error fetching svnrevs from bonsais from SQL")?;
@ -270,10 +270,7 @@ where
}
#[async_trait]
impl<T> KeyedEntityStore<Svnrev, BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_, T>
where
T: BonsaiSvnrevMapping + Clone + Send + Sync + 'static,
{
impl KeyedEntityStore<Svnrev, BonsaiSvnrevMappingCacheEntry> for CacheRequest<'_> {
fn get_cache_key(&self, key: &Svnrev) -> String {
let (_, mapping) = self;
format!("{}.svnrev.{}", mapping.repo_id(), key.id())
@ -288,6 +285,7 @@ where
let res = mapping
.inner
.as_ref()
.get(ctx, BonsaisOrSvnrevs::Svnrev(keys.into_iter().collect()))
.await
.with_context(|| "Error fetching bonsais from svnrevs from SQL")?;

View File

@ -10,7 +10,6 @@ mod sql;
use anyhow::Error;
use async_trait::async_trait;
use auto_impl::auto_impl;
use context::CoreContext;
use mononoke_types::BonsaiChangeset;
use mononoke_types::ChangesetId;
@ -76,7 +75,6 @@ impl From<Vec<Svnrev>> for BonsaisOrSvnrevs {
#[facet::facet]
#[async_trait]
#[auto_impl(&, Arc, Box)]
pub trait BonsaiSvnrevMapping: Send + Sync {
fn repo_id(&self) -> RepositoryId;

View File

@ -56,7 +56,6 @@ queries! {
}
}
#[derive(Clone)]
pub struct SqlBonsaiSvnrevMapping {
connections: SqlConnections,
repo_id: RepositoryId,

View File

@ -75,7 +75,7 @@ pub fn upload<P: AsRef<Path>>(
let ctx = ctx.clone();
let store = globalrevs_store.clone();
async move { bulk_import_globalrevs(&ctx, &store, chunk.iter()).await }
async move { bulk_import_globalrevs(&ctx, store.as_ref(), chunk.iter()).await }
.boxed()
.compat()
})