mirror of
https://github.com/facebook/sapling.git
synced 2024-10-10 08:47:12 +03:00
mononoke: memcache for bonsai_hg_mapping
Reviewed By: jsgf Differential Revision: D10505523 fbshipit-source-id: 2e12b61d6f03215489322b09e65e766d0a252866
This commit is contained in:
parent
b058eabc13
commit
4ee6aa6c03
21
bonsai-hg-mapping/if/bonsai_hg_mapping.thrift
Normal file
21
bonsai-hg-mapping/if/bonsai_hg_mapping.thrift
Normal file
@ -0,0 +1,21 @@
|
||||
// Copyright (c) 2018-present, Facebook, Inc.
// All Rights Reserved.
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.

include "scm/mononoke/mercurial-types/if/mercurial_thrift.thrift"
include "scm/mononoke/mononoke-types/if/mononoke_types_thrift.thrift"

# Memcache constants. Should be changed when we want to invalidate memcache
# entries
const i32 MC_CODEVER = 0
const i32 MC_SITEVER = 0

# Newtype over the numeric repository id (hs.newtype emits a Haskell newtype).
typedef i32 RepoId (hs.newtype)

# One row of the bonsai <-> hg changeset mapping, as serialized into memcache.
struct BonsaiHgMappingEntry {
  1: required RepoId repo_id,
  2: required mononoke_types_thrift.ChangesetId bcs_id,
  3: required mercurial_thrift.HgNodeHash hg_cs_id,
}
|
165
bonsai-hg-mapping/src/caching.rs
Normal file
165
bonsai-hg-mapping/src/caching.rs
Normal file
@ -0,0 +1,165 @@
|
||||
// Copyright (c) 2018-present, Facebook, Inc.
|
||||
// All Rights Reserved.
|
||||
//
|
||||
// This software may be used and distributed according to the terms of the
|
||||
// GNU General Public License version 2 or any later version.
|
||||
|
||||
use super::{bonsai_hg_mapping_entry_thrift as thrift, BonsaiHgMapping, BonsaiHgMappingEntry,
|
||||
BonsaiOrHgChangesetId};
|
||||
use cachelib::{get_cached_or_fill, LruCachePool};
|
||||
use errors::Error;
|
||||
use futures::{future, Future};
|
||||
use futures_ext::{BoxFuture, FutureExt};
|
||||
use memcache::{KeyGen, MemcacheClient, MEMCACHE_VALUE_MAX_SIZE};
|
||||
use mercurial_types::RepositoryId;
|
||||
use rust_thrift::compact_protocol;
|
||||
use stats::Timeseries;
|
||||
use std::sync::Arc;
|
||||
use tokio;
|
||||
|
||||
// Cache-effectiveness counters: memcache hits/misses plus the two failure
// modes (memcache API errors and payloads that fail thrift decoding).
define_stats! {
    prefix = "mononoke.bonsai_hg_mapping";
    memcache_hit: timeseries("memcache.hit"; RATE, SUM),
    memcache_miss: timeseries("memcache.miss"; RATE, SUM),
    memcache_internal_err: timeseries("memcache.internal_err"; RATE, SUM),
    memcache_deserialize_err: timeseries("memcache.deserialize_err"; RATE, SUM),
}
|
||||
|
||||
/// Caching wrapper around a `BonsaiHgMapping`: `get` consults an in-process
/// cachelib pool first, then memcache, and only then the wrapped mapping.
pub struct CachingBonsaiHgMapping {
    // Source of truth, consulted on cache miss.
    mapping: Arc<BonsaiHgMapping>,
    // Local (in-process) cachelib pool.
    cache_pool: LruCachePool,
    // Shared remote cache.
    memcache: MemcacheClient,
    // Builds memcache keys; versioned by thrift MC_CODEVER / MC_SITEVER.
    keygen: KeyGen,
}
|
||||
|
||||
impl CachingBonsaiHgMapping {
|
||||
pub fn new(mapping: Arc<BonsaiHgMapping>, cache_pool: LruCachePool) -> Self {
|
||||
let key_prefix = "scm.mononoke.bonsai_hg_mapping";
|
||||
|
||||
Self {
|
||||
mapping,
|
||||
cache_pool,
|
||||
memcache: MemcacheClient::new(),
|
||||
keygen: KeyGen::new(
|
||||
key_prefix,
|
||||
thrift::MC_CODEVER as u32,
|
||||
thrift::MC_SITEVER as u32,
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BonsaiHgMapping for CachingBonsaiHgMapping {
|
||||
fn add(&self, entry: BonsaiHgMappingEntry) -> BoxFuture<bool, Error> {
|
||||
self.mapping.add(entry)
|
||||
}
|
||||
|
||||
fn get(
|
||||
&self,
|
||||
repo_id: RepositoryId,
|
||||
cs: BonsaiOrHgChangesetId,
|
||||
) -> BoxFuture<Option<BonsaiHgMappingEntry>, Error> {
|
||||
let cache_key = get_cache_key(&repo_id, &cs);
|
||||
get_cached_or_fill(&self.cache_pool, cache_key, || {
|
||||
cloned!(self.keygen, self.mapping, self.memcache);
|
||||
get_mapping_from_memcache(&memcache, &keygen, &repo_id, &cs)
|
||||
.then(move |res| match res {
|
||||
Ok(res) => {
|
||||
return future::ok(Some(res)).left_future();
|
||||
}
|
||||
Err(()) => mapping
|
||||
.get(repo_id, cs)
|
||||
.inspect(move |res| {
|
||||
if let Some(cs_entry) = res {
|
||||
schedule_fill_mapping_memcache(
|
||||
memcache,
|
||||
keygen,
|
||||
repo_id,
|
||||
&cs,
|
||||
cs_entry.clone(),
|
||||
)
|
||||
}
|
||||
})
|
||||
.right_future(),
|
||||
})
|
||||
.boxify()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Local error type to help with proper logging metrics
|
||||
// Local error type to help with proper logging metrics: each variant maps to
// exactly one STATS counter bumped in `get_mapping_from_memcache`.
enum ErrorKind {
    // error came from calling memcache API
    MemcacheInternal,
    // value returned from memcache was None
    Missing,
    // deserialization of memcache data to Rust structures via thrift failed
    Deserialization,
}
|
||||
|
||||
/// Key for the in-process cachelib pool: `<repo prefix>.<debug-formatted id>`.
fn get_cache_key(repo_id: &RepositoryId, cs: &BonsaiOrHgChangesetId) -> String {
    // `format!` already yields a `String`; the trailing `.to_string()` in the
    // original was a redundant re-allocation-free no-op and is dropped.
    format!("{}.{:?}", repo_id.prefix(), cs)
}
|
||||
|
||||
fn get_mc_key_for_mapping(
|
||||
keygen: &KeyGen,
|
||||
repo_id: &RepositoryId,
|
||||
bonsai_or_hg: &BonsaiOrHgChangesetId,
|
||||
) -> String {
|
||||
keygen.key(get_cache_key(repo_id, bonsai_or_hg))
|
||||
}
|
||||
|
||||
fn get_mapping_from_memcache(
|
||||
memcache: &MemcacheClient,
|
||||
keygen: &KeyGen,
|
||||
repo_id: &RepositoryId,
|
||||
bonsai_or_hg: &BonsaiOrHgChangesetId,
|
||||
) -> impl Future<Item = BonsaiHgMappingEntry, Error = ()> {
|
||||
memcache
|
||||
.get(get_mc_key_for_mapping(keygen, repo_id, bonsai_or_hg))
|
||||
.map_err(|()| ErrorKind::MemcacheInternal)
|
||||
.and_then(|maybe_serialized| maybe_serialized.ok_or(ErrorKind::Missing))
|
||||
.and_then(|serialized| {
|
||||
let thrift_entry: ::std::result::Result<
|
||||
thrift::BonsaiHgMappingEntry,
|
||||
ErrorKind,
|
||||
> = compact_protocol::deserialize(Vec::from(serialized))
|
||||
.map_err(|_| ErrorKind::Deserialization);
|
||||
|
||||
let thrift_entry = thrift_entry.and_then(|entry| {
|
||||
BonsaiHgMappingEntry::from_thrift(entry).map_err(|_| ErrorKind::Deserialization)
|
||||
});
|
||||
thrift_entry
|
||||
})
|
||||
.then(move |res| {
|
||||
match res {
|
||||
Ok(res) => {
|
||||
STATS::memcache_hit.add_value(1);
|
||||
return Ok(res);
|
||||
}
|
||||
Err(ErrorKind::MemcacheInternal) => STATS::memcache_internal_err.add_value(1),
|
||||
Err(ErrorKind::Missing) => STATS::memcache_miss.add_value(1),
|
||||
Err(ErrorKind::Deserialization) => STATS::memcache_deserialize_err.add_value(1),
|
||||
}
|
||||
Err(())
|
||||
})
|
||||
}
|
||||
|
||||
fn schedule_fill_mapping_memcache(
|
||||
memcache: MemcacheClient,
|
||||
keygen: KeyGen,
|
||||
repo_id: RepositoryId,
|
||||
bonsai_or_hg: &BonsaiOrHgChangesetId,
|
||||
mapping_entry: BonsaiHgMappingEntry,
|
||||
) {
|
||||
let serialized = compact_protocol::serialize(&mapping_entry.into_thrift());
|
||||
|
||||
// Quite unlikely that single changeset id will be bigger than MEMCACHE_VALUE_MAX_SIZE
|
||||
// It's probably not even worth logging it
|
||||
if serialized.len() < MEMCACHE_VALUE_MAX_SIZE {
|
||||
tokio::spawn(memcache.set(
|
||||
get_mc_key_for_mapping(&keygen, &repo_id, &bonsai_or_hg),
|
||||
serialized,
|
||||
));
|
||||
}
|
||||
}
|
@ -12,7 +12,10 @@
|
||||
extern crate abomonation;
|
||||
#[macro_use]
|
||||
extern crate abomonation_derive;
|
||||
extern crate bonsai_hg_mapping_entry_thrift;
|
||||
extern crate cachelib;
|
||||
#[macro_use]
|
||||
extern crate cloned;
|
||||
extern crate db_conn;
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
@ -22,6 +25,7 @@ extern crate futures;
|
||||
extern crate heapsize;
|
||||
#[macro_use]
|
||||
extern crate heapsize_derive;
|
||||
extern crate memcache;
|
||||
extern crate tokio;
|
||||
|
||||
extern crate db;
|
||||
@ -30,6 +34,7 @@ extern crate futures_ext;
|
||||
extern crate lazy_static;
|
||||
extern crate mercurial_types;
|
||||
extern crate mononoke_types;
|
||||
extern crate rust_thrift;
|
||||
#[macro_use]
|
||||
extern crate stats;
|
||||
|
||||
@ -42,17 +47,18 @@ use diesel::prelude::*;
|
||||
use diesel::r2d2::{ConnectionManager, PooledConnection};
|
||||
use diesel::result::{DatabaseErrorKind, Error as DieselError};
|
||||
|
||||
use cachelib::{get_cached_or_fill, LruCachePool};
|
||||
use futures::Future;
|
||||
use futures_ext::{asynchronize, BoxFuture, FutureExt};
|
||||
use mercurial_types::{HgChangesetId, RepositoryId};
|
||||
use mercurial_types::{HgChangesetId, HgNodeHash, RepositoryId};
|
||||
use mononoke_types::ChangesetId;
|
||||
use stats::Timeseries;
|
||||
|
||||
mod caching;
|
||||
mod errors;
|
||||
mod models;
|
||||
mod schema;
|
||||
|
||||
pub use caching::CachingBonsaiHgMapping;
|
||||
pub use errors::*;
|
||||
use models::BonsaiHgMappingRow;
|
||||
use schema::bonsai_hg_mapping;
|
||||
@ -71,6 +77,24 @@ pub struct BonsaiHgMappingEntry {
|
||||
pub bcs_id: ChangesetId,
|
||||
}
|
||||
|
||||
impl BonsaiHgMappingEntry {
    /// Build an entry from its thrift (memcache wire) representation.
    /// Fails if either changeset id in the thrift struct is malformed.
    fn from_thrift(entry: bonsai_hg_mapping_entry_thrift::BonsaiHgMappingEntry) -> Result<Self> {
        Ok(Self {
            repo_id: RepositoryId::new(entry.repo_id.0),
            hg_cs_id: HgChangesetId::new(HgNodeHash::from_thrift(entry.hg_cs_id)?),
            bcs_id: ChangesetId::from_thrift(entry.bcs_id)?,
        })
    }

    /// Convert this entry into its thrift representation for memcache storage.
    /// Inverse of `from_thrift`; consumes `self` (all fields are moved out).
    fn into_thrift(self) -> bonsai_hg_mapping_entry_thrift::BonsaiHgMappingEntry {
        bonsai_hg_mapping_entry_thrift::BonsaiHgMappingEntry {
            repo_id: bonsai_hg_mapping_entry_thrift::RepoId(self.repo_id.id()),
            hg_cs_id: self.hg_cs_id.into_nodehash().into_thrift(),
            bcs_id: self.bcs_id.into_thrift(),
        }
    }
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, HeapSizeOf)]
|
||||
pub enum BonsaiOrHgChangesetId {
|
||||
Bonsai(ChangesetId),
|
||||
@ -133,37 +157,6 @@ impl BonsaiHgMapping for Arc<BonsaiHgMapping> {
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): these are the lines this diff removes from lib.rs — the
// memcache-aware struct of the same name in caching.rs supersedes them.
pub struct CachingBonsaiHgMapping {
    mapping: Arc<BonsaiHgMapping>,
    cache_pool: LruCachePool,
}
|
||||
|
||||
impl CachingBonsaiHgMapping {
    /// Wrap `mapping` with an in-process cachelib cache pool only
    /// (this pre-memcache constructor is removed by the diff shown here).
    pub fn new(mapping: Arc<BonsaiHgMapping>, cache_pool: LruCachePool) -> Self {
        Self {
            mapping,
            cache_pool,
        }
    }
}
|
||||
|
||||
impl BonsaiHgMapping for CachingBonsaiHgMapping {
    // Writes bypass the cache entirely.
    fn add(&self, entry: BonsaiHgMappingEntry) -> BoxFuture<bool, Error> {
        self.mapping.add(entry)
    }

    // Reads consult only the local cachelib pool before the underlying
    // mapping; no memcache layer in this (removed) version.
    fn get(
        &self,
        repo_id: RepositoryId,
        cs: BonsaiOrHgChangesetId,
    ) -> BoxFuture<Option<BonsaiHgMappingEntry>, Error> {
        let cache_key = format!("{}.{:?}", repo_id.prefix(), cs).to_string();
        get_cached_or_fill(&self.cache_pool, cache_key, || {
            self.mapping.get(repo_id, cs)
        })
    }
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SqliteBonsaiHgMapping {
|
||||
inner: SqliteConnInner,
|
||||
|
Loading…
Reference in New Issue
Block a user