Update scs to make it use globalrevs

Summary: Added an option to use globalrevs, so we can now fetch commits by their globalrevs and query a commit's globalrev.

Reviewed By: krallin

Differential Revision: D18324846

fbshipit-source-id: 73e69b697dd7b84b0b15e435a95191243cc75a19
Daniel Grzegorzewski, 2019-11-07 12:37:43 -08:00, committed by Facebook Github Bot
parent 124a49a8bc
commit 57aba7e348
9 changed files with 227 additions and 18 deletions
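
The change threads a BonsaiGlobalrevMapping through repo construction and exposes it at three layers: BlobRepo (futures 0.1 methods), ChangesetContext/RepoContext (async/await), and the scs thrift surface (CommitIdentityScheme::GLOBALREV). A hedged sketch of the new BlobRepo calls follows; `repo`, `runtime`, and the globalrev value are illustrative, not part of the diff:

    // Sketch only: assumes `repo: BlobRepo` and a tokio 0.1 `runtime`.
    // Both lookups and Globalrev::new are introduced in this diff.
    let bcs = runtime.block_on(repo.get_bonsai_from_globalrev(Globalrev::new(1_000_000)))?;
    if let Some(bcs_id) = bcs {
        // Reverse direction: ask which globalrev a bonsai commit maps to.
        let rev = runtime.block_on(repo.get_globalrev_from_bonsai(bcs_id))?;
        assert_eq!(rev.map(|r| r.id()), Some(1_000_000));
    }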

View File

@@ -10,6 +10,7 @@
//! to all underlying stores, but which has all the caching enabled.
use blobrepo::BlobRepo;
use blobstore::Blobstore;
use bonsai_globalrev_mapping::SqlBonsaiGlobalrevMapping;
use bonsai_hg_mapping::{
BonsaiHgMapping, BonsaiHgMappingEntry, BonsaiOrHgChangesetIds, CachingBonsaiHgMapping,
SqlBonsaiHgMapping,
@@ -102,6 +103,8 @@ pub fn new_benchmark_repo(fb: FacebookInit, settings: DelaySettings) -> Result<B
))
};
let bonsai_globalrev_mapping = Arc::new(SqlBonsaiGlobalrevMapping::with_sqlite_in_memory()?);
let bonsai_hg_mapping = {
let mapping: Arc<dyn BonsaiHgMapping> = Arc::new(DelayedBonsaiHgMapping::new(
SqlBonsaiHgMapping::with_sqlite_in_memory()?,
@@ -131,6 +134,7 @@ pub fn new_benchmark_repo(fb: FacebookInit, settings: DelaySettings) -> Result<B
blobstore,
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
Arc::new(DummyLease {}),
FilestoreConfig::default(),

View File

@@ -24,6 +24,7 @@ pub enum StateOpenError {
Bookmarks,
Changesets,
Filenodes,
BonsaiGlobalrevMapping,
BonsaiHgMapping,
}
@@ -34,6 +35,7 @@ impl fmt::Display for StateOpenError {
StateOpenError::Bookmarks => write!(f, "bookmarks"),
StateOpenError::Changesets => write!(f, "changesets"),
StateOpenError::Filenodes => write!(f, "filenodes"),
StateOpenError::BonsaiGlobalrevMapping => write!(f, "bonsai_globalrev_mapping"),
StateOpenError::BonsaiHgMapping => write!(f, "bonsai_hg_mapping"),
}
}

View File

@@ -10,6 +10,7 @@ use blobrepo::BlobRepo;
use blobrepo_errors::*;
use blobstore::Blobstore;
use blobstore_factory::{make_blobstore, make_sql_factory, SqlFactory};
use bonsai_globalrev_mapping::SqlBonsaiGlobalrevMapping;
use bonsai_hg_mapping::{CachingBonsaiHgMapping, SqlBonsaiHgMapping};
use bookmarks::{Bookmarks, CachedBookmarks};
use cacheblob::{
@@ -203,6 +204,10 @@ pub fn new_memblob_empty_with_id(
SqlChangesets::with_sqlite_in_memory()
.chain_err(ErrorKind::StateOpen(StateOpenError::Changesets))?,
),
Arc::new(
SqlBonsaiGlobalrevMapping::with_sqlite_in_memory()
.chain_err(ErrorKind::StateOpen(StateOpenError::BonsaiGlobalrevMapping))?,
),
Arc::new(
SqlBonsaiHgMapping::with_sqlite_in_memory()
.chain_err(ErrorKind::StateOpen(StateOpenError::BonsaiHgMapping))?,
@@ -237,6 +242,11 @@ fn new_development(
.chain_err(ErrorKind::StateOpen(StateOpenError::Changesets))
.from_err();
let bonsai_globalrev_mapping = sql_factory
.open::<SqlBonsaiGlobalrevMapping>()
.chain_err(ErrorKind::StateOpen(StateOpenError::BonsaiGlobalrevMapping))
.from_err();
let bonsai_hg_mapping = sql_factory
.open::<SqlBonsaiHgMapping>()
.chain_err(ErrorKind::StateOpen(StateOpenError::BonsaiHgMapping))
@@ -244,12 +254,18 @@ fn new_development(
bookmarks
.join3(unredacted_blobstore, redacted_blobs)
-.join4(filenodes, changesets, bonsai_hg_mapping)
+.join5(
+    filenodes,
+    changesets,
+    bonsai_globalrev_mapping,
+    bonsai_hg_mapping,
+)
.map({
move |(
(bookmarks, blobstore, redacted_blobs),
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
)| {
let scuba_builder = ScubaSampleBuilder::with_opt_table(fb, scuba_censored_table);
@@ -259,6 +275,7 @@ fn new_development(
RepoBlobstoreArgs::new(blobstore, redacted_blobs, repoid, scuba_builder),
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
Arc::new(InProcessLease::new()),
filestore_config,
@@ -307,16 +324,23 @@ fn new_production(
let filenodes_tier_and_filenodes = sql_factory.open_filenodes();
let bookmarks = sql_factory.open::<SqlBookmarks>();
let changesets = sql_factory.open::<SqlChangesets>();
let bonsai_globalrev_mapping = sql_factory.open::<SqlBonsaiGlobalrevMapping>();
let bonsai_hg_mapping = sql_factory.open::<SqlBonsaiHgMapping>();
filenodes_tier_and_filenodes
.join3(blobstore, redacted_blobs)
-.join4(bookmarks, changesets, bonsai_hg_mapping)
+.join5(
+    bookmarks,
+    changesets,
+    bonsai_globalrev_mapping,
+    bonsai_hg_mapping,
+)
.map(
move |(
((filenodes_tier, filenodes), blobstore, redacted_blobs),
bookmarks,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
)| {
let filenodes = CachingFilenodes::new(
@@ -364,6 +388,7 @@ fn new_production(
RepoBlobstoreArgs::new(blobstore, redacted_blobs, repoid, scuba_builder),
Arc::new(filenodes),
changesets,
bonsai_globalrev_mapping,
Arc::new(bonsai_hg_mapping),
Arc::new(changeset_fetcher_factory),
Arc::new(derive_data_lease),

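The factory functions above grow their join3/join4 chains into join5 so the globalrev mapping is opened alongside the other stores. join5 is the futures 0.1 combinator that drives five futures to completion and yields a flat 5-tuple, failing fast on the first error; a standalone sketch with toy values:

    use futures::future::{ok, Future};

    // futures 0.1: five futures in, one 5-tuple out; any error short-circuits.
    let sum = ok::<_, ()>(1)
        .join5(ok(2), ok(3), ok(4), ok(5))
        .map(|(a, b, c, d, e)| a + b + c + d + e)
        .wait();
    assert_eq!(sum, Ok(15));
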
View File

@@ -12,6 +12,7 @@ use crate::derive_hg_manifest::derive_hg_manifest;
use crate::errors::*;
use crate::repo_commit::*;
use blobstore::{Blobstore, Loadable, LoadableError};
use bonsai_globalrev_mapping::{BonsaiGlobalrevMapping, BonsaisOrGlobalrevs};
use bonsai_hg_mapping::{BonsaiHgMapping, BonsaiHgMappingEntry, BonsaiOrHgChangesetIds};
use bookmarks::{
self, Bookmark, BookmarkName, BookmarkPrefix, BookmarkUpdateLogEntry, BookmarkUpdateReason,
@@ -44,8 +45,8 @@ use mercurial_types::{
UploadHgFileEntry, UploadHgNodeHash,
},
manifest::Content,
-Changeset, FileBytes, HgChangesetId, HgEntry, HgEntryId, HgFileEnvelope, HgFileNodeId,
-HgManifest, HgManifestId, HgNodeHash, HgParents, RepoPath, Type,
+Changeset, FileBytes, Globalrev, HgChangesetId, HgEntry, HgEntryId, HgFileEnvelope,
+HgFileNodeId, HgManifest, HgManifestId, HgNodeHash, HgParents, RepoPath, Type,
};
use mononoke_types::{
hash::Sha256, Blob, BlobstoreBytes, BlobstoreValue, BonsaiChangeset, ChangesetId, ContentId,
@@ -112,6 +113,7 @@ pub struct BlobRepo {
bookmarks: Arc<dyn Bookmarks>,
filenodes: Arc<dyn Filenodes>,
changesets: Arc<dyn Changesets>,
bonsai_globalrev_mapping: Arc<dyn BonsaiGlobalrevMapping>,
bonsai_hg_mapping: Arc<dyn BonsaiHgMapping>,
repoid: RepositoryId,
// Returns new ChangesetFetcher that can be used by operation that work with commit graph
@@ -128,6 +130,7 @@ impl BlobRepo {
blobstore_args: RepoBlobstoreArgs,
filenodes: Arc<dyn Filenodes>,
changesets: Arc<dyn Changesets>,
bonsai_globalrev_mapping: Arc<dyn BonsaiGlobalrevMapping>,
bonsai_hg_mapping: Arc<dyn BonsaiHgMapping>,
derived_data_lease: Arc<dyn LeaseOps>,
filestore_config: FilestoreConfig,
@@ -149,6 +152,7 @@ impl BlobRepo {
blobstore,
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
repoid,
changeset_fetcher_factory: Arc::new(changeset_fetcher_factory),
@@ -162,6 +166,7 @@ impl BlobRepo {
blobstore_args: RepoBlobstoreArgs,
filenodes: Arc<dyn Filenodes>,
changesets: Arc<dyn Changesets>,
bonsai_globalrev_mapping: Arc<dyn BonsaiGlobalrevMapping>,
bonsai_hg_mapping: Arc<dyn BonsaiHgMapping>,
changeset_fetcher_factory: Arc<
dyn Fn() -> Arc<dyn ChangesetFetcher + Send + Sync> + Send + Sync,
@@ -175,6 +180,7 @@ impl BlobRepo {
blobstore,
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
repoid,
changeset_fetcher_factory,
@@ -198,6 +204,7 @@ impl BlobRepo {
blobstore,
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
repoid,
derived_data_lease,
@@ -215,6 +222,7 @@ impl BlobRepo {
repo_blobstore_args,
filenodes,
changesets,
bonsai_globalrev_mapping,
bonsai_hg_mapping,
derived_data_lease,
filestore_config,
@@ -553,6 +561,37 @@ impl BlobRepo {
self.bookmarks.get(ctx, name, self.repoid)
}
pub fn get_bonsai_from_globalrev(
&self,
globalrev: Globalrev,
) -> BoxFuture<Option<ChangesetId>, Error> {
self.bonsai_globalrev_mapping
.get_bonsai_from_globalrev(self.repoid, globalrev)
}
pub fn get_globalrev_from_bonsai(
&self,
bcs: ChangesetId,
) -> BoxFuture<Option<Globalrev>, Error> {
self.bonsai_globalrev_mapping
.get_globalrev_from_bonsai(self.repoid, bcs)
}
pub fn get_bonsai_globalrev_mapping(
&self,
bonsai_or_globalrev_ids: impl Into<BonsaisOrGlobalrevs>,
) -> BoxFuture<Vec<(ChangesetId, Globalrev)>, Error> {
self.bonsai_globalrev_mapping
.get(self.repoid, bonsai_or_globalrev_ids.into())
.map(|result| {
result
.into_iter()
.map(|entry| (entry.bcs_id, entry.globalrev))
.collect()
})
.boxify()
}
pub fn list_bookmark_log_entries(
&self,
ctx: CoreContext,
@@ -2125,6 +2164,7 @@ impl Clone for BlobRepo {
blobstore: self.blobstore.clone(),
filenodes: self.filenodes.clone(),
changesets: self.changesets.clone(),
bonsai_globalrev_mapping: self.bonsai_globalrev_mapping.clone(),
bonsai_hg_mapping: self.bonsai_hg_mapping.clone(),
repoid: self.repoid.clone(),
changeset_fetcher_factory: self.changeset_fetcher_factory.clone(),

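Of the three new accessors, get_bonsai_globalrev_mapping is the batch form: anything convertible into BonsaisOrGlobalrevs goes in, and the (ChangesetId, Globalrev) pairs that exist come out. A hedged usage sketch; `repo` and the changeset ids are placeholders, and the blocking .wait() is only for illustration:

    // Sketch: batch lookup keyed by bonsai ids. Ids without a globalrev
    // are simply absent from the result rather than reported as errors.
    let pairs = repo
        .get_bonsai_globalrev_mapping(vec![cs_one, cs_two])
        .wait()?;
    for (bcs_id, rev) in pairs {
        println!("{} => {}", bcs_id, rev.id());
    }
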
View File

@@ -60,6 +60,30 @@ impl BonsaisOrGlobalrevs {
}
}
impl From<ChangesetId> for BonsaisOrGlobalrevs {
fn from(cs_id: ChangesetId) -> Self {
BonsaisOrGlobalrevs::Bonsai(vec![cs_id])
}
}
impl From<Vec<ChangesetId>> for BonsaisOrGlobalrevs {
fn from(cs_ids: Vec<ChangesetId>) -> Self {
BonsaisOrGlobalrevs::Bonsai(cs_ids)
}
}
impl From<Globalrev> for BonsaisOrGlobalrevs {
fn from(rev: Globalrev) -> Self {
BonsaisOrGlobalrevs::Globalrev(vec![rev])
}
}
impl From<Vec<Globalrev>> for BonsaisOrGlobalrevs {
fn from(revs: Vec<Globalrev>) -> Self {
BonsaisOrGlobalrevs::Globalrev(revs)
}
}
pub trait BonsaiGlobalrevMapping: Send + Sync {
fn add(&self, entry: BonsaiGlobalrevMappingEntry) -> BoxFuture<bool, Error>;

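These From impls are what let the BlobRepo accessor above take impl Into<BonsaisOrGlobalrevs>: a single id or a batch, from either side of the mapping, converts the same way. For illustration, with placeholder `cs_id` and `rev` values:

    // Any of the four shapes becomes a query argument via .into():
    let one_bonsai: BonsaisOrGlobalrevs = cs_id.into();
    let many_bonsais: BonsaisOrGlobalrevs = vec![cs_id].into();
    let one_rev: BonsaisOrGlobalrevs = rev.into();
    let many_revs: BonsaisOrGlobalrevs = vec![rev].into();
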
View File

@@ -21,6 +21,7 @@ use futures_preview::future::{FutureExt, Shared};
use futures_util::try_join;
use manifest::ManifestOps;
use manifest::{Diff as ManifestDiff, Entry as ManifestEntry};
use mercurial_types::Globalrev;
use mononoke_types::{BonsaiChangeset, MPath};
use reachabilityindex::ReachabilityIndex;
use unodes::RootUnodeManifestId;
@@ -124,6 +125,17 @@ impl ChangesetContext {
Ok(mapping.iter().next().map(|(hg_cs_id, _)| *hg_cs_id))
}
/// The Globalrev for the changeset.
pub async fn globalrev(&self) -> Result<Option<Globalrev>, MononokeError> {
let mapping = self
.repo()
.blob_repo()
.get_globalrev_from_bonsai(self.id)
.compat()
.await?;
Ok(mapping.into_iter().next())
}
pub(crate) async fn root_fsnode_id(&self) -> Result<RootFsnodeId, MononokeError> {
self.root_fsnode_id.clone().await
}

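On the async/await side the new accessor mirrors hg_id(): it returns Ok(None) when the commit simply has no globalrev assigned, reserving Err for real failures. A minimal sketch, assuming a `changeset_ctx: ChangesetContext` obtained elsewhere, inside an async fn returning Result<_, MononokeError>:

    match changeset_ctx.globalrev().await? {
        Some(rev) => println!("globalrev {}", rev.id()),
        None => println!("commit has no globalrev"),
    }
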
View File

@@ -19,6 +19,7 @@ use fsnodes::RootFsnodeMapping;
use futures::stream::{self, Stream};
use futures_ext::StreamExt;
use futures_preview::compat::Future01CompatExt;
use mercurial_types::Globalrev;
use metaconfig_types::{CommonConfig, MetadataDBConfig, RepoConfig};
use mononoke_types::hash::{Sha1, Sha256};
use skiplist::{fetch_skiplist_index, SkiplistIndex};
@@ -242,6 +243,12 @@ impl RepoContext {
.compat()
.await?
}
ChangesetSpecifier::Globalrev(rev) => {
self.blob_repo()
.get_bonsai_from_globalrev(rev)
.compat()
.await?
}
};
Ok(id)
}
@@ -309,6 +316,21 @@ impl RepoContext {
Ok(mapping)
}
/// Similar to changeset_hg_ids, but returning Globalrevs.
pub async fn changeset_globalrev_ids(
&self,
changesets: Vec<ChangesetId>,
) -> Result<Vec<(ChangesetId, Globalrev)>, MononokeError> {
let mapping = self
.blob_repo()
.get_bonsai_globalrev_mapping(changesets)
.compat()
.await?
.into_iter()
.collect();
Ok(mapping)
}
/// Get a list of bookmarks.
pub fn list_bookmarks(
&self,

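changeset_globalrev_ids is the RepoContext counterpart to changeset_hg_ids: hand it bonsai ids and get back only the pairs that have a globalrev. A hedged sketch with placeholder ids, inside an async context:

    // Sketch: ids with no globalrev are omitted from the result.
    let pairs: Vec<(ChangesetId, Globalrev)> = repo_ctx
        .changeset_globalrev_ids(vec![cs_one, cs_two])
        .await?;
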
View File

@@ -14,6 +14,9 @@ pub type ChangesetId = mononoke_types::ChangesetId;
/// A Mercurial changeset ID.
pub type HgChangesetId = mercurial_types::HgChangesetId;
/// A Globalrev.
pub type Globalrev = mercurial_types::Globalrev;
/// A changeset specifier. This is anything that may be used to specify a
/// unique changeset, such as its bonsai changeset ID, a changeset hash in an
/// alternative hashing scheme, a globally unique hash prefix, or an
@@ -22,6 +25,7 @@ pub type HgChangesetId = mercurial_types::HgChangesetId;
pub enum ChangesetSpecifier {
Bonsai(ChangesetId),
Hg(HgChangesetId),
Globalrev(Globalrev),
}
impl fmt::Display for ChangesetSpecifier {
@@ -29,6 +33,7 @@ impl fmt::Display for ChangesetSpecifier {
match self {
ChangesetSpecifier::Bonsai(cs_id) => write!(f, "changeset {}", cs_id),
ChangesetSpecifier::Hg(hg_cs_id) => write!(f, "hg changeset {}", hg_cs_id),
ChangesetSpecifier::Globalrev(rev) => write!(f, "globalrev {}", rev.id()),
}
}
}

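Since Display writes the numeric id, logs and error messages render the new variant predictably. For example, with an illustrative value:

    let spec = ChangesetSpecifier::Globalrev(Globalrev::new(1_000_000));
    assert_eq!(spec.to_string(), "globalrev 1000000");
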
View File

@@ -8,7 +8,7 @@
use std::cmp::min;
use std::collections::{BTreeMap, BTreeSet};
-use std::convert::TryFrom;
+use std::convert::{TryFrom, TryInto};
use std::fmt::{Debug, Display};
use std::ops::RangeBounds;
use std::sync::Arc;
@@ -22,7 +22,8 @@ use futures::stream::Stream;
use futures_preview::{
compat::Future01CompatExt, compat::Stream01CompatExt, stream, StreamExt, TryStreamExt,
};
-use futures_util::{try_future::try_join_all, try_join};
+use futures_util::{future::FutureExt, try_future::try_join_all, try_join};
use mercurial_types::Globalrev;
use mononoke_api::{
ChangesetContext, ChangesetId, ChangesetPathContext, ChangesetPathDiffContext,
ChangesetSpecifier, CoreContext, FileContext, FileId, FileMetadata, FileType, HgChangesetId,
@@ -373,12 +374,39 @@ async fn map_commit_identity(
thrift::CommitIdentityScheme::BONSAI,
thrift::CommitId::bonsai(changeset_ctx.id().as_ref().into()),
);
let mut scheme_identities = vec![];
if schemes.contains(&thrift::CommitIdentityScheme::HG) {
-if let Some(hg_cs_id) = changeset_ctx.hg_id().await? {
-    ids.insert(
-        thrift::CommitIdentityScheme::HG,
-        thrift::CommitId::hg(hg_cs_id.as_ref().into()),
-    );
-}
+let identity = async {
+    if let Some(hg_id) = changeset_ctx.hg_id().await? {
+        let result: Result<Option<_>, MononokeError> = Ok(Some((
+            thrift::CommitIdentityScheme::HG,
+            thrift::CommitId::hg(hg_id.as_ref().into()),
+        )));
+        result
+    } else {
+        Ok(None)
+    }
+};
+scheme_identities.push(identity.boxed());
}
if schemes.contains(&thrift::CommitIdentityScheme::GLOBALREV) {
let identity = async {
if let Some(globalrev) = changeset_ctx.globalrev().await? {
let result: Result<Option<_>, MononokeError> = Ok(Some((
thrift::CommitIdentityScheme::GLOBALREV,
thrift::CommitId::globalrev(globalrev.id() as i64),
)));
result
} else {
Ok(None)
}
};
scheme_identities.push(identity.boxed());
}
let scheme_identities = try_join_all(scheme_identities).await?;
for maybe_identity in scheme_identities {
if let Some((scheme, id)) = maybe_identity {
ids.insert(scheme, id);
}
}
Ok(ids)
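
The restructuring above turns map_commit_identity from sequential per-scheme lookups into a fan-out: each requested scheme contributes one boxed future, try_join_all awaits them concurrently, and None results are filtered out afterwards. The shape of that pattern, reduced to a self-contained sketch using the same futures_util imports as this file (toy value types, inside an async fn):

    use futures_util::{future::FutureExt, try_future::try_join_all};

    // Optional lookups fan out concurrently; Nones drop out at the end.
    let mut lookups = vec![];
    lookups.push(async { Ok::<Option<i32>, ()>(Some(1)) }.boxed());
    lookups.push(async { Ok(None) }.boxed());
    let found: Vec<i32> = try_join_all(lookups)
        .await?
        .into_iter()
        .flatten()
        .collect();
    assert_eq!(found, vec![1]);
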
@@ -403,12 +431,53 @@ async fn map_commit_identities(
);
result.insert(*id, idmap);
}
let mut scheme_identities = vec![];
if schemes.contains(&thrift::CommitIdentityScheme::HG) {
-for (cs_id, hg_cs_id) in repo_ctx.changeset_hg_ids(ids).await?.into_iter() {
-    result.entry(cs_id).or_insert_with(BTreeMap::new).insert(
-        thrift::CommitIdentityScheme::HG,
-        thrift::CommitId::hg(hg_cs_id.as_ref().into()),
-    );
-}
+let ids = ids.clone();
+let identities = async {
+    let bonsai_hg_ids = repo_ctx
+        .changeset_hg_ids(ids)
+        .await?
+        .into_iter()
+        .map(|(cs_id, hg_cs_id)| {
+            (
+                cs_id,
+                thrift::CommitIdentityScheme::HG,
+                thrift::CommitId::hg(hg_cs_id.as_ref().into()),
+            )
+        })
+        .collect::<Vec<_>>();
+    let result: Result<_, MononokeError> = Ok(bonsai_hg_ids);
+    result
+};
+scheme_identities.push(identities.boxed());
}
if schemes.contains(&thrift::CommitIdentityScheme::GLOBALREV) {
let identities = async {
let bonsai_globalrev_ids = repo_ctx
.changeset_globalrev_ids(ids)
.await?
.into_iter()
.map(|(cs_id, globalrev)| {
(
cs_id,
thrift::CommitIdentityScheme::GLOBALREV,
thrift::CommitId::globalrev(globalrev.id() as i64),
)
})
.collect::<Vec<_>>();
let result: Result<_, MononokeError> = Ok(bonsai_globalrev_ids);
result
};
scheme_identities.push(identities.boxed());
}
let scheme_identities = try_join_all(scheme_identities).await?;
for ids in scheme_identities {
for (cs_id, commit_identity_scheme, commit_id) in ids {
result
.entry(cs_id)
.or_insert_with(BTreeMap::new)
.insert(commit_identity_scheme, commit_id);
}
}
Ok(result)
@@ -505,7 +574,7 @@ impl CommitIdExt for thrift::CommitId {
thrift::CommitId::bonsai(_) => thrift::CommitIdentityScheme::BONSAI,
thrift::CommitId::hg(_) => thrift::CommitIdentityScheme::HG,
thrift::CommitId::git(_) => thrift::CommitIdentityScheme::GIT,
-thrift::CommitId::global_rev(_) => thrift::CommitIdentityScheme::GLOBAL_REV,
+thrift::CommitId::globalrev(_) => thrift::CommitIdentityScheme::GLOBALREV,
thrift::CommitId::UnknownField(t) => (*t).into(),
}
}
@@ -518,7 +587,7 @@ impl CommitIdExt for thrift::CommitId {
thrift::CommitId::bonsai(id) => hex_string(&id).expect("hex_string should never fail"),
thrift::CommitId::hg(id) => hex_string(&id).expect("hex_string should never fail"),
thrift::CommitId::git(id) => hex_string(&id).expect("hex_string should never fail"),
-thrift::CommitId::global_rev(rev) => rev.to_string(),
+thrift::CommitId::globalrev(rev) => rev.to_string(),
thrift::CommitId::UnknownField(t) => format!("unknown id type ({})", t),
}
}
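
The FromRequest hunk below accepts a thrift globalrev, which travels as an i64 on the wire, and must reject values that do not fit in a u64. Its conversion, reduced to a standalone sketch with a plain String error standing in for the service error type:

    use std::convert::TryInto;

    // Sketch of the conversion in the next hunk: negative i64 revs fail
    // try_into() and surface as invalid-request errors.
    fn parse_globalrev(rev: i64) -> Result<Globalrev, String> {
        let rev: u64 = rev
            .try_into()
            .map_err(|_| format!("cannot parse globalrev {} to u64", rev))?;
        Ok(Globalrev::new(rev))
    }
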
@@ -555,6 +624,12 @@ impl FromRequest<thrift::CommitId> for ChangesetSpecifier {
})?;
Ok(ChangesetSpecifier::Hg(hg_cs_id))
}
thrift::CommitId::globalrev(rev) => {
let rev = Globalrev::new((*rev).try_into().map_err(|_| {
errors::invalid_request(format!("cannot parse globalrev {} to u64", rev))
})?);
Ok(ChangesetSpecifier::Globalrev(rev))
}
_ => Err(errors::invalid_request(format!(
"unsupported commit identity scheme ({})",
commit.scheme()