mononoke: rename BlobChangeset and ChangesetContent

Summary:
These types are Hg-specific. Since we are going to add bonsai changeset creation soon, let's make that clear in the type names.

Reviewed By: farnz

Differential Revision: D8911359

fbshipit-source-id: 8b6cc45122402d7b7e074e66d904d979030de705
Author: Stanislau Hlebik (committed by Facebook Github Bot), 2018-07-24 12:33:25 -07:00
Parent: bef6bcfaad
Commit: ff04a36412
10 changed files with 56 additions and 54 deletions
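For orientation, the net effect of the rename on the two types is sketched below. The field list is abridged from the diff that follows, and the `Hg*Id`/`HgNodeHash` types are simplified stand-ins for the real mercurial_types definitions, so treat this as an illustrative sketch rather than the actual blobrepo code.

```rust
// Illustrative sketch of the renamed types after this commit.
// The *Id/*Hash types are simplified stand-ins; the real ones live in mercurial_types.
#[derive(Debug, Clone, Copy)]
struct HgNodeHash([u8; 20]);
#[derive(Debug, Clone, Copy)]
struct HgManifestId(HgNodeHash);
#[derive(Debug, Clone, Copy)]
struct HgChangesetId(HgNodeHash);

// Formerly `ChangesetContent`: the Mercurial-specific body of a changeset.
#[derive(Debug, Clone)]
pub struct HgChangesetContent {
    p1: Option<HgNodeHash>,
    p2: Option<HgNodeHash>,
    manifestid: HgManifestId,
    comments: Vec<u8>,
    // ... remaining fields elided
}

// Formerly `BlobChangeset`: the content paired with its changeset id.
#[derive(Debug, Clone)]
pub struct HgBlobChangeset {
    changesetid: HgChangesetId,
    content: HgChangesetContent,
}

fn main() {
    let null = HgNodeHash([0; 20]);
    let cs = HgBlobChangeset {
        changesetid: HgChangesetId(null),
        content: HgChangesetContent {
            p1: None,
            p2: None,
            manifestid: HgManifestId(null),
            comments: b"example commit message".to_vec(),
        },
    };
    println!("{:?}", cs);
}
```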


@@ -25,7 +25,7 @@ use errors::*;
 use repo::{ChangesetMetadata, RepoBlobstore};
 #[derive(Debug, Clone)]
-pub struct ChangesetContent {
+pub struct HgChangesetContent {
 p1: Option<HgNodeHash>,
 p2: Option<HgNodeHash>,
 manifestid: HgManifestId,
@@ -36,7 +36,7 @@ pub struct ChangesetContent {
 comments: Vec<u8>,
 }
-impl ChangesetContent {
+impl HgChangesetContent {
 pub fn new_from_parts(
 // XXX replace parents with p1 and p2
 parents: HgParents,
@@ -122,17 +122,17 @@ impl ChangesetContent {
 }
 #[derive(Debug, Clone)]
-pub struct BlobChangeset {
+pub struct HgBlobChangeset {
 changesetid: HgChangesetId, // redundant - can be computed from revlogcs?
-content: ChangesetContent,
+content: HgChangesetContent,
 }
-impl BlobChangeset {
-pub fn new(content: ChangesetContent) -> Result<Self> {
+impl HgBlobChangeset {
+pub fn new(content: HgChangesetContent) -> Result<Self> {
 Ok(Self::new_with_id(&content.compute_hash()?, content))
 }
-pub fn new_with_id(changesetid: &HgChangesetId, content: ChangesetContent) -> Self {
+pub fn new_with_id(changesetid: &HgChangesetId, content: HgChangesetContent) -> Self {
 Self {
 changesetid: *changesetid,
 content,
@@ -150,8 +150,10 @@ impl BlobChangeset {
 let changesetid = *changesetid;
 if changesetid == HgChangesetId::new(NULL_HASH) {
 let revlogcs = RevlogChangeset::new_null();
-let cs =
-BlobChangeset::new_with_id(&changesetid, ChangesetContent::from_revlogcs(revlogcs));
+let cs = HgBlobChangeset::new_with_id(
+&changesetid,
+HgChangesetContent::from_revlogcs(revlogcs),
+);
 Either::A(Ok(Some(cs)).into_future())
 } else {
 let key = changesetid.blobstore_key();
@@ -170,9 +172,9 @@ impl BlobChangeset {
 );
 }
 let revlogcs = RevlogChangeset::from_envelope(envelope)?;
-let cs = BlobChangeset::new_with_id(
+let cs = HgBlobChangeset::new_with_id(
 &changesetid,
-ChangesetContent::from_revlogcs(revlogcs),
+HgChangesetContent::from_revlogcs(revlogcs),
 );
 Ok(Some(cs))
 }
@@ -221,7 +223,7 @@ impl BlobChangeset {
 }
 }
-impl Changeset for BlobChangeset {
+impl Changeset for HgBlobChangeset {
 fn manifestid(&self) -> &HgManifestId {
 &self.content.manifestid
 }


@@ -15,7 +15,7 @@ use mercurial_types::{HgBlob, HgBlobHash, HgChangesetId, HgFileNodeId, HgNodeHas
 MPath, RepoPath, Type};
 use mononoke_types::ContentId;
-use BlobChangeset;
+use HgBlobChangeset;
 #[derive(Debug)]
 pub enum StateOpenError {
@@ -85,7 +85,7 @@ pub enum ErrorKind {
 #[fail(display = "Inconsistent node hash for changeset: provided: {}, \
 computed: {} for blob: {:#?}",
 _0, _1, _2)]
-InconsistentChangesetHash(HgNodeHash, HgNodeHash, BlobChangeset),
+InconsistentChangesetHash(HgNodeHash, HgNodeHash, HgBlobChangeset),
 #[fail(display = "Bookmark {} does not exist", _0)] BookmarkNotFound(AsciiString),
 #[fail(display = "Unresolved conflicts when converting BonsaiChangeset to Manifest")]
 UnresolvedConflicts,


@@ -68,7 +68,7 @@ mod repo_commit;
 pub use errors::*;
-pub use changeset::{BlobChangeset, ChangesetContent};
+pub use changeset::{HgBlobChangeset, HgChangesetContent};
 pub use file::HgBlobEntry;
 pub use manifest::BlobManifest;
 pub use repo::{BlobRepo, ChangesetMetadata, ContentBlobInfo, ContentBlobMeta, CreateChangeset,


@@ -48,8 +48,8 @@ use mononoke_types::{Blob, BlobstoreValue, BonsaiChangeset, ContentId, DateTime,
 use rocksblob::Rocksblob;
 use rocksdb;
-use BlobChangeset;
 use BlobManifest;
+use HgBlobChangeset;
 use errors::*;
 use file::{fetch_file_content_and_renames_from_blobstore, fetch_raw_filenode_bytes, HgBlobEntry};
 use memory_manifest::MemoryRootManifest;
@@ -433,7 +433,7 @@ impl BlobRepo {
 pub fn get_changesets(&self) -> BoxStream<HgNodeHash, Error> {
 STATS::get_changesets.add_value(1);
-BlobChangesetStream {
+HgBlobChangesetStream {
 repo: self.clone(),
 state: BCState::Idle,
 heads: self.get_heads().boxify(),
@@ -472,10 +472,10 @@ impl BlobRepo {
 pub fn get_changeset_by_changesetid(
 &self,
 changesetid: &HgChangesetId,
-) -> BoxFuture<BlobChangeset, Error> {
+) -> BoxFuture<HgBlobChangeset, Error> {
 STATS::get_changeset_by_changesetid.add_value(1);
 let chid = changesetid.clone();
-BlobChangeset::load(&self.blobstore, &chid)
+HgBlobChangeset::load(&self.blobstore, &chid)
 .and_then(move |cs| cs.ok_or(ErrorKind::ChangesetMissing(chid).into()))
 .boxify()
 }
@@ -1184,7 +1184,7 @@ impl CreateChangeset {
 STATS::create_changeset_cf_count.add_value(files.len() as i64);
 let fut: BoxFuture<
-BlobChangeset,
+HgBlobChangeset,
 Error,
 > = (move || {
 let blobcs = try_boxfuture!(make_new_changeset(
@@ -1327,7 +1327,7 @@ impl Clone for BlobRepo {
 }
 }
-pub struct BlobChangesetStream {
+pub struct HgBlobChangesetStream {
 repo: BlobRepo,
 seen: HashSet<HgNodeHash>,
 heads: BoxStream<HgNodeHash, Error>,
@@ -1336,10 +1336,10 @@ pub struct BlobChangesetStream {
 enum BCState {
 Idle,
-WaitCS(HgNodeHash, BoxFuture<BlobChangeset, Error>),
+WaitCS(HgNodeHash, BoxFuture<HgBlobChangeset, Error>),
 }
-impl Stream for BlobChangesetStream {
+impl Stream for HgBlobChangesetStream {
 type Item = HgNodeHash;
 type Error = Error;


@@ -27,9 +27,9 @@ use mercurial_types::manifest::{self, Content};
 use mercurial_types::manifest_utils::{changed_entry_stream, EntryStatus};
 use mercurial_types::nodehash::{HgFileNodeId, HgManifestId};
-use BlobChangeset;
 use BlobRepo;
-use changeset::ChangesetContent;
+use HgBlobChangeset;
+use changeset::HgChangesetContent;
 use errors::*;
 use file::HgBlobEntry;
 use repo::{ChangesetMetadata, RepoBlobstore};
@@ -48,8 +48,8 @@ define_stats! {
 finalize_compute_copy_from_info: timeseries(RATE, SUM),
 }
-/// A handle to a possibly incomplete BlobChangeset. This is used instead of
-/// Future<Item = BlobChangeset> where we don't want to fully serialize waiting for completion.
+/// A handle to a possibly incomplete HgBlobChangeset. This is used instead of
+/// Future<Item = HgBlobChangeset> where we don't want to fully serialize waiting for completion.
 /// For example, `create_changeset` takes these as p1/p2 so that it can handle the blobstore side
 /// of creating a new changeset before its parent changesets are complete.
 /// See `get_completed_changeset()` for the public API you can use to extract the final changeset
@@ -61,13 +61,13 @@ pub struct ChangesetHandle {
 // * The Compat<Error> here is because the error type for Shared (a cloneable wrapper called
 // SharedError) doesn't implement Fail, and only implements Error if the wrapped type
 // implements Error.
-completion_future: Shared<BoxFuture<BlobChangeset, Compat<Error>>>,
+completion_future: Shared<BoxFuture<HgBlobChangeset, Compat<Error>>>,
 }
 impl ChangesetHandle {
 pub fn new_pending(
 can_be_parent: Shared<oneshot::Receiver<(HgNodeHash, HgManifestId)>>,
-completion_future: Shared<BoxFuture<BlobChangeset, Compat<Error>>>,
+completion_future: Shared<BoxFuture<HgBlobChangeset, Compat<Error>>>,
 ) -> Self {
 Self {
 can_be_parent,
@@ -75,13 +75,13 @@ impl ChangesetHandle {
 }
 }
-pub fn get_completed_changeset(self) -> Shared<BoxFuture<BlobChangeset, Compat<Error>>> {
+pub fn get_completed_changeset(self) -> Shared<BoxFuture<HgBlobChangeset, Compat<Error>>> {
 self.completion_future
 }
 }
-impl From<BlobChangeset> for ChangesetHandle {
-fn from(bcs: BlobChangeset) -> Self {
+impl From<HgBlobChangeset> for ChangesetHandle {
+fn from(bcs: HgBlobChangeset) -> Self {
 let (trigger, can_be_parent) = oneshot::channel();
 // The send cannot fail at this point, barring an optimizer noticing that `can_be_parent`
 // is unused and dropping early. Eat the error, as in this case, nothing is blocked waiting
@@ -96,8 +96,8 @@ impl From<BlobChangeset> for ChangesetHandle {
 /// This implementation can be used to convert a result of
 /// BlobRepo::get_changeset_by_changesetid into ChangesetHandle
-impl From<BoxFuture<BlobChangeset, Error>> for ChangesetHandle {
-fn from(bcs: BoxFuture<BlobChangeset, Error>) -> Self {
+impl From<BoxFuture<HgBlobChangeset, Error>> for ChangesetHandle {
+fn from(bcs: BoxFuture<HgBlobChangeset, Error>) -> Self {
 let (trigger, can_be_parent) = oneshot::channel();
 Self {
@@ -661,9 +661,9 @@ pub fn make_new_changeset(
 root_hash: HgManifestId,
 cs_metadata: ChangesetMetadata,
 files: Vec<MPath>,
-) -> Result<BlobChangeset> {
-let changeset = ChangesetContent::new_from_parts(parents, root_hash, cs_metadata, files);
-BlobChangeset::new(changeset)
+) -> Result<HgBlobChangeset> {
+let changeset = HgChangesetContent::new_from_parts(parents, root_hash, cs_metadata, files);
+HgBlobChangeset::new(changeset)
 }
 #[cfg(test)]

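As an aside on the `ChangesetHandle` doc comment in the hunk above: the handle wraps a shared completion future so several dependents can wait on one possibly incomplete HgBlobChangeset instead of each owning its own future. A minimal sketch of that sharing pattern with the futures 0.1 API follows; the names, the `()` error type, and the simplified signatures are illustrative assumptions, not the blobrepo API.

```rust
// Sketch of the shared-completion idea behind ChangesetHandle, using futures 0.1
// (requires the futures = "0.1" crate). `Changeset`, the `()` error type, and
// these signatures are illustrative stand-ins.
use futures::future::{self, Future, Shared};

#[derive(Clone, Debug)]
struct Changeset {
    id: u8,
}

type ChangesetFuture = Box<dyn Future<Item = Changeset, Error = ()> + Send>;

#[derive(Clone)]
struct ChangesetHandle {
    // Shared lets every clone of the handle poll the same underlying future
    // and get a cheap, cloneable view of the single result.
    completion_future: Shared<ChangesetFuture>,
}

impl ChangesetHandle {
    fn new(fut: ChangesetFuture) -> Self {
        ChangesetHandle {
            completion_future: fut.shared(),
        }
    }

    fn get_completed_changeset(self) -> Shared<ChangesetFuture> {
        self.completion_future
    }
}

fn main() {
    // A changeset whose upload is "in flight"; here it is already resolved.
    let handle = ChangesetHandle::new(Box::new(future::ok(Changeset { id: 1 })));

    // Two dependents (e.g. two children using this changeset as p1) can each
    // clone the handle and wait on the same completion.
    let for_child_a = handle.clone();
    let for_child_b = handle;

    if let Ok(cs) = for_child_a.get_completed_changeset().wait() {
        println!("child A sees parent changeset {}", cs.id);
    }
    if let Ok(cs) = for_child_b.get_completed_changeset().wait() {
        println!("child B sees parent changeset {}", cs.id);
    }
}
```

The real field, as the hunk above shows, is a `Shared<BoxFuture<HgBlobChangeset, Compat<Error>>>`; the sketch keeps only the sharing aspect.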

@@ -16,7 +16,7 @@ use slog::Logger;
 use futures_ext::{BoxFuture, FutureExt};
-use blobrepo::{BlobChangeset, BlobManifest, BlobRepo, HgBlobEntry};
+use blobrepo::{BlobManifest, BlobRepo, HgBlobChangeset, HgBlobEntry};
 use blobrepo::internal::MemoryRootManifest;
 use bonsai_utils::{bonsai_diff, BonsaiDiffResult};
 use mercurial_types::{Changeset, Entry, HgChangesetId, HgManifestId, HgNodeHash, Type};
@@ -144,7 +144,7 @@ impl ChangesetVisitor for BonsaiVerifyVisitor {
 self,
 logger: Logger,
 repo: BlobRepo,
-changeset: BlobChangeset,
+changeset: HgBlobChangeset,
 _follow_remaining: usize,
 ) -> BoxFuture<Self::Item, Error> {
 let changeset_id = changeset.get_changeset_id();
@@ -329,7 +329,7 @@ fn make_entry(repo: &BlobRepo, diff_result: &BonsaiDiffResult) -> Option<HgBlobE
 }
 #[inline]
-fn get_root_entry(repo: &BlobRepo, changeset: &BlobChangeset) -> Box<Entry + Sync> {
+fn get_root_entry(repo: &BlobRepo, changeset: &HgBlobChangeset) -> Box<Entry + Sync> {
 let manifest_id = changeset.manifestid();
 repo.get_root_entry(manifest_id)
 }


@@ -14,7 +14,7 @@ use tokio;
 use futures_ext::{send_discard, BoxFuture};
-use blobrepo::{BlobChangeset, BlobRepo};
+use blobrepo::{BlobRepo, HgBlobChangeset};
 use mercurial_types::HgChangesetId;
 /// This trait enables parallelized walks over changesets.
@@ -31,7 +31,7 @@ pub trait ChangesetVisitor: Clone + Send + Sync + 'static {
 self,
 logger: Logger,
 repo: BlobRepo,
-changeset: BlobChangeset,
+changeset: HgBlobChangeset,
 follow_remaining: usize,
 ) -> BoxFuture<Self::Item, Error>;
 }


@@ -16,7 +16,7 @@ use futures::stream::{self, Stream};
 use futures_ext::{BoxFuture, BoxStream, FutureExt, StreamExt};
 use scuba_ext::ScubaSampleBuilder;
-use blobrepo::{BlobChangeset, BlobRepo, ChangesetHandle, ChangesetMetadata, CreateChangeset,
+use blobrepo::{BlobRepo, ChangesetHandle, ChangesetMetadata, CreateChangeset, HgBlobChangeset,
 HgBlobEntry, UploadHgFileContents, UploadHgFileEntry, UploadHgNodeHash,
 UploadHgTreeEntry};
 use mercurial::{manifest, RevlogChangeset, RevlogEntry, RevlogRepo};
@@ -211,7 +211,7 @@ pub struct UploadChangesets {
 }
 impl UploadChangesets {
-pub fn upload(self) -> BoxStream<BoxFuture<SharedItem<BlobChangeset>, Error>, Error> {
+pub fn upload(self) -> BoxStream<BoxFuture<SharedItem<HgBlobChangeset>, Error>, Error> {
 let Self {
 blobrepo,
 revlogrepo,


@@ -49,7 +49,7 @@ pub mod hook_loader;
 pub mod errors;
 use asyncmemo::{Asyncmemo, Filler, Weight};
-use blobrepo::{BlobChangeset, BlobRepo};
+use blobrepo::{BlobRepo, HgBlobChangeset};
 use bookmarks::Bookmark;
 pub use errors::*;
 use failure::Error;
@@ -377,7 +377,7 @@ pub trait ChangesetStore: Send + Sync {
 fn get_changeset_by_changesetid(
 &self,
 changesetid: &HgChangesetId,
-) -> BoxFuture<BlobChangeset, Error>;
+) -> BoxFuture<HgBlobChangeset, Error>;
 }
 pub struct BlobRepoChangesetStore {
@@ -388,7 +388,7 @@ impl ChangesetStore for BlobRepoChangesetStore {
 fn get_changeset_by_changesetid(
 &self,
 changesetid: &HgChangesetId,
-) -> BoxFuture<BlobChangeset, Error> {
+) -> BoxFuture<HgBlobChangeset, Error> {
 self.repo.get_changeset_by_changesetid(changesetid)
 }
 }
@@ -400,14 +400,14 @@ impl BlobRepoChangesetStore {
 }
 pub struct InMemoryChangesetStore {
-map: HashMap<HgChangesetId, BlobChangeset>,
+map: HashMap<HgChangesetId, HgBlobChangeset>,
 }
 impl ChangesetStore for InMemoryChangesetStore {
 fn get_changeset_by_changesetid(
 &self,
 changesetid: &HgChangesetId,
-) -> BoxFuture<BlobChangeset, Error> {
+) -> BoxFuture<HgBlobChangeset, Error> {
 match self.map.get(changesetid) {
 Some(cs) => Box::new(finished(cs.clone())),
 None => Box::new(failed(
@@ -424,7 +424,7 @@ impl InMemoryChangesetStore {
 }
 }
-pub fn insert(&mut self, changeset_id: &HgChangesetId, changeset: &BlobChangeset) {
+pub fn insert(&mut self, changeset_id: &HgChangesetId, changeset: &HgBlobChangeset) {
 self.map.insert(changeset_id.clone(), changeset.clone());
 }
 }
@@ -477,9 +477,9 @@ pub enum HookChangesetParents {
 Two(String, String),
 }
-impl TryFrom<BlobChangeset> for HookChangeset {
+impl TryFrom<HgBlobChangeset> for HookChangeset {
 type Error = Error;
-fn try_from(changeset: BlobChangeset) -> Result<Self, Error> {
+fn try_from(changeset: HgBlobChangeset) -> Result<Self, Error> {
 let author = str::from_utf8(changeset.user())?.into();
 let files = changeset.files();
 let files = files


@@ -21,7 +21,7 @@ use futures_stats::{Timed, TimedStreamTrait};
 use itertools::Itertools;
 use slog::Logger;
-use blobrepo::BlobChangeset;
+use blobrepo::HgBlobChangeset;
 use bundle2_resolver;
 use mercurial::{self, RevlogChangeset};
 use mercurial_bundles::{create_bundle_stream, parts, Bundle2EncodeBuilder, Bundle2Item};
@@ -363,7 +363,7 @@ impl HgCommands for RepoClient {
 }
 }
-impl Stream for ParentStream<BoxFuture<BlobChangeset, hgproto::Error>> {
+impl Stream for ParentStream<BoxFuture<HgBlobChangeset, hgproto::Error>> {
 type Item = HgNodeHash;
 type Error = hgproto::Error;