CODEMOD: rename mercurial_types::HgManifestId to DManifestId

Summary: mercurial_types::DManifestId should be replaced by types from mononoke_types in most cases and by mercurial::HgManifestId in others. This rename should help with tracking that work.

Reviewed By: sid0

Differential Revision: D7619062

fbshipit-source-id: 447224194c6555334b64dc29ebabe3ef0d0cb87e
Lukas Piatkowski 2018-04-16 03:34:03 -07:00 committed by Facebook Github Bot
parent f6c3f72745
commit 7e05a01ce5
12 changed files with 55 additions and 56 deletions
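
The type being renamed is a thin newtype around DNodeHash, so the codemod is an identifier swap with no behavioural change. Below is a minimal, self-contained sketch of that pattern for orientation; the DNodeHash stand-in is simplified for illustration and is not the real mercurial_types implementation.

use std::fmt::{self, Display};

// Simplified stand-in for mercurial_types::DNodeHash; the real type wraps a sha1 hash.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct DNodeHash([u8; 20]);

impl Display for DNodeHash {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        for byte in &self.0 {
            write!(fmt, "{:02x}", byte)?;
        }
        Ok(())
    }
}

// After the codemod the manifest id is still just a newtype around DNodeHash;
// only the name changed from HgManifestId to DManifestId.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct DManifestId(DNodeHash);

impl DManifestId {
    pub const fn new(hash: DNodeHash) -> Self {
        DManifestId(hash)
    }

    pub fn into_nodehash(self) -> DNodeHash {
        self.0
    }
}

impl Display for DManifestId {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to the inner hash, mirroring the Display impl in the diff below.
        self.0.fmt(fmt)
    }
}

fn main() {
    // Call sites only need the identifier swapped; construction and use are unchanged.
    let mid = DManifestId::new(DNodeHash([0x11; 20]));
    assert_eq!(mid.into_nodehash(), DNodeHash([0x11; 20]));
    println!("root manifest id: {}", mid);
}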

View File

@@ -20,7 +20,7 @@ use mercurial::{self, NodeHashConversion};
 use mercurial::changeset::Extra;
 use mercurial::revlogrepo::RevlogChangeset;
 use mercurial_types::{BlobNode, Changeset, HgBlob, MPath, Parents, Time};
-use mercurial_types::nodehash::{DChangesetId, HgManifestId, D_NULL_HASH};
+use mercurial_types::nodehash::{DChangesetId, DManifestId, D_NULL_HASH};

 use errors::*;
@@ -39,7 +39,7 @@ struct RawCSBlob<'a> {
 pub struct ChangesetContent {
     parents: Parents,
-    manifestid: HgManifestId,
+    manifestid: DManifestId,
     user: Vec<u8>,
     time: Time,
     extra: Extra,
@@ -56,7 +56,7 @@ impl From<RevlogChangeset> for ChangesetContent {
             Parents::new(p1.as_ref(), p2.as_ref())
         };
-        let manifestid = HgManifestId::new(revlogcs.manifestid.into_nodehash().into_mononoke());
+        let manifestid = DManifestId::new(revlogcs.manifestid.into_nodehash().into_mononoke());

         Self {
             parents,
@@ -73,7 +73,7 @@ impl From<RevlogChangeset> for ChangesetContent {
 impl ChangesetContent {
     pub fn new_from_parts(
         parents: Parents,
-        manifestid: HgManifestId,
+        manifestid: DManifestId,
         user: Vec<u8>,
         time: Time,
         extra: BTreeMap<Vec<u8>, Vec<u8>>,
@@ -218,7 +218,7 @@ impl BlobChangeset {
 }

 impl Changeset for BlobChangeset {
-    fn manifestid(&self) -> &HgManifestId {
+    fn manifestid(&self) -> &DManifestId {
         &self.content.manifestid
     }

View File

@@ -13,7 +13,7 @@ use futures::future::Future;
 use futures_ext::{BoxFuture, FutureExt};
 use mercurial::file;
-use mercurial_types::{BlobNode, DNodeHash, FileType, HgBlob, HgManifestId, MPath, MPathElement,
+use mercurial_types::{BlobNode, DManifestId, DNodeHash, FileType, HgBlob, MPath, MPathElement,
                       Parents};
 use mercurial_types::manifest::{Content, Entry, Manifest, Type};
 use mercurial_types::nodehash::EntryId;
@@ -82,7 +82,7 @@ impl BlobEntry {
         })
     }

-    pub fn new_root(blobstore: Arc<Blobstore>, manifestid: HgManifestId) -> Self {
+    pub fn new_root(blobstore: Arc<Blobstore>, manifestid: DManifestId) -> Self {
         Self {
             blobstore,
             name: None,

View File

@@ -15,7 +15,7 @@ use futures::stream::{self, Stream};
 use futures_ext::{BoxFuture, BoxStream, FutureExt, StreamExt};
 use mercurial_types::{Entry, FileType, MPathElement, Manifest, Type};
-use mercurial_types::nodehash::{DNodeHash, EntryId, HgManifestId, D_NULL_HASH};
+use mercurial_types::nodehash::{DManifestId, DNodeHash, EntryId, D_NULL_HASH};

 use blobstore::Blobstore;
@@ -92,7 +92,7 @@ pub struct BlobManifest {
 impl BlobManifest {
     pub fn load(
         blobstore: &Arc<Blobstore>,
-        manifestid: &HgManifestId,
+        manifestid: &DManifestId,
     ) -> BoxFuture<Option<Self>, Error> {
         let nodehash = manifestid.clone().into_nodehash();
         if nodehash == D_NULL_HASH {

View File

@@ -40,7 +40,7 @@ use memheads::MemHeads;
 use mercurial_types::{BlobNode, Changeset, DChangesetId, DNodeHash, Entry, HgBlob, HgFileNodeId,
                       Manifest, Parents, RepoPath, RepositoryId, Time};
 use mercurial_types::manifest;
-use mercurial_types::nodehash::HgManifestId;
+use mercurial_types::nodehash::DManifestId;
 use rocksblob::Rocksblob;
 use rocksdb;
 use tokio_core::reactor::Remote;
@@ -292,14 +292,14 @@ impl BlobRepo {
         nodeid: &DNodeHash,
     ) -> BoxFuture<Box<Manifest + Sync>, Error> {
         let nodeid = *nodeid;
-        let manifestid = HgManifestId::new(nodeid);
+        let manifestid = DManifestId::new(nodeid);
         BlobManifest::load(&self.blobstore, &manifestid)
             .and_then(move |mf| mf.ok_or(ErrorKind::ManifestMissing(nodeid).into()))
             .map(|m| m.boxed())
             .boxify()
     }

-    pub fn get_root_entry(&self, manifestid: &HgManifestId) -> Box<Entry + Sync> {
+    pub fn get_root_entry(&self, manifestid: &DManifestId) -> Box<Entry + Sync> {
         Box::new(BlobEntry::new_root(self.blobstore.clone(), *manifestid))
     }

View File

@@ -26,7 +26,7 @@ use mercurial_types::{BlobNode, Changeset, DChangesetId, DNodeHash, Entry, Entry
                       Manifest, Parents, RepoPath, RepositoryId, Time};
 use mercurial_types::manifest::{self, Content};
 use mercurial_types::manifest_utils::{changed_entry_stream, EntryStatus};
-use mercurial_types::nodehash::{HgFileNodeId, HgManifestId};
+use mercurial_types::nodehash::{DManifestId, HgFileNodeId};

 use BlobChangeset;
 use BlobRepo;
@@ -42,13 +42,13 @@ use utils::get_node_key;
 /// See `get_completed_changeset()` for the public API you can use to extract the final changeset
 #[derive(Clone)]
 pub struct ChangesetHandle {
-    can_be_parent: Shared<oneshot::Receiver<(DNodeHash, HgManifestId)>>,
+    can_be_parent: Shared<oneshot::Receiver<(DNodeHash, DManifestId)>>,
     completion_future: Shared<BoxFuture<BlobChangeset, Compat<Error>>>,
 }

 impl ChangesetHandle {
     pub fn new_pending(
-        can_be_parent: Shared<oneshot::Receiver<(DNodeHash, HgManifestId)>>,
+        can_be_parent: Shared<oneshot::Receiver<(DNodeHash, DManifestId)>>,
         completion_future: Shared<BoxFuture<BlobChangeset, Compat<Error>>>,
     ) -> Self {
         Self {
@@ -362,7 +362,7 @@ pub fn process_entries(
     entry_processor: &UploadEntries,
     root_manifest: BoxFuture<(BlobEntry, RepoPath), Error>,
     new_child_entries: BoxStream<(BlobEntry, RepoPath), Error>,
-) -> BoxFuture<(Box<Manifest + Sync>, HgManifestId), Error> {
+) -> BoxFuture<(Box<Manifest + Sync>, DManifestId), Error> {
     root_manifest
         .and_then({
             let entry_processor = entry_processor.clone();
@@ -388,7 +388,7 @@ pub fn process_entries(
         })
         .and_then(move |root_hash| {
             repo.get_manifest_by_nodeid(&root_hash)
-                .map(move |m| (m, HgManifestId::new(root_hash)))
+                .map(move |m| (m, DManifestId::new(root_hash)))
         })
         .timed(move |stats, result| {
             if result.is_ok() {
@@ -482,7 +482,7 @@ pub fn handle_parents(
 pub fn make_new_changeset(
     parents: Parents,
-    root_hash: HgManifestId,
+    root_hash: DManifestId,
     user: String,
     time: Time,
     extra: BTreeMap<Vec<u8>, Vec<u8>>,

View File

@@ -29,8 +29,8 @@ use bytes::Bytes;
 use futures::Future;

 use blobrepo::{compute_changed_files, BlobRepo};
-use mercurial_types::{manifest, Changeset, DChangesetId, Entry, EntryId, FileType, HgBlob,
-                      HgManifestId, MPath, MPathElement, RepoPath};
+use mercurial_types::{manifest, Changeset, DChangesetId, DManifestId, Entry, EntryId, FileType,
+                      HgBlob, MPath, MPathElement, RepoPath};

 mod stats_units;
 #[macro_use]
@@ -143,7 +143,7 @@ fn create_one_changeset(repo: BlobRepo) {
     );
     let cs = run_future(commit.get_completed_changeset()).unwrap();

-    assert!(cs.manifestid() == &HgManifestId::new(roothash));
+    assert!(cs.manifestid() == &DManifestId::new(roothash));
     assert!(cs.user() == author.as_bytes());
     assert!(cs.parents().get_nodes() == (None, None));
     let files: Vec<_> = cs.files().into();
@@ -197,7 +197,7 @@ fn create_two_changesets(repo: BlobRepo) {
             .join(commit2.get_completed_changeset()),
     ).unwrap();

-    assert!(commit2.manifestid() == &HgManifestId::new(roothash));
+    assert!(commit2.manifestid() == &DManifestId::new(roothash));
     assert!(commit2.user() == utf_author.as_bytes());
     let files: Vec<_> = commit2.files().into();
     let expected_files = vec![MPath::new("dir/file").unwrap(), MPath::new("file").unwrap()];
@@ -337,7 +337,7 @@ fn check_linknode_creation(repo: BlobRepo) {
     let commit = create_changeset_no_parents(&repo, root_manifest_future, uploads);
     let cs = run_future(commit.get_completed_changeset()).unwrap();

-    assert!(cs.manifestid() == &HgManifestId::new(roothash));
+    assert!(cs.manifestid() == &DManifestId::new(roothash));
     assert!(cs.user() == author.as_bytes());
     assert!(cs.parents().get_nodes() == (None, None));

View File

@@ -7,7 +7,7 @@
 // Ignore deprecation of NodeHash::new
 #![allow(deprecated)]

-use mercurial_types::{DChangesetId, DNodeHash, HgFileNodeId, HgManifestId};
+use mercurial_types::{DChangesetId, DManifestId, DNodeHash, HgFileNodeId};

 // D_NULL_HASH is exported for convenience.
 pub use mercurial_types::D_NULL_HASH;
@@ -48,21 +48,21 @@ pub const ES_CSID: DChangesetId = DChangesetId::new(ES_HASH);
 pub const FS_CSID: DChangesetId = DChangesetId::new(FS_HASH);

 // Definitions for manifest IDs 1111...ffff
-pub const ONES_MID: HgManifestId = HgManifestId::new(ONES_HASH);
-pub const TWOS_MID: HgManifestId = HgManifestId::new(TWOS_HASH);
-pub const THREES_MID: HgManifestId = HgManifestId::new(THREES_HASH);
-pub const FOURS_MID: HgManifestId = HgManifestId::new(FOURS_HASH);
-pub const FIVES_MID: HgManifestId = HgManifestId::new(FIVES_HASH);
-pub const SIXES_MID: HgManifestId = HgManifestId::new(SIXES_HASH);
-pub const SEVENS_MID: HgManifestId = HgManifestId::new(SEVENS_HASH);
-pub const EIGHTS_MID: HgManifestId = HgManifestId::new(EIGHTS_HASH);
-pub const NINES_MID: HgManifestId = HgManifestId::new(NINES_HASH);
-pub const AS_MID: HgManifestId = HgManifestId::new(AS_HASH);
-pub const BS_MID: HgManifestId = HgManifestId::new(BS_HASH);
-pub const CS_MID: HgManifestId = HgManifestId::new(CS_HASH);
-pub const DS_MID: HgManifestId = HgManifestId::new(DS_HASH);
-pub const ES_MID: HgManifestId = HgManifestId::new(ES_HASH);
-pub const FS_MID: HgManifestId = HgManifestId::new(FS_HASH);
+pub const ONES_MID: DManifestId = DManifestId::new(ONES_HASH);
+pub const TWOS_MID: DManifestId = DManifestId::new(TWOS_HASH);
+pub const THREES_MID: DManifestId = DManifestId::new(THREES_HASH);
+pub const FOURS_MID: DManifestId = DManifestId::new(FOURS_HASH);
+pub const FIVES_MID: DManifestId = DManifestId::new(FIVES_HASH);
+pub const SIXES_MID: DManifestId = DManifestId::new(SIXES_HASH);
+pub const SEVENS_MID: DManifestId = DManifestId::new(SEVENS_HASH);
+pub const EIGHTS_MID: DManifestId = DManifestId::new(EIGHTS_HASH);
+pub const NINES_MID: DManifestId = DManifestId::new(NINES_HASH);
+pub const AS_MID: DManifestId = DManifestId::new(AS_HASH);
+pub const BS_MID: DManifestId = DManifestId::new(BS_HASH);
+pub const CS_MID: DManifestId = DManifestId::new(CS_HASH);
+pub const DS_MID: DManifestId = DManifestId::new(DS_HASH);
+pub const ES_MID: DManifestId = DManifestId::new(ES_HASH);
+pub const FS_MID: DManifestId = DManifestId::new(FS_HASH);

 // Definitions for filenode IDs 1111...ffff
 pub const ONES_FNID: HgFileNodeId = HgFileNodeId::new(ONES_HASH);

View File

@@ -9,10 +9,10 @@ use std::collections::BTreeMap;
 use mononoke_types::MPath;

 use blobnode::Parents;
-use nodehash::HgManifestId;
+use nodehash::DManifestId;

 pub trait Changeset: Send + 'static {
-    fn manifestid(&self) -> &HgManifestId;
+    fn manifestid(&self) -> &DManifestId;
     fn user(&self) -> &[u8];
     fn extra(&self) -> &BTreeMap<Vec<u8>, Vec<u8>>;
     fn comments(&self) -> &[u8];
@@ -29,7 +29,7 @@ pub trait Changeset: Send + 'static {
 }

 impl Changeset for Box<Changeset> {
-    fn manifestid(&self) -> &HgManifestId {
+    fn manifestid(&self) -> &DManifestId {
         (**self).manifestid()
     }

View File

@@ -101,7 +101,7 @@ pub use delta::Delta;
 pub use fsencode::{fncache_fsencode, simple_fsencode};
 pub use manifest::{Entry, Manifest, Type};
 pub use node::Node;
-pub use nodehash::{DChangesetId, DNodeHash, EntryId, HgFileNodeId, HgManifestId, D_NULL_HASH};
+pub use nodehash::{DChangesetId, DManifestId, DNodeHash, EntryId, HgFileNodeId, D_NULL_HASH};
 pub use repo::RepositoryId;
 pub use utils::percent_encode;

View File

@@ -17,7 +17,7 @@ use quickcheck::{single_shrinker, Arbitrary, Gen};
 use errors::*;
 use hash::{self, Sha1};
 use serde;
-use sql_types::{DChangesetIdSql, HgFileNodeIdSql, HgManifestIdSql};
+use sql_types::{DChangesetIdSql, DManifestIdSql, HgFileNodeIdSql};

 pub const D_NULL_HASH: DNodeHash = DNodeHash(hash::NULL);
 pub const NULL_CSID: DChangesetId = DChangesetId(D_NULL_HASH);
@@ -213,10 +213,10 @@ impl<'de> serde::de::Deserialize<'de> for DChangesetId {
 #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
 #[derive(HeapSizeOf, FromSqlRow, AsExpression)]
-#[sql_type = "HgManifestIdSql"]
-pub struct HgManifestId(DNodeHash);
+#[sql_type = "DManifestIdSql"]
+pub struct DManifestId(DNodeHash);

-impl HgManifestId {
+impl DManifestId {
     #[inline]
     pub(crate) fn as_nodehash(&self) -> &DNodeHash {
         &self.0
@@ -227,11 +227,11 @@ impl HgManifestId {
     }

     pub const fn new(hash: DNodeHash) -> Self {
-        HgManifestId(hash)
+        DManifestId(hash)
     }
 }

-impl Display for HgManifestId {
+impl Display for DManifestId {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(fmt)
     }

View File

@@ -13,7 +13,7 @@ use diesel::deserialize::{self, FromSql};
 use diesel::serialize::{self, IsNull, Output, ToSql};
 use diesel::sql_types::{Binary, Integer};

-use {DChangesetId, DNodeHash, HgFileNodeId, HgManifestId, RepositoryId};
+use {DChangesetId, DManifestId, DNodeHash, HgFileNodeId, RepositoryId};

 use errors::*;
#[derive(QueryId, SqlType)] #[derive(QueryId, SqlType)]
@ -24,7 +24,7 @@ pub struct DChangesetIdSql;
#[derive(QueryId, SqlType)] #[derive(QueryId, SqlType)]
#[mysql_type = "Blob"] #[mysql_type = "Blob"]
#[sqlite_type = "Binary"] #[sqlite_type = "Binary"]
pub struct HgManifestIdSql; pub struct DManifestIdSql;
#[derive(QueryId, SqlType)] #[derive(QueryId, SqlType)]
#[mysql_type = "Blob"] #[mysql_type = "Blob"]
@@ -51,14 +51,14 @@ where
     }
 }

-impl<DB: Backend> ToSql<HgManifestIdSql, DB> for HgManifestId {
+impl<DB: Backend> ToSql<DManifestIdSql, DB> for DManifestId {
     fn to_sql<W: Write>(&self, out: &mut Output<W, DB>) -> serialize::Result {
         out.write_all(self.as_nodehash().0.as_ref())?;
         Ok(IsNull::No)
     }
 }

-impl<DB: Backend> FromSql<HgManifestIdSql, DB> for HgManifestId
+impl<DB: Backend> FromSql<DManifestIdSql, DB> for DManifestId
 where
     *const [u8]: FromSql<Binary, DB>,
 {

View File

@@ -37,7 +37,7 @@ use blobrepo::BlobChangeset;
 use bundle2_resolver;
 use mercurial::{self, NodeHashConversion, RevlogChangeset};
 use mercurial_bundles::{parts, Bundle2EncodeBuilder, Bundle2Item};
-use mercurial_types::{percent_encode, Changeset, DChangesetId, DNodeHash, Entry, HgManifestId,
+use mercurial_types::{percent_encode, Changeset, DChangesetId, DManifestId, DNodeHash, Entry,
                       MPath, Parents, RepoPath, RepositoryId, Type, D_NULL_HASH};
 use mercurial_types::manifest_utils::{changed_entry_stream, EntryStatus};
 use metaconfig::repoconfig::RepoType;
@@ -819,8 +819,7 @@ fn get_changed_entry_stream(
     });

     // Append root manifest
-    let root_entry_stream =
-        stream::once(Ok((repo.get_root_entry(&HgManifestId::new(*mfid)), None)));
+    let root_entry_stream = stream::once(Ok((repo.get_root_entry(&DManifestId::new(*mfid)), None)));

     changed_entries.chain(root_entry_stream).boxify()
 }