CODEMOD: rename mercurial::NodeHash to HgNodeHash

Reviewed By: sid0

Differential Revision: D7619973

fbshipit-source-id: 229fea891788c33eb1f45446ba2333e945ca5553
Author: Lukas Piatkowski, 2018-04-16 03:34:19 -07:00; committed by Facebook Github Bot
parent 086640fdd0, commit 03255529fa
34 changed files with 278 additions and 293 deletions


@ -8,7 +8,7 @@ use futures::Stream;
use futures_ext::{BoxStream, StreamExt};
use bytes::Bytes;
use mercurial::{BlobNode, NodeHash, NULL_HASH};
use mercurial::{BlobNode, HgNodeHash, NULL_HASH};
use mercurial::changeset::RevlogChangeset;
use mercurial_bundles::changegroup::CgDeltaChunk;
use mercurial_types::{delta, HgBlob};
@ -20,7 +20,9 @@ pub struct ChangesetDeltaed {
pub chunk: CgDeltaChunk,
}
pub fn convert_to_revlog_changesets<S>(deltaed: S) -> BoxStream<(NodeHash, RevlogChangeset), Error>
pub fn convert_to_revlog_changesets<S>(
deltaed: S,
) -> BoxStream<(HgNodeHash, RevlogChangeset), Error>
where
S: Stream<Item = ChangesetDeltaed, Error = Error> + Send + 'static,
{
@ -67,11 +69,11 @@ mod tests {
use self::CheckResult::*;
fn check_null_changeset(
node: NodeHash,
linknode: NodeHash,
base: NodeHash,
p1: NodeHash,
p2: NodeHash,
node: HgNodeHash,
linknode: HgNodeHash,
base: HgNodeHash,
p1: HgNodeHash,
p2: HgNodeHash,
) -> CheckResult {
let blobnode = BlobNode::new(
RevlogChangeset::new_null()
@ -108,11 +110,11 @@ mod tests {
quickcheck!{
fn null_changeset_random(
node: NodeHash,
linknode: NodeHash,
base: NodeHash,
p1: NodeHash,
p2: NodeHash
node: HgNodeHash,
linknode: HgNodeHash,
base: HgNodeHash,
p1: HgNodeHash,
p2: HgNodeHash
) -> bool {
match check_null_changeset(node, linknode, base, p1, p2) {
ExpectedOk(true) | ExpectedErr(true) => true,
@ -120,7 +122,7 @@ mod tests {
}
}
fn null_changeset_correct(node: NodeHash, p1: NodeHash, p2: NodeHash) -> bool {
fn null_changeset_correct(node: HgNodeHash, p1: HgNodeHash, p2: HgNodeHash) -> bool {
match check_null_changeset(node.clone(), node, NULL_HASH, p1, p2) {
ExpectedOk(true) => true,
_ => false


@ -17,7 +17,7 @@ use heapsize::HeapSizeOf;
use quickcheck::{Arbitrary, Gen};
use blobrepo::{BlobEntry, BlobRepo};
use mercurial;
use mercurial::{self, HgNodeHash};
use mercurial_bundles::changegroup::CgDeltaChunk;
use mercurial_types::{delta, manifest, Delta, FileType, HgBlob, MPath, RepoPath};
@ -34,9 +34,9 @@ pub struct FilelogDeltaed {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Filelog {
pub node_key: mercurial::HgNodeKey,
pub p1: Option<mercurial::NodeHash>,
pub p2: Option<mercurial::NodeHash>,
pub linknode: mercurial::NodeHash,
pub p1: Option<HgNodeHash>,
pub p2: Option<HgNodeHash>,
pub linknode: HgNodeHash,
pub data: Bytes,
}
@ -95,7 +95,7 @@ where
struct DeltaCache {
repo: Arc<BlobRepo>,
bytes_cache: HashMap<mercurial::NodeHash, Shared<BoxFuture<Bytes, Compat<Error>>>>,
bytes_cache: HashMap<HgNodeHash, Shared<BoxFuture<Bytes, Compat<Error>>>>,
}
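The Shared wrapper is what lets many deltas wait on one fetch: the first request for a node inserts a Shared handle, and later requests clone it instead of refetching. A minimal sketch of that memoization pattern under the futures 0.1 and futures_ext APIs used in this diff (get_or_fetch is a hypothetical helper, not part of the change):

use std::collections::HashMap;

use bytes::Bytes;
use failure::{Compat, Error};
use futures::Future;
use futures::future::Shared;
use futures_ext::{BoxFuture, FutureExt};
use mercurial::HgNodeHash;

// First caller stores the Shared future; later callers clone the handle,
// so the underlying fetch runs at most once per node.
fn get_or_fetch(
    cache: &mut HashMap<HgNodeHash, Shared<BoxFuture<Bytes, Compat<Error>>>>,
    node: HgNodeHash,
    fetch: BoxFuture<Bytes, Error>,
) -> Shared<BoxFuture<Bytes, Compat<Error>>> {
    cache
        .entry(node)
        .or_insert_with(|| fetch.map_err(|e| e.compat()).boxify().shared())
        .clone()
}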
impl DeltaCache {
@ -108,8 +108,8 @@ impl DeltaCache {
fn decode(
&mut self,
node: mercurial::NodeHash,
base: Option<mercurial::NodeHash>,
node: HgNodeHash,
base: Option<HgNodeHash>,
delta: Delta,
) -> BoxFuture<Bytes, Error> {
let bytes = match self.bytes_cache.get(&node).cloned() {
@ -169,11 +169,11 @@ impl Arbitrary for Filelog {
Filelog {
node_key: mercurial::HgNodeKey {
path: RepoPath::FilePath(MPath::arbitrary(g)),
hash: mercurial::NodeHash::arbitrary(g),
hash: HgNodeHash::arbitrary(g),
},
p1: mercurial::NodeHash::arbitrary(g).into_option(),
p2: mercurial::NodeHash::arbitrary(g).into_option(),
linknode: mercurial::NodeHash::arbitrary(g),
p1: HgNodeHash::arbitrary(g).into_option(),
p2: HgNodeHash::arbitrary(g).into_option(),
linknode: HgNodeHash::arbitrary(g),
data: Bytes::from(Vec::<u8>::arbitrary(g)),
}
}
@ -245,13 +245,13 @@ mod tests {
}
}
fn next(&mut self) -> mercurial::NodeHash {
fn next(&mut self) -> HgNodeHash {
for i in 0..self.bytes.len() {
if self.bytes[i] == 255 {
self.bytes[i] = 0;
} else {
self.bytes[i] = self.bytes[i] + 1;
return mercurial::NodeHash::from_bytes(self.bytes.as_slice()).unwrap();
return HgNodeHash::from_bytes(self.bytes.as_slice()).unwrap();
}
}


@ -16,7 +16,7 @@ use futures::{Future, IntoFuture, Stream};
use futures::future::{err, ok};
use futures::stream;
use futures_ext::{BoxFuture, BoxStream, FutureExt, StreamExt};
use mercurial;
use mercurial::{self, HgNodeHash};
use mercurial::changeset::RevlogChangeset;
use mercurial::manifest::ManifestContent;
use mercurial_bundles::{parts, Bundle2EncodeBuilder, Bundle2Item};
@ -30,10 +30,10 @@ use upload_blobs::{upload_hg_blobs, UploadBlobsType, UploadableHgBlob};
use wirepackparser::{TreemanifestBundle2Parser, TreemanifestEntry};
type PartId = u32;
type Changesets = Vec<(mercurial::NodeHash, RevlogChangeset)>;
type Changesets = Vec<(HgNodeHash, RevlogChangeset)>;
type Filelogs = HashMap<mercurial::HgNodeKey, <Filelog as UploadableHgBlob>::Value>;
type Manifests = HashMap<mercurial::HgNodeKey, <TreemanifestEntry as UploadableHgBlob>::Value>;
type UploadedChangesets = HashMap<mercurial::NodeHash, ChangesetHandle>;
type UploadedChangesets = HashMap<HgNodeHash, ChangesetHandle>;
/// The resolve function takes a bundle2, interprets its content as Changesets, Filelogs and
/// Manifests and uploads all of them to the provided BlobRepo in the correct order.
@ -334,7 +334,7 @@ impl Bundle2Resolver {
) -> BoxFuture<(), Error> {
fn upload_changeset(
repo: Arc<BlobRepo>,
node: mercurial::NodeHash,
node: HgNodeHash,
revlog_cs: RevlogChangeset,
mut uploaded_changesets: UploadedChangesets,
filelogs: &Filelogs,
@ -455,7 +455,7 @@ impl Bundle2Resolver {
fn get_parent(
repo: &BlobRepo,
map: &UploadedChangesets,
p: Option<mercurial::NodeHash>,
p: Option<HgNodeHash>,
) -> BoxFuture<Option<ChangesetHandle>, Error> {
match p {
None => ok(None).boxify(),


@ -14,7 +14,7 @@ use futures::future::Shared;
use futures_ext::{BoxFuture, FutureExt};
use blobrepo::{BlobEntry, BlobRepo};
use mercurial;
use mercurial::{self, HgNodeHash};
use mercurial::manifest::ManifestContent;
use mercurial_bundles::wirepack::{DataEntry, HistoryEntry, Part};
use mercurial_bundles::wirepack::converter::{WirePackConverter, WirePackPartProcessor};
@ -60,8 +60,8 @@ where
pub struct TreemanifestEntry {
pub node_key: mercurial::HgNodeKey,
pub data: Bytes,
pub p1: Option<mercurial::NodeHash>,
pub p2: Option<mercurial::NodeHash>,
pub p1: Option<HgNodeHash>,
pub p2: Option<HgNodeHash>,
pub manifest_content: ManifestContent,
}
@ -69,8 +69,8 @@ impl TreemanifestEntry {
fn new(
node_key: mercurial::HgNodeKey,
data: Bytes,
p1: mercurial::NodeHash,
p2: mercurial::NodeHash,
p1: HgNodeHash,
p2: HgNodeHash,
) -> Result<Self> {
let manifest_content = ManifestContent::parse(data.as_ref())?;
@ -114,9 +114,9 @@ impl UploadableHgBlob for TreemanifestEntry {
}
struct TreemanifestPartProcessor {
node: Option<mercurial::NodeHash>,
p1: Option<mercurial::NodeHash>,
p2: Option<mercurial::NodeHash>,
node: Option<HgNodeHash>,
p1: Option<HgNodeHash>,
p2: Option<HgNodeHash>,
path: Option<RepoPath>,
}


@ -22,7 +22,7 @@ use filenodes::FilenodeInfo;
use futures::sync::mpsc::UnboundedSender;
use futures_ext::{BoxFuture, BoxStream, FutureExt, StreamExt};
use heads::Heads;
use mercurial::{self, RevlogManifest, RevlogRepo};
use mercurial::{self, HgNodeHash, RevlogManifest, RevlogRepo};
use mercurial::revlog::RevIdx;
use mercurial::revlogrepo::RevlogRepoBlobimportExt;
use mercurial_types::{DBlobNode, DFileNodeId, HgBlob, RepoPath, RepositoryId};
@ -59,19 +59,18 @@ where
let skip = self.skip;
let commits_limit = self.commits_limit;
let changesets: BoxStream<mercurial::NodeHash, mercurial::Error> = if let Some(skip) = skip
{
let changesets: BoxStream<HgNodeHash, mercurial::Error> = if let Some(skip) = skip {
self.repo.changesets().skip(skip).boxify()
} else {
self.repo.changesets().boxify()
};
let changesets: BoxStream<mercurial::NodeHash, mercurial::Error> =
if let Some(limit) = commits_limit {
changesets.take(limit).boxify()
} else {
changesets.boxify()
};
let changesets: BoxStream<HgNodeHash, mercurial::Error> = if let Some(limit) = commits_limit
{
changesets.take(limit).boxify()
} else {
changesets.boxify()
};
// Generate stream of changesets. For each changeset, save the cs blob, and the manifest
// blob, and the files.
@ -148,13 +147,12 @@ where
.boxify()
}
fn get_changesets_stream(&self) -> BoxStream<mercurial::NodeHash, mercurial::Error> {
let changesets: BoxStream<mercurial::NodeHash, mercurial::Error> =
if let Some(skip) = self.skip {
self.repo.changesets().skip(skip).boxify()
} else {
self.repo.changesets().boxify()
};
fn get_changesets_stream(&self) -> BoxStream<HgNodeHash, mercurial::Error> {
let changesets: BoxStream<HgNodeHash, mercurial::Error> = if let Some(skip) = self.skip {
self.repo.changesets().skip(skip).boxify()
} else {
self.repo.changesets().boxify()
};
if let Some(limit) = self.commits_limit {
changesets.take(limit).boxify()
@ -231,7 +229,7 @@ fn put_blobs(
revlog_repo: RevlogRepo,
sender: SyncSender<BlobstoreEntry>,
filenodes: UnboundedSender<FilenodeInfo>,
mfid: mercurial::NodeHash,
mfid: HgNodeHash,
linkrev: RevIdx,
) -> impl Future<Item = (), Error = Error> + Send + 'static {
let cs_entry_fut = revlog_repo
@ -317,10 +315,10 @@ fn put_blobs(
fn create_filenode(
blob: HgBlob,
filenode_hash: mercurial::NodeHash,
filenode_hash: HgNodeHash,
parents: mercurial::Parents,
repopath: RepoPath,
linknode: mercurial::NodeHash,
linknode: HgNodeHash,
) -> FilenodeInfo {
let (p1, p2) = parents.get_nodes();
let p1 = p1.map(|p| p.into_mononoke());


@ -15,7 +15,7 @@ use futures::{self, stream, Future, IntoFuture, Stream};
use blobrepo::RawNodeBlob;
use futures_ext::StreamExt;
use mercurial::{self, RevlogEntry, RevlogRepo};
use mercurial::{self, HgNodeHash, RevlogEntry, RevlogRepo};
use mercurial::revlog::RevIdx;
use mercurial::revlogrepo::RevlogRepoBlobimportExt;
use mercurial_types::{DParents, HgBlob, HgBlobHash, MPath, RepoPath, Type};
@ -24,7 +24,7 @@ use BlobstoreEntry;
pub(crate) fn put_entry(
sender: SyncSender<BlobstoreEntry>,
entry_hash: mercurial::NodeHash,
entry_hash: HgNodeHash,
blob: HgBlob,
parents: mercurial::Parents,
) -> impl Future<Item = (), Error = Error> + Send + 'static


@ -39,19 +39,12 @@ use tokio_core::reactor::Core;
use blobrepo::{BlobEntry, BlobRepo, ChangesetHandle};
use changesets::SqliteChangesets;
use mercurial::{RevlogChangeset, RevlogEntry, RevlogRepo};
use mercurial::{HgNodeHash, RevlogChangeset, RevlogEntry, RevlogRepo};
use mercurial_types::{HgBlob, MPath, RepoPath, RepositoryId, Type};
struct ParseChangeset {
revlogcs: BoxFuture<SharedItem<RevlogChangeset>, Error>,
rootmf: BoxFuture<
(
HgBlob,
Option<mercurial::NodeHash>,
Option<mercurial::NodeHash>,
),
Error,
>,
rootmf: BoxFuture<(HgBlob, Option<HgNodeHash>, Option<HgNodeHash>), Error>,
entries: BoxStream<(Option<MPath>, RevlogEntry), Error>,
}
@ -285,8 +278,7 @@ fn main() {
bad => panic!("unexpected blobstore type: {}", bad),
};
let mut parent_changeset_handles: HashMap<mercurial::NodeHash, ChangesetHandle> =
HashMap::new();
let mut parent_changeset_handles: HashMap<HgNodeHash, ChangesetHandle> = HashMap::new();
let csstream = revlogrepo
.changesets()


@ -22,9 +22,9 @@ use futures::future::{self, err, ok, Either, Future};
use futures::stream::{self, futures_ordered, once, Stream};
use futures::sync::oneshot;
use HgNodeHash;
use dechunker::Dechunker;
use futures_ext::{BoxFuture, BoxStream, BytesStream, FutureExt, StreamExt};
use mercurial::NodeHash;
use mercurial_bundles::Bundle2Item;
use mercurial_bundles::bundle2::{self, Bundle2Stream, StreamEvent};
use mercurial_types::MPath;
@ -253,7 +253,7 @@ struct GetfilesArgDecoder {}
// Parses one (hash, path) pair
impl Decoder for GetfilesArgDecoder {
// If None has been decoded, then that means that the client has sent all the data
type Item = Option<(NodeHash, MPath)>;
type Item = Option<(HgNodeHash, MPath)>;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>> {
@ -290,7 +290,7 @@ impl Decoder for GetfilesArgDecoder {
Err(err_msg("Expected non-empty file"))
} else {
let nodehashstr = String::from_utf8(nodehashbytes.to_vec())?;
let nodehash = NodeHash::from_str(&nodehashstr)?;
let nodehash = HgNodeHash::from_str(&nodehashstr)?;
// Some here means that a new entry has been parsed
let parsed_res = Some((nodehash, MPath::new(&buf)?));
// 'Ok' means no error, 'Some' means that no more bytes are needed.
@ -310,7 +310,7 @@ impl Decoder for GetfilesArgDecoder {
fn decode_getfiles_arg_stream<S>(
input: BytesStream<S>,
) -> (
BoxStream<(NodeHash, MPath), Error>,
BoxStream<(HgNodeHash, MPath), Error>,
BoxFuture<BytesStream<S>, Error>,
)
where
@ -453,12 +453,12 @@ pub type HgCommandRes<T> = BoxFuture<T, Error>;
// TODO: placeholder types are generally `()`
pub trait HgCommands {
// @wireprotocommand('between', 'pairs')
fn between(&self, _pairs: Vec<(NodeHash, NodeHash)>) -> HgCommandRes<Vec<Vec<NodeHash>>> {
fn between(&self, _pairs: Vec<(HgNodeHash, HgNodeHash)>) -> HgCommandRes<Vec<Vec<HgNodeHash>>> {
unimplemented("between")
}
// @wireprotocommand('branchmap')
fn branchmap(&self) -> HgCommandRes<HashMap<String, HashSet<NodeHash>>> {
fn branchmap(&self) -> HgCommandRes<HashMap<String, HashSet<HgNodeHash>>> {
// We have no plans to support mercurial branches and hence no plans for branchmap,
// so just return a fake response.
future::ok(HashMap::new()).boxify()
@ -476,7 +476,7 @@ pub trait HgCommands {
}
// @wireprotocommand('heads')
fn heads(&self) -> HgCommandRes<HashSet<NodeHash>> {
fn heads(&self) -> HgCommandRes<HashSet<HgNodeHash>> {
unimplemented("heads")
}
@ -496,7 +496,7 @@ pub trait HgCommands {
}
// @wireprotocommand('known', 'nodes *')
fn known(&self, _nodes: Vec<NodeHash>) -> HgCommandRes<Vec<bool>> {
fn known(&self, _nodes: Vec<HgNodeHash>) -> HgCommandRes<Vec<bool>> {
unimplemented("known")
}
@ -515,7 +515,7 @@ pub trait HgCommands {
}
// @wireprotocommand('getfiles', 'files*')
fn getfiles(&self, _params: BoxStream<(NodeHash, MPath), Error>) -> BoxStream<Bytes, Error> {
fn getfiles(&self, _params: BoxStream<(HgNodeHash, MPath), Error>) -> BoxStream<Bytes, Error> {
once(Err(ErrorKind::Unimplemented("getfiles".into()).into())).boxify()
}
}
@ -542,11 +542,11 @@ mod test {
vs.into_iter().next().unwrap()
}
fn hash_ones() -> NodeHash {
fn hash_ones() -> HgNodeHash {
"1111111111111111111111111111111111111111".parse().unwrap()
}
fn hash_twos() -> NodeHash {
fn hash_twos() -> HgNodeHash {
"2222222222222222222222222222222222222222".parse().unwrap()
}


@ -48,7 +48,7 @@ use std::fmt::{self, Debug};
use bytes::Bytes;
use mercurial::NodeHash;
use mercurial::HgNodeHash;
mod batch;
mod dechunker;
@ -66,7 +66,7 @@ pub enum Request {
#[derive(Debug, Eq, PartialEq)]
pub enum SingleRequest {
Between {
pairs: Vec<(NodeHash, NodeHash)>,
pairs: Vec<(HgNodeHash, HgNodeHash)>,
},
Branchmap,
Capabilities,
@ -85,7 +85,7 @@ pub enum SingleRequest {
key: String,
},
Known {
nodes: Vec<NodeHash>,
nodes: Vec<HgNodeHash>,
},
Unbundle {
heads: Vec<String>,
@ -98,8 +98,8 @@ pub enum SingleRequest {
/// the convenience of callers.
#[derive(Eq, PartialEq)]
pub struct GetbundleArgs {
pub heads: Vec<NodeHash>,
pub common: Vec<NodeHash>,
pub heads: Vec<HgNodeHash>,
pub common: Vec<HgNodeHash>,
pub bundlecaps: Vec<Vec<u8>>,
pub listkeys: Vec<Vec<u8>>,
}
@ -131,9 +131,9 @@ pub struct GettreepackArgs {
/// "root of the repo".
pub rootdir: Bytes,
/// The manifest nodes of the specified root directory to send.
pub mfnodes: Vec<NodeHash>,
pub mfnodes: Vec<HgNodeHash>,
/// The manifest nodes of the rootdir that are already on the client.
pub basemfnodes: Vec<NodeHash>,
pub basemfnodes: Vec<HgNodeHash>,
/// The fullpath (not relative path) of directories underneath
/// the rootdir that should be sent.
pub directories: Vec<Bytes>,
@ -147,12 +147,12 @@ pub enum Response {
#[derive(Debug)]
pub enum SingleResponse {
Between(Vec<Vec<NodeHash>>),
Branchmap(HashMap<String, HashSet<NodeHash>>),
Between(Vec<Vec<HgNodeHash>>),
Branchmap(HashMap<String, HashSet<HgNodeHash>>),
Capabilities(Vec<String>),
Debugwireargs(Bytes),
Getbundle(Bytes),
Heads(HashSet<NodeHash>),
Heads(HashSet<HgNodeHash>),
Hello(HashMap<String, Vec<String>>),
Listkeys(HashMap<Vec<u8>, Vec<u8>>),
Lookup(Bytes),


@ -11,7 +11,7 @@ use std::str::{self, FromStr};
use bytes::{Bytes, BytesMut};
use nom::{is_alphanumeric, is_digit, ErrorKind, FindSubstring, IResult, Needed, Slice};
use mercurial::NodeHash;
use HgNodeHash;
use {GetbundleArgs, GettreepackArgs, Request, SingleRequest};
use batch;
@ -166,25 +166,25 @@ named_args!(batch_params(_count: usize)<HashMap<Vec<u8>, Vec<u8>>>,
/// A nodehash is simply 40 hex digits.
named!(
nodehash<NodeHash>,
nodehash<HgNodeHash>,
map_res!(take!(40), |v: &[u8]| str::parse(str::from_utf8(v)?))
);
/// A pair of nodehashes, separated by '-'
named!(
pair<(NodeHash, NodeHash)>,
pair<(HgNodeHash, HgNodeHash)>,
do_parse!(a: nodehash >> tag!("-") >> b: nodehash >> ((a, b)))
);
/// A space-separated list of pairs.
named!(
pairlist<Vec<(NodeHash, NodeHash)>>,
pairlist<Vec<(HgNodeHash, HgNodeHash)>>,
separated_list_complete!(tag!(" "), pair)
);
/// A space-separated list of node hashes
named!(
hashlist<Vec<NodeHash>>,
hashlist<Vec<HgNodeHash>>,
separated_list_complete!(tag!(" "), nodehash)
);
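As a usage sketch, a between argument is a space-separated list of dash-joined hash pairs. This assumes nom 3's IResult::Done variant, which the IResult import above suggests; parse_between_pairs is a hypothetical helper, not part of the change.

// Input shape: "<40 hex>-<40 hex> <40 hex>-<40 hex> ..."
fn parse_between_pairs(input: &[u8]) -> Option<Vec<(HgNodeHash, HgNodeHash)>> {
    match pairlist(input) {
        IResult::Done(_rest, pairs) => Some(pairs),
        _ => None, // Error or Incomplete
    }
}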
@ -972,19 +972,19 @@ mod test_parse {
use super::*;
use std::fmt::Debug;
fn hash_ones() -> NodeHash {
fn hash_ones() -> HgNodeHash {
"1111111111111111111111111111111111111111".parse().unwrap()
}
fn hash_twos() -> NodeHash {
fn hash_twos() -> HgNodeHash {
"2222222222222222222222222222222222222222".parse().unwrap()
}
fn hash_threes() -> NodeHash {
fn hash_threes() -> HgNodeHash {
"3333333333333333333333333333333333333333".parse().unwrap()
}
fn hash_fours() -> NodeHash {
fn hash_fours() -> HgNodeHash {
"4444444444444444444444444444444444444444".parse().unwrap()
}


@ -4,7 +4,7 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
use mercurial::NodeHash;
use mercurial::HgNodeHash;
use mercurial_types::{Delta, MPath};
pub mod packer;
@ -35,11 +35,11 @@ impl Part {
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct CgDeltaChunk {
pub node: NodeHash,
pub p1: NodeHash,
pub p2: NodeHash,
pub base: NodeHash,
pub linknode: NodeHash,
pub node: HgNodeHash,
pub p1: HgNodeHash,
pub p2: HgNodeHash,
pub base: HgNodeHash,
pub linknode: HgNodeHash,
pub delta: Delta,
}


@ -191,11 +191,11 @@ impl Cg2Unpacker {
// A chunk header has:
// ---
// node: NodeHash (20 bytes)
// p1: NodeHash (20 bytes)
// p2: NodeHash (20 bytes) -- NULL_HASH if only 1 parent
// base node: NodeHash (20 bytes) (new in changegroup2)
// link node: NodeHash (20 bytes)
// node: HgNodeHash (20 bytes)
// p1: HgNodeHash (20 bytes)
// p2: HgNodeHash (20 bytes) -- NULL_HASH if only 1 parent
// base node: HgNodeHash (20 bytes) (new in changegroup2)
// link node: HgNodeHash (20 bytes)
// ---
let node = buf.drain_node();
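A hedged sketch of what draining that header amounts to: drain_node (see the BytesExt impl later in this diff) splits 20 bytes off the buffer and hands them to HgNodeHash::from_bytes, once per field. parse_cg2_header below is illustrative, not part of the change.

use bytes::BytesMut;
use mercurial::HgNodeHash;

// Returns (node, p1, p2, base, linknode); panics if fewer than 100 bytes
// remain, matching drain_node's unwrap.
fn parse_cg2_header(
    buf: &mut BytesMut,
) -> (HgNodeHash, HgNodeHash, HgNodeHash, HgNodeHash, HgNodeHash) {
    let mut drain = || HgNodeHash::from_bytes(buf.split_to(20).as_ref()).unwrap();
    (drain(), drain(), drain(), drain(), drain())
}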


@ -17,7 +17,7 @@ use super::wirepack;
use super::wirepack::packer::WirePackPacker;
use errors::*;
use mercurial::{BlobNode, NodeHash, NULL_HASH};
use mercurial::{BlobNode, HgNodeHash, NULL_HASH};
use mercurial_types::{Delta, MPath, MPathElement, RepoPath};
use part_encode::PartEncodeBuilder;
use part_header::PartHeaderType;
@ -50,7 +50,7 @@ where
pub fn changegroup_part<S>(changelogentries: S) -> Result<PartEncodeBuilder>
where
S: Stream<Item = (NodeHash, BlobNode), Error = Error> + Send + 'static,
S: Stream<Item = (HgNodeHash, BlobNode), Error = Error> + Send + 'static,
{
let mut builder = PartEncodeBuilder::mandatory(PartHeaderType::Changegroup)?;
builder.add_mparam("version", "02")?;
@ -95,12 +95,12 @@ where
}
pub struct TreepackPartInput {
pub node: NodeHash,
pub p1: Option<NodeHash>,
pub p2: Option<NodeHash>,
pub node: HgNodeHash,
pub p1: Option<HgNodeHash>,
pub p2: Option<HgNodeHash>,
pub content: Bytes,
pub name: Option<MPathElement>,
pub linknode: NodeHash,
pub linknode: HgNodeHash,
pub basepath: Option<MPath>,
}


@ -19,7 +19,7 @@ use bytes::Bytes;
use futures::stream;
use quickcheck::{empty_shrinker, Arbitrary, Gen};
use mercurial::NodeHash;
use mercurial::HgNodeHash;
use mercurial_types::{Delta, MPath};
use changegroup;
@ -192,11 +192,11 @@ impl Arbitrary for changegroup::CgDeltaChunk {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
// TODO: should these be more structured? e.g. base = p1 some of the time
changegroup::CgDeltaChunk {
node: NodeHash::arbitrary(g),
p1: NodeHash::arbitrary(g),
p2: NodeHash::arbitrary(g),
base: NodeHash::arbitrary(g),
linknode: NodeHash::arbitrary(g),
node: HgNodeHash::arbitrary(g),
p1: HgNodeHash::arbitrary(g),
p2: HgNodeHash::arbitrary(g),
base: HgNodeHash::arbitrary(g),
linknode: HgNodeHash::arbitrary(g),
delta: Delta::arbitrary(g),
}
}


@ -20,7 +20,7 @@ use tokio_io::AsyncRead;
use async_compression::{Bzip2Compression, CompressorType, FlateCompression};
use async_compression::membuf::MemBuf;
use mercurial::{NodeHash, NULL_HASH};
use mercurial::{HgNodeHash, NULL_HASH};
use mercurial_types::{MPath, RepoPath};
use partial_io::{GenWouldBlock, PartialAsyncRead, PartialWithErrors};
use quickcheck::{QuickCheck, StdGen};
@ -253,7 +253,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
let chunk = res.chunk();
// Verify that changesets parsed correctly.
let changeset1_hash = NodeHash::from_str(CHANGESET1_HASH_STR).unwrap();
let changeset1_hash = HgNodeHash::from_str(CHANGESET1_HASH_STR).unwrap();
assert_eq!(chunk.node, changeset1_hash);
assert_eq!(chunk.p1, NULL_HASH);
assert_eq!(chunk.p2, NULL_HASH);
@ -271,7 +271,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
assert_eq!(*res.section(), changegroup::Section::Changeset);
let chunk = res.chunk();
let changeset2_hash = NodeHash::from_str(CHANGESET2_HASH_STR).unwrap();
let changeset2_hash = HgNodeHash::from_str(CHANGESET2_HASH_STR).unwrap();
assert_eq!(chunk.node, changeset2_hash);
assert_eq!(chunk.p1, changeset1_hash);
assert_eq!(chunk.p2, NULL_HASH);
@ -298,7 +298,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
assert_eq!(*res.section(), changegroup::Section::Manifest);
let chunk = res.chunk();
let manifest1_hash = NodeHash::from_str(MANIFEST1_HASH_STR).unwrap();
let manifest1_hash = HgNodeHash::from_str(MANIFEST1_HASH_STR).unwrap();
assert_eq!(chunk.node, manifest1_hash);
assert_eq!(chunk.p1, NULL_HASH);
assert_eq!(chunk.p2, NULL_HASH);
@ -311,7 +311,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
assert_eq!(*res.section(), changegroup::Section::Manifest);
let chunk = res.chunk();
let manifest2_hash = NodeHash::from_str(MANIFEST2_HASH_STR).unwrap();
let manifest2_hash = HgNodeHash::from_str(MANIFEST2_HASH_STR).unwrap();
assert_eq!(chunk.node, manifest2_hash);
assert_eq!(chunk.p1, manifest1_hash);
assert_eq!(chunk.p2, NULL_HASH);
@ -333,7 +333,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
assert_eq!(*res.section(), changegroup::Section::Filelog(path(b"abc")));
let chunk = res.chunk();
let abch = NodeHash::from_str(ABC_HASH_STR).unwrap();
let abch = HgNodeHash::from_str(ABC_HASH_STR).unwrap();
assert_eq!(chunk.node, abch);
assert_eq!(chunk.p1, NULL_HASH);
assert_eq!(chunk.p2, NULL_HASH);
@ -354,7 +354,7 @@ fn verify_cg2(core: &mut Core, stream: BoxStream<changegroup::Part, Error>) {
assert_eq!(*res.section(), changegroup::Section::Filelog(path(b"def")));
let chunk = res.chunk();
let defh = NodeHash::from_str(DEF_HASH_STR).unwrap();
let defh = HgNodeHash::from_str(DEF_HASH_STR).unwrap();
assert_eq!(chunk.node, defh);
assert_eq!(chunk.p1, NULL_HASH);
assert_eq!(chunk.p2, NULL_HASH);
@ -428,9 +428,9 @@ fn parse_wirepack(read_ops: PartialWithErrors<GenWouldBlock>) {
// These are a few identifiers present in the bundle.
let baz_dir = RepoPath::dir("baz").unwrap();
let baz_hash = NodeHash::from_str("dcb9fa4bb7cdb673cd5752088b48d4c3f9c1fc23").unwrap();
let root_hash = NodeHash::from_str("7d315c7a04cce5404f7ef16bf55eb7f4e90d159f").unwrap();
let root_p1 = NodeHash::from_str("e313fc172615835d205f5881f8f34dd9bb0f0092").unwrap();
let baz_hash = HgNodeHash::from_str("dcb9fa4bb7cdb673cd5752088b48d4c3f9c1fc23").unwrap();
let root_hash = HgNodeHash::from_str("7d315c7a04cce5404f7ef16bf55eb7f4e90d159f").unwrap();
let root_p1 = HgNodeHash::from_str("e313fc172615835d205f5881f8f34dd9bb0f0092").unwrap();
let (res, wirepacks) = core.next_stream(wirepacks);
let res = res.expect("expected part");


@ -13,7 +13,7 @@ use byteorder::{BigEndian, ByteOrder};
use bytes::{Bytes, BytesMut};
use async_compression::{CompressorType, DecompressorType};
use mercurial::NodeHash;
use mercurial::HgNodeHash;
use mercurial_types::MPath;
use errors::*;
@ -44,7 +44,7 @@ pub trait BytesExt {
fn drain_i32(&mut self) -> i32;
fn drain_str(&mut self, len: usize) -> Result<String>;
fn drain_path(&mut self, len: usize) -> Result<MPath>;
fn drain_node(&mut self) -> NodeHash;
fn drain_node(&mut self) -> HgNodeHash;
fn peek_u16(&self) -> u16;
fn peek_u32(&self) -> u32;
fn peek_i32(&self) -> i32;
@ -94,10 +94,10 @@ where
}
#[inline]
fn drain_node(&mut self) -> NodeHash {
fn drain_node(&mut self) -> HgNodeHash {
// This only fails if the size of input passed in isn't 20
// bytes. drain_to would have panicked in that case anyway.
NodeHash::from_bytes(self.split_to(20).as_ref()).unwrap()
HgNodeHash::from_bytes(self.split_to(20).as_ref()).unwrap()
}
#[inline]


@ -11,7 +11,7 @@ use std::fmt;
use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, BytesMut};
use mercurial::{NodeHash, NULL_HASH};
use mercurial::{HgNodeHash, NULL_HASH};
use mercurial_types::{Delta, RepoPath};
use delta;
@ -96,11 +96,11 @@ const DATA_HEADER_SIZE: usize = DATA_DELTA_OFFSET + 8;
// TODO: move to mercurial-types
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HistoryEntry {
pub node: NodeHash,
pub node: HgNodeHash,
// TODO: replace with Parents?
pub p1: NodeHash,
pub p2: NodeHash,
pub linknode: NodeHash,
pub p1: HgNodeHash,
pub p2: HgNodeHash,
pub linknode: HgNodeHash,
pub copy_from: Option<RepoPath>,
}
@ -112,10 +112,10 @@ impl HistoryEntry {
// A history revision has:
// ---
// node: NodeHash (20 bytes)
// p1: NodeHash (20 bytes)
// p2: NodeHash (20 bytes)
// link node: NodeHash (20 bytes)
// node: HgNodeHash (20 bytes)
// p1: HgNodeHash (20 bytes)
// p2: HgNodeHash (20 bytes)
// link node: HgNodeHash (20 bytes)
// copy from len: u16 (2 bytes) -- 0 if this revision is not a copy
// copy from: RepoPath (<copy from len> bytes)
// ---
@ -218,8 +218,8 @@ impl HistoryEntry {
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DataEntry {
pub node: NodeHash,
pub delta_base: NodeHash,
pub node: HgNodeHash,
pub delta_base: HgNodeHash,
pub delta: Delta,
}
@ -231,8 +231,8 @@ impl DataEntry {
// A data revision has:
// ---
// node: NodeHash (20 bytes)
// delta base: NodeHash (20 bytes) -- NULL_HASH if full text
// node: HgNodeHash (20 bytes)
// delta base: HgNodeHash (20 bytes) -- NULL_HASH if full text
// delta len: u64 (8 bytes)
// delta: Delta (<delta len> bytes)
// ---
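A minimal sketch of reading that header off a buffer, using the same byteorder and bytes APIs this file already imports (parse_data_header and the Option reading of NULL_HASH are illustrative, not part of the change):

use byteorder::{BigEndian, ByteOrder};
use bytes::BytesMut;
use mercurial::{HgNodeHash, NULL_HASH};

// Two raw 20-byte hashes, then a big-endian u64 delta length; a NULL_HASH
// delta base means the entry carries a full text rather than a delta.
fn parse_data_header(buf: &mut BytesMut) -> (HgNodeHash, Option<HgNodeHash>, u64) {
    let node = HgNodeHash::from_bytes(buf.split_to(20).as_ref()).unwrap();
    let base = HgNodeHash::from_bytes(buf.split_to(20).as_ref()).unwrap();
    let delta_len = BigEndian::read_u64(&buf.split_to(8));
    let base = if base == NULL_HASH { None } else { Some(base) };
    (node, base, delta_len)
}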


@ -8,7 +8,7 @@
use quickcheck::{Arbitrary, Gen};
use mercurial::{NodeHash, NULL_HASH};
use mercurial::{HgNodeHash, NULL_HASH};
use mercurial_types::{Delta, MPath, RepoPath};
use super::{DataEntry, HistoryEntry, Kind};
@ -114,10 +114,10 @@ impl HistoryEntry {
Kind::Tree => None,
};
Self {
node: NodeHash::arbitrary(g),
p1: NodeHash::arbitrary(g),
p2: NodeHash::arbitrary(g),
linknode: NodeHash::arbitrary(g),
node: HgNodeHash::arbitrary(g),
p1: HgNodeHash::arbitrary(g),
p2: HgNodeHash::arbitrary(g),
linknode: HgNodeHash::arbitrary(g),
copy_from: copy_from,
}
}
@ -140,12 +140,12 @@ impl Arbitrary for DataEntry {
} else {
let mut delta_base = NULL_HASH;
while delta_base == NULL_HASH {
delta_base = NodeHash::arbitrary(g);
delta_base = HgNodeHash::arbitrary(g);
}
(delta_base, Delta::arbitrary(g))
};
Self {
node: NodeHash::arbitrary(g),
node: HgNodeHash::arbitrary(g),
delta_base,
delta,
}


@ -4,7 +4,7 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
// Ignore deprecation of NodeHash::new
// Ignore deprecation of HgNodeHash::new
#![allow(deprecated)]
use mercurial_types::{DChangesetId, DFileNodeId, DManifestId, DNodeHash};


@ -161,7 +161,6 @@ impl Context {
}
}
#[cfg(test)]
mod test {
use super::{Sha1, NULL};


@ -27,7 +27,7 @@
//!
//! Changesets, manifests and files are uniformly represented by a `Node`. A `Node` has
//! 0-2 parents and some content. A node's identity is computed by hashing over (p1, p2, content),
//! resulting in `NodeHash` (TODO: rename NodeHash -> NodeId?). This means manifests and files
//! resulting in `HgNodeHash` (TODO: rename HgNodeHash -> NodeId?). This means manifests and files
//! have a notion of history independent of the changeset(s) they're embedded in.
//!
//! Nodes are stored as blobs in the blobstore, but with their content in a separate blob. This
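Concretely, with the BlobNode API touched elsewhere in this diff, computing an identity looks roughly like this (a minimal sketch; nodeid() returns None when the blob's content isn't available locally):

use bytes::Bytes;
use mercurial::{BlobNode, HgNodeHash};
use mercurial_types::HgBlob;

// Identity is the sha1 over (p1, p2, content): the same content under
// different parents hashes to a different node.
fn node_id(data: &[u8], p1: Option<&HgNodeHash>, p2: Option<&HgNodeHash>) -> Option<HgNodeHash> {
    BlobNode::new(HgBlob::Dirty(Bytes::from(data.to_vec())), p1, p2).nodeid()
}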


@ -24,9 +24,9 @@ pub const NULL_CSID: DChangesetId = DChangesetId(D_NULL_HASH);
/// This structure represents Sha1 based hashes that are used in Mononoke. It is a temporary
/// structure that will be entirely replaced by structures from mononoke-types::typed_hash.
/// Its current distinction from mercurial::NodeHash serves two purposes:
/// Its current distinction from HgNodeHash serves two purposes:
/// - make it relatively straightforward to replace it in future with typed_hash
/// - easily distinguish between the NodeHash values provided by the Mercurial client that might
/// - easily distinguish between the HgNodeHash values provided by the Mercurial client that might
/// require remapping, e.g. hashes of Changesets and hashes of Root Manifests, since the client
/// provides Flat Manifest hashes as aliases for Root Manifest hashes
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
@ -35,7 +35,7 @@ pub struct DNodeHash(pub(crate) Sha1);
impl DNodeHash {
#[deprecated(note = "This constructor is only used in two places: \
conversion from mercurial NodeHash and creation of NodeHash mocks")]
conversion from mercurial HgNodeHash and creation of HgNodeHash mocks")]
pub const fn new(sha1: Sha1) -> Self {
DNodeHash(sha1)
}
@ -63,7 +63,7 @@ impl DNodeHash {
}
#[deprecated(note = "This method is used only to have a \
zero-cost conversion to mercurial::NodeHash")]
zero-cost conversion to HgNodeHash")]
pub fn into_sha1(self) -> Sha1 {
self.0
}


@ -7,18 +7,18 @@
use mercurial_types::HgBlob;
use mercurial_types::hash::{self, Context};
use nodehash::NodeHash;
use nodehash::HgNodeHash;
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[derive(Serialize, Deserialize, HeapSizeOf)]
pub enum Parents {
None,
One(NodeHash),
Two(NodeHash, NodeHash),
One(HgNodeHash),
Two(HgNodeHash, HgNodeHash),
}
impl Parents {
pub fn new(p1: Option<&NodeHash>, p2: Option<&NodeHash>) -> Self {
pub fn new(p1: Option<&HgNodeHash>, p2: Option<&HgNodeHash>) -> Self {
match (p1, p2) {
(None, None) => Parents::None,
(Some(p1), None) => Parents::One(*p1),
@ -28,7 +28,7 @@ impl Parents {
}
}
pub fn get_nodes(&self) -> (Option<&NodeHash>, Option<&NodeHash>) {
pub fn get_nodes(&self) -> (Option<&HgNodeHash>, Option<&HgNodeHash>) {
match self {
&Parents::None => (None, None),
&Parents::One(ref p1) => (Some(p1), None),
@ -39,7 +39,7 @@ impl Parents {
impl<'a> IntoIterator for &'a Parents {
type IntoIter = ParentIter;
type Item = NodeHash;
type Item = HgNodeHash;
fn into_iter(self) -> ParentIter {
ParentIter(*self)
}
@ -49,7 +49,7 @@ impl<'a> IntoIterator for &'a Parents {
pub struct ParentIter(Parents);
impl Iterator for ParentIter {
type Item = NodeHash;
type Item = HgNodeHash;
fn next(&mut self) -> Option<Self::Item> {
let (ret, new) = match self.0 {
Parents::None => (None, Parents::None),
@ -81,7 +81,7 @@ impl BlobNode {
/// parent that's copied.
/// * If both p1 and p2 are None, it shouldn't really be possible to have copy info. But
/// the Mercurial Python client tries to parse metadata anyway, so match that behavior.
pub fn new<B>(blob: B, p1: Option<&NodeHash>, p2: Option<&NodeHash>) -> BlobNode
pub fn new<B>(blob: B, p1: Option<&HgNodeHash>, p2: Option<&HgNodeHash>) -> BlobNode
where
B: Into<HgBlob>,
{
@ -112,7 +112,7 @@ impl BlobNode {
// Annoyingly, filenode is defined as sha1(p1 || p2 || content), not
// sha1(p1 || p2 || sha1(content)), so we can't compute a filenode for
// a blob we don't have
pub fn nodeid(&self) -> Option<NodeHash> {
pub fn nodeid(&self) -> Option<HgNodeHash> {
let null = hash::NULL;
let (h1, h2) = match &self.parents {
@ -129,7 +129,7 @@ impl BlobNode {
ctxt.update(h2);
ctxt.update(data);
NodeHash(ctxt.finish())
HgNodeHash(ctxt.finish())
})
}
}
@ -153,19 +153,19 @@ mod test {
let p = &BlobNode::new(blob.clone(), None, None);
assert!(p.maybe_copied);
{
let pid: Option<NodeHash> = p.nodeid();
let pid: Option<HgNodeHash> = p.nodeid();
let n = BlobNode::new(blob.clone(), pid.as_ref(), None);
assert_eq!(n.parents, Parents::One(pid.unwrap()));
assert!(!n.maybe_copied);
}
{
let pid: Option<NodeHash> = p.nodeid();
let pid: Option<HgNodeHash> = p.nodeid();
let n = BlobNode::new(blob.clone(), None, pid.as_ref());
assert_eq!(n.parents, Parents::One(pid.unwrap()));
assert!(n.maybe_copied);
}
{
let pid: Option<NodeHash> = p.nodeid();
let pid: Option<HgNodeHash> = p.nodeid();
let n = BlobNode::new(blob.clone(), pid.as_ref(), pid.as_ref());
assert_eq!(n.parents, Parents::One(pid.unwrap()));
assert!(!n.maybe_copied);
@ -184,8 +184,8 @@ mod test {
mem::swap(&mut p1, &mut p2);
}
let pid1: Option<NodeHash> = (&p1).nodeid();
let pid2: Option<NodeHash> = (&p2).nodeid();
let pid1: Option<HgNodeHash> = (&p1).nodeid();
let pid2: Option<HgNodeHash> = (&p2).nodeid();
let node1 = {
let n = BlobNode::new(


@ -15,7 +15,7 @@ use mercurial_types::MPath;
use mercurial_types::changeset::Time;
use blobnode::{BlobNode, Parents};
use nodehash::{HgManifestId, NodeHash, NULL_HASH};
use nodehash::{HgManifestId, HgNodeHash, NULL_HASH};
#[cfg(test)]
mod test;
@ -242,7 +242,7 @@ impl RevlogChangeset {
.ok_or(failure::err_msg("node has no data"))?;
let mut lines = data.split(|b| *b == b'\n');
let nodehash = parseline(&mut lines, |l| NodeHash::from_str(str::from_utf8(l)?))
let nodehash = parseline(&mut lines, |l| HgNodeHash::from_str(str::from_utf8(l)?))
.context("can't get hash")?;
ret.manifestid = HgManifestId::new(nodehash);
ret.user =


@ -12,7 +12,7 @@ use mercurial_types::{HgBlob, MPath};
use blobnode::BlobNode;
use changeset::{escape, serialize_extras, unescape, Extra, RevlogChangeset, Time};
use nodehash::{HgManifestId, NodeHash};
use nodehash::{HgManifestId, HgNodeHash};
use bytes::Bytes;
@ -21,8 +21,8 @@ const CHANGESET_NOEXTRA: &[u8] = include_bytes!("cset_noextra.bin");
#[test]
fn test_parse() {
let csid: NodeHash = "0849d280663e46b3e247857f4a68fabd2ba503c3".parse().unwrap();
let p1: NodeHash = "169cb9e47f8e86079ee9fd79972092f78fbf68b1".parse().unwrap();
let csid: HgNodeHash = "0849d280663e46b3e247857f4a68fabd2ba503c3".parse().unwrap();
let p1: HgNodeHash = "169cb9e47f8e86079ee9fd79972092f78fbf68b1".parse().unwrap();
let node = BlobNode::new(HgBlob::Dirty(Bytes::from(CHANGESET)), Some(&p1), None);
let cset = RevlogChangeset::parse(node.clone()).expect("parsed");
@ -54,8 +54,8 @@ the user expected."#.into(),
}
);
let csid: NodeHash = "526722d24ee5b3b860d4060e008219e083488356".parse().unwrap();
let p1: NodeHash = "db5eb6a86179ce819db03da9ef2090b32f8e3fc4".parse().unwrap();
let csid: HgNodeHash = "526722d24ee5b3b860d4060e008219e083488356".parse().unwrap();
let p1: HgNodeHash = "db5eb6a86179ce819db03da9ef2090b32f8e3fc4".parse().unwrap();
let node = BlobNode::new(
HgBlob::Dirty(Bytes::from(CHANGESET_NOEXTRA)),
Some(&p1),
@ -88,7 +88,7 @@ clean up html code for w3c validation
#[test]
fn test_generate() {
fn test(csid: NodeHash, p1: Option<&NodeHash>, blob: HgBlob, cs: &[u8]) {
fn test(csid: HgNodeHash, p1: Option<&HgNodeHash>, blob: HgBlob, cs: &[u8]) {
let node = BlobNode::new(blob, p1, None);
let cset = RevlogChangeset::parse(node.clone()).expect("parsed");
@ -101,8 +101,8 @@ fn test_generate() {
assert_eq!(new, cs);
}
let csid: NodeHash = "0849d280663e46b3e247857f4a68fabd2ba503c3".parse().unwrap();
let p1: NodeHash = "169cb9e47f8e86079ee9fd79972092f78fbf68b1".parse().unwrap();
let csid: HgNodeHash = "0849d280663e46b3e247857f4a68fabd2ba503c3".parse().unwrap();
let p1: HgNodeHash = "169cb9e47f8e86079ee9fd79972092f78fbf68b1".parse().unwrap();
test(
csid,
Some(&p1),
@ -110,8 +110,8 @@ fn test_generate() {
CHANGESET,
);
let csid: NodeHash = "526722d24ee5b3b860d4060e008219e083488356".parse().unwrap();
let p1: NodeHash = "db5eb6a86179ce819db03da9ef2090b32f8e3fc4".parse().unwrap();
let csid: HgNodeHash = "526722d24ee5b3b860d4060e008219e083488356".parse().unwrap();
let p1: HgNodeHash = "db5eb6a86179ce819db03da9ef2090b32f8e3fc4".parse().unwrap();
test(
csid,
Some(&p1),


@ -72,6 +72,6 @@ pub use errors::*;
pub use blobnode::{BlobNode, Parents};
pub use changeset::RevlogChangeset;
pub use manifest::{EntryContent, RevlogEntry};
pub use nodehash::{EntryId, HgChangesetId, HgManifestId, HgNodeKey, NodeHash, NodeHashConversion,
NULL_HASH};
pub use nodehash::{EntryId, HgChangesetId, HgManifestId, HgNodeHash, HgNodeKey,
NodeHashConversion, NULL_HASH};
pub use revlogrepo::{RevlogManifest, RevlogRepo, RevlogRepoOptions};


@ -21,7 +21,7 @@ use mercurial_types::{FileType, HgBlob, MPath, MPathElement, RepoPath};
use mercurial_types::manifest::Type;
use blobnode::{BlobNode, Parents};
use nodehash::{EntryId, NodeHash};
use nodehash::{EntryId, HgNodeHash};
use RevlogRepo;
@ -210,7 +210,7 @@ impl Details {
let (hash, flags) = data.split_at(40);
let hash = str::from_utf8(hash)
.map_err(|err| Error::from(err))
.and_then(|hash| hash.parse::<NodeHash>())
.and_then(|hash| hash.parse::<HgNodeHash>())
.with_context(|_| format!("malformed hash: {:?}", hash))?;
let entryid = EntryId::new(hash);


@ -6,21 +6,21 @@
use mercurial_types_mocks::hash;
use nodehash::NodeHash;
use nodehash::HgNodeHash;
// Definitions for hashes 1111...ffff.
pub const ONES_HASH: NodeHash = NodeHash(hash::ONES);
pub const TWOS_HASH: NodeHash = NodeHash(hash::TWOS);
pub const THREES_HASH: NodeHash = NodeHash(hash::THREES);
pub const FOURS_HASH: NodeHash = NodeHash(hash::FOURS);
pub const FIVES_HASH: NodeHash = NodeHash(hash::FIVES);
pub const SIXES_HASH: NodeHash = NodeHash(hash::SIXES);
pub const SEVENS_HASH: NodeHash = NodeHash(hash::SEVENS);
pub const EIGHTS_HASH: NodeHash = NodeHash(hash::EIGHTS);
pub const NINES_HASH: NodeHash = NodeHash(hash::NINES);
pub const AS_HASH: NodeHash = NodeHash(hash::AS);
pub const BS_HASH: NodeHash = NodeHash(hash::BS);
pub const CS_HASH: NodeHash = NodeHash(hash::CS);
pub const DS_HASH: NodeHash = NodeHash(hash::DS);
pub const ES_HASH: NodeHash = NodeHash(hash::ES);
pub const FS_HASH: NodeHash = NodeHash(hash::FS);
pub const ONES_HASH: HgNodeHash = HgNodeHash(hash::ONES);
pub const TWOS_HASH: HgNodeHash = HgNodeHash(hash::TWOS);
pub const THREES_HASH: HgNodeHash = HgNodeHash(hash::THREES);
pub const FOURS_HASH: HgNodeHash = HgNodeHash(hash::FOURS);
pub const FIVES_HASH: HgNodeHash = HgNodeHash(hash::FIVES);
pub const SIXES_HASH: HgNodeHash = HgNodeHash(hash::SIXES);
pub const SEVENS_HASH: HgNodeHash = HgNodeHash(hash::SEVENS);
pub const EIGHTS_HASH: HgNodeHash = HgNodeHash(hash::EIGHTS);
pub const NINES_HASH: HgNodeHash = HgNodeHash(hash::NINES);
pub const AS_HASH: HgNodeHash = HgNodeHash(hash::AS);
pub const BS_HASH: HgNodeHash = HgNodeHash(hash::BS);
pub const CS_HASH: HgNodeHash = HgNodeHash(hash::CS);
pub const DS_HASH: HgNodeHash = HgNodeHash(hash::DS);
pub const ES_HASH: HgNodeHash = HgNodeHash(hash::ES);
pub const FS_HASH: HgNodeHash = HgNodeHash(hash::FS);


@ -19,20 +19,20 @@ use mercurial_types::hash::{self, Sha1};
use serde;
use sql_types::{HgChangesetIdSql, HgManifestIdSql};
pub const NULL_HASH: NodeHash = NodeHash(hash::NULL);
pub const NULL_HASH: HgNodeHash = HgNodeHash(hash::NULL);
/// This structure represents Sha1 based hashes that are used in Mercurial, but the Sha1
/// structure is private outside this crate to keep it an implementation detail.
/// This is why the main constructors to create this structure are from_bytes and from_ascii_str
/// which parse raw bytes or a hex string to create NodeHash.
/// which parse raw bytes or a hex string to create HgNodeHash.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[derive(HeapSizeOf)]
pub struct NodeHash(pub(crate) Sha1);
pub struct HgNodeHash(pub(crate) Sha1);
impl NodeHash {
/// Constructor to be used to parse 20 raw bytes that represent a sha1 hash into NodeHash
pub fn from_bytes(bytes: &[u8]) -> Result<NodeHash> {
Sha1::from_bytes(bytes).map(NodeHash)
impl HgNodeHash {
/// Constructor to be used to parse 20 raw bytes that represent a sha1 hash into HgNodeHash
pub fn from_bytes(bytes: &[u8]) -> Result<HgNodeHash> {
Sha1::from_bytes(bytes).map(HgNodeHash)
}
/// Returns the underlying 20 raw bytes that represent a sha1 hash
@ -40,10 +40,10 @@ impl NodeHash {
self.0.as_ref()
}
/// Constructor to be used to parse 40 hex digits that represent a sha1 hash into NodeHash
/// Constructor to be used to parse 40 hex digits that represent a sha1 hash into HgNodeHash
#[inline]
pub fn from_ascii_str(s: &AsciiStr) -> Result<NodeHash> {
Sha1::from_ascii_str(s).map(NodeHash)
pub fn from_ascii_str(s: &AsciiStr) -> Result<HgNodeHash> {
Sha1::from_ascii_str(s).map(HgNodeHash)
}
/// Returns a 40 hex digits representation of the sha1 hash
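A short usage sketch of the two constructors together with the FromStr impl further down this file:

use std::str::FromStr;
use mercurial::HgNodeHash;

fn demo() {
    // 20 raw bytes and the equivalent 40 hex digits parse to the same hash.
    let from_raw = HgNodeHash::from_bytes(&[0x11; 20]).unwrap();
    let from_hex = HgNodeHash::from_str("1111111111111111111111111111111111111111").unwrap();
    assert_eq!(from_raw, from_hex);
}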
@ -61,7 +61,7 @@ impl NodeHash {
}
}
/// Method used to convert a Mercurial Sha1 based NodeHash into Mononoke Sha1 based NodeHash
/// Method used to convert a Mercurial Sha1 based HgNodeHash into Mononoke Sha1 based HgNodeHash
/// without performing lookups in remapping tables. It should be used only on Filenodes and
/// Manifests that are not Root Manifests.
/// This method is temporary (as the mercurial_types hashes are) and will go away once
@ -78,26 +78,26 @@ impl NodeHash {
}
}
/// Trait to conveniently track the places where Mononoke to Mercurial NodeHash conversion is
/// Trait to conveniently track the places where Mononoke to Mercurial HgNodeHash conversion is
/// taking place without performing a lookup in remapping tables.
pub trait NodeHashConversion {
fn into_mercurial(self) -> NodeHash;
fn into_mercurial(self) -> HgNodeHash;
}
impl NodeHashConversion for DNodeHash {
/// Method used to convert a Mononoke Sha1 based NodeHash into Mercurial Sha1 based NodeHash
/// Method used to convert a Mononoke Sha1 based HgNodeHash into Mercurial Sha1 based HgNodeHash
/// without performing lookups in remapping tables. It should be used only on Filenodes and
/// Manifests that are not Root Manifests.
/// This method is temporary (as the mercurial_types hashes are) and will go away once
/// transition to BonsaiChangesets is complete
fn into_mercurial(self) -> NodeHash {
fn into_mercurial(self) -> HgNodeHash {
#![allow(deprecated)]
NodeHash(self.into_sha1())
HgNodeHash(self.into_sha1())
}
}
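Paired with into_mononoke (used elsewhere in this diff), this gives a lookup-free round trip; a hedged sketch, valid per the comments above only for filenodes and non-root manifests:

use mercurial::{HgNodeHash, NodeHashConversion};
use mercurial_types::DNodeHash;

// Both directions just rewrap the underlying Sha1; no remapping table is hit.
fn round_trip(d: DNodeHash) -> DNodeHash {
    let hg: HgNodeHash = d.into_mercurial();
    hg.into_mononoke()
}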
impl From<Option<NodeHash>> for NodeHash {
fn from(h: Option<NodeHash>) -> Self {
impl From<Option<HgNodeHash>> for HgNodeHash {
fn from(h: Option<HgNodeHash>) -> Self {
match h {
None => NULL_HASH,
Some(h) => h,
@ -129,7 +129,7 @@ impl<'de> serde::de::Visitor<'de> for StringVisitor {
}
}
impl serde::ser::Serialize for NodeHash {
impl serde::ser::Serialize for HgNodeHash {
fn serialize<S>(&self, serializer: S) -> ::std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
@ -138,42 +138,42 @@ impl serde::ser::Serialize for NodeHash {
}
}
impl<'de> serde::de::Deserialize<'de> for NodeHash {
fn deserialize<D>(deserializer: D) -> ::std::result::Result<NodeHash, D::Error>
impl<'de> serde::de::Deserialize<'de> for HgNodeHash {
fn deserialize<D>(deserializer: D) -> ::std::result::Result<HgNodeHash, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let hex = deserializer.deserialize_string(StringVisitor)?;
match Sha1::from_str(hex.as_str()) {
Ok(sha1) => Ok(NodeHash(sha1)),
Ok(sha1) => Ok(HgNodeHash(sha1)),
Err(error) => Err(serde::de::Error::custom(error)),
}
}
}
impl AsRef<[u8]> for NodeHash {
impl AsRef<[u8]> for HgNodeHash {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl FromStr for NodeHash {
impl FromStr for HgNodeHash {
type Err = <Sha1 as FromStr>::Err;
fn from_str(s: &str) -> result::Result<NodeHash, Self::Err> {
Sha1::from_str(s).map(NodeHash)
fn from_str(s: &str) -> result::Result<HgNodeHash, Self::Err> {
Sha1::from_str(s).map(HgNodeHash)
}
}
impl Display for NodeHash {
impl Display for HgNodeHash {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(fmt)
}
}
impl Arbitrary for NodeHash {
impl Arbitrary for HgNodeHash {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
NodeHash(Sha1::arbitrary(g))
HgNodeHash(Sha1::arbitrary(g))
}
fn shrink(&self) -> Box<Iterator<Item = Self>> {
@ -184,24 +184,24 @@ impl Arbitrary for NodeHash {
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[derive(HeapSizeOf, FromSqlRow, AsExpression)]
#[sql_type = "HgChangesetIdSql"]
pub struct HgChangesetId(NodeHash);
pub struct HgChangesetId(HgNodeHash);
impl HgChangesetId {
#[inline]
pub fn from_ascii_str(s: &AsciiStr) -> Result<HgChangesetId> {
NodeHash::from_ascii_str(s).map(HgChangesetId)
HgNodeHash::from_ascii_str(s).map(HgChangesetId)
}
#[inline]
pub(crate) fn as_nodehash(&self) -> &NodeHash {
pub(crate) fn as_nodehash(&self) -> &HgNodeHash {
&self.0
}
pub fn into_nodehash(self) -> NodeHash {
pub fn into_nodehash(self) -> HgNodeHash {
self.0
}
pub const fn new(hash: NodeHash) -> Self {
pub const fn new(hash: HgNodeHash) -> Self {
HgChangesetId(hash)
}
@ -212,10 +212,10 @@ impl HgChangesetId {
}
impl FromStr for HgChangesetId {
type Err = <NodeHash as FromStr>::Err;
type Err = <HgNodeHash as FromStr>::Err;
fn from_str(s: &str) -> result::Result<HgChangesetId, Self::Err> {
NodeHash::from_str(s).map(HgChangesetId)
HgNodeHash::from_str(s).map(HgChangesetId)
}
}
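The typed wrappers follow the same pattern; a small sketch of HgChangesetId round-tripping an HgNodeHash through new and into_nodehash:

use std::str::FromStr;
use mercurial::{HgChangesetId, HgNodeHash};

fn demo() {
    let hash = HgNodeHash::from_str("1111111111111111111111111111111111111111").unwrap();
    let csid = HgChangesetId::new(hash);
    // new and into_nodehash are inverse wrappers around the same Sha1.
    assert_eq!(csid.into_nodehash(), hash);
}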
@ -240,7 +240,7 @@ impl<'de> serde::de::Deserialize<'de> for HgChangesetId {
D: serde::de::Deserializer<'de>,
{
let hex = deserializer.deserialize_string(StringVisitor)?;
match NodeHash::from_str(hex.as_str()) {
match HgNodeHash::from_str(hex.as_str()) {
Ok(hash) => Ok(HgChangesetId::new(hash)),
Err(error) => Err(serde::de::Error::custom(error)),
}
@ -250,19 +250,19 @@ impl<'de> serde::de::Deserialize<'de> for HgChangesetId {
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[derive(HeapSizeOf, FromSqlRow, AsExpression)]
#[sql_type = "HgManifestIdSql"]
pub struct HgManifestId(NodeHash);
pub struct HgManifestId(HgNodeHash);
impl HgManifestId {
#[inline]
pub(crate) fn as_nodehash(&self) -> &NodeHash {
pub(crate) fn as_nodehash(&self) -> &HgNodeHash {
&self.0
}
pub fn into_nodehash(self) -> NodeHash {
pub fn into_nodehash(self) -> HgNodeHash {
self.0
}
pub const fn new(hash: NodeHash) -> Self {
pub const fn new(hash: HgNodeHash) -> Self {
HgManifestId(hash)
}
}
@ -276,14 +276,14 @@ impl Display for HgManifestId {
/// TODO: (jsgf) T25576292 EntryId should be a (Type, NodeId) tuple
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[derive(HeapSizeOf)]
pub struct EntryId(NodeHash);
pub struct EntryId(HgNodeHash);
impl EntryId {
pub fn into_nodehash(self) -> NodeHash {
pub fn into_nodehash(self) -> HgNodeHash {
self.0
}
pub fn new(hash: NodeHash) -> Self {
pub fn new(hash: HgNodeHash) -> Self {
EntryId(hash)
}
}
@ -299,5 +299,5 @@ impl Display for EntryId {
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct HgNodeKey {
pub path: RepoPath,
pub hash: NodeHash,
pub hash: HgNodeHash,
}


@ -24,7 +24,7 @@ pub use mercurial_types::{delta, HgBlob};
pub use mercurial_types::bdiff::{self, Delta};
use blobnode::BlobNode;
use nodehash::{EntryId, NodeHash};
use nodehash::{EntryId, HgNodeHash};
// Submodules
mod parser;
@ -92,8 +92,8 @@ struct RevlogInner {
header: Header,
idx: Datafile,
data: Option<Datafile>,
idxoff: BTreeMap<RevIdx, usize>, // cache of index -> offset
nodeidx: HashMap<NodeHash, RevIdx>, // cache of nodeid -> index
idxoff: BTreeMap<RevIdx, usize>, // cache of index -> offset
nodeidx: HashMap<HgNodeHash, RevIdx>, // cache of nodeid -> index
}
impl PartialEq<Self> for Revlog {
@ -212,7 +212,7 @@ impl Revlog {
}
/// Return the ordinal index of an entry with the given nodeid.
pub fn get_idx_by_nodeid(&self, nodeid: &NodeHash) -> Result<RevIdx> {
pub fn get_idx_by_nodeid(&self, nodeid: &HgNodeHash) -> Result<RevIdx> {
self.inner.get_idx_by_nodeid(nodeid)
}
@ -236,12 +236,12 @@ impl Revlog {
self.inner.get_rev(tgtidx)
}
pub fn get_rev_by_nodeid(&self, id: &NodeHash) -> Result<BlobNode> {
pub fn get_rev_by_nodeid(&self, id: &HgNodeHash) -> Result<BlobNode> {
self.inner.get_rev_by_nodeid(id)
}
/// Return the set of head revisions in a revlog
pub fn get_heads(&self) -> Result<HashSet<NodeHash>> {
pub fn get_heads(&self) -> Result<HashSet<HgNodeHash>> {
self.inner.get_heads()
}
}
@ -319,14 +319,14 @@ impl RevlogInner {
}
/// Return the ordinal index of an entry with the given nodeid.
fn get_idx_by_nodeid(&self, nodeid: &NodeHash) -> Result<RevIdx> {
fn get_idx_by_nodeid(&self, nodeid: &HgNodeHash) -> Result<RevIdx> {
match self.nodeidx.get(nodeid).cloned() {
Some(idx) => Ok(idx), // cache hit
None => Err(ErrorKind::Revlog(format!("nodeid {} not found", nodeid)).into()),
}
}
fn get_entry_by_nodeid(&self, nodeid: &NodeHash) -> Result<Entry> {
fn get_entry_by_nodeid(&self, nodeid: &HgNodeHash) -> Result<Entry> {
self.get_idx_by_nodeid(nodeid)
.and_then(|idx| self.get_entry(idx))
}
@ -521,7 +521,7 @@ impl RevlogInner {
self.make_node(&entry, HgBlob::from(Bytes::from(data)))
}
fn get_rev_by_nodeid(&self, id: &NodeHash) -> Result<BlobNode> {
fn get_rev_by_nodeid(&self, id: &HgNodeHash) -> Result<BlobNode> {
self.get_idx_by_nodeid(&id).and_then(move |idx| {
self.get_rev(idx)
.with_context(|_| format!("can't get rev for id {}", id))
@ -530,7 +530,7 @@ impl RevlogInner {
}
/// Return the set of head revisions in a revlog
fn get_heads(&self) -> Result<HashSet<NodeHash>> {
fn get_heads(&self) -> Result<HashSet<HgNodeHash>> {
// Current set of candidate heads
let mut heads = HashMap::new();


@ -14,7 +14,7 @@ use nom::{ErrorKind, IResult, Needed, be_u16, be_u32};
use mercurial_types::bdiff::Delta;
use nodehash::NodeHash;
use nodehash::HgNodeHash;
use revlog::revidx::RevIdx;
use super::lz4;
@ -81,11 +81,11 @@ pub struct Entry {
pub linkrev: RevIdx, // changeset id
pub p1: Option<RevIdx>, // parent p1
pub p2: Option<RevIdx>, // parent p2
pub nodeid: NodeHash, // nodeid
pub nodeid: HgNodeHash, // nodeid
}
impl Entry {
pub fn nodeid(&self) -> &NodeHash {
pub fn nodeid(&self) -> &HgNodeHash {
&self.nodeid
}
}
@ -140,7 +140,7 @@ named!(pub indexng<Entry>,
linkrev: linkrev.into(),
p1: if p1 == !0 { None } else { Some(p1.into()) },
p2: if p2 == !0 { None } else { Some(p2.into()) },
nodeid: NodeHash::from_bytes(&hash[..20]).expect("bad bytes for sha"),
nodeid: HgNodeHash::from_bytes(&hash[..20]).expect("bad bytes for sha"),
}
})
)
@ -171,7 +171,7 @@ named!(pub index0<Entry>,
linkrev: linkrev.into(),
p1: if p1 == !0 { None } else { Some(p1.into()) },
p2: if p2 == !0 { None } else { Some(p2.into()) },
nodeid: NodeHash::from_bytes(&hash[..20]).expect("bad bytes for sha"),
nodeid: HgNodeHash::from_bytes(&hash[..20]).expect("bad bytes for sha"),
}
})
)


@ -29,7 +29,7 @@ use blobnode::BlobNode;
pub use changeset::RevlogChangeset;
use errors::*;
pub use manifest::RevlogManifest;
use nodehash::{EntryId, HgChangesetId, HgManifestId, NodeHash, NULL_HASH};
use nodehash::{EntryId, HgChangesetId, HgManifestId, HgNodeHash, NULL_HASH};
use revlog::{self, RevIdx, Revlog, RevlogIter};
const DEFAULT_LOGS_CAPACITY: usize = 1000000;
@ -176,7 +176,7 @@ impl RevlogRepo {
})
}
pub fn get_heads(&self) -> BoxStream<NodeHash, Error> {
pub fn get_heads(&self) -> BoxStream<HgNodeHash, Error> {
match self.changelog.get_heads() {
Err(e) => stream::once(Err(e)).boxify(),
Ok(set) => stream::iter_ok(set.into_iter()).boxify(),
@ -324,7 +324,7 @@ pub trait RevlogRepoBlobimportExt {
fn get_changelog_entry_by_idx(&self, revidx: RevIdx) -> Result<revlog::Entry>;
fn get_manifest_blob_by_id(&self, nodeid: &NodeHash) -> Result<BlobNode>;
fn get_manifest_blob_by_id(&self, nodeid: &HgNodeHash) -> Result<BlobNode>;
fn get_path_revlog(&self, path: &RepoPath) -> Result<Revlog>;
}
@ -339,7 +339,7 @@ impl RevlogRepoBlobimportExt for RevlogRepo {
self.changelog.get_entry(revidx)
}
fn get_manifest_blob_by_id(&self, nodeid: &NodeHash) -> Result<BlobNode> {
fn get_manifest_blob_by_id(&self, nodeid: &HgNodeHash) -> Result<BlobNode> {
// It's possible that a commit has a null pointer to the manifest hash.
// In that case we want to return an empty blobnode
if nodeid == &NULL_HASH {
@ -366,10 +366,10 @@ impl ChangesetStream {
}
impl Stream for ChangesetStream {
type Item = NodeHash;
type Item = HgNodeHash;
type Error = Error;
fn poll(&mut self) -> Poll<Option<NodeHash>, Error> {
fn poll(&mut self) -> Poll<Option<HgNodeHash>, Error> {
match self.0.next() {
Some((_, e)) => Ok(Async::Ready(Some(e.nodeid))),
None => Ok(Async::Ready(None)),


@ -14,7 +14,7 @@ use diesel::serialize::{self, IsNull, Output, ToSql};
use diesel::sql_types::Binary;
use errors::*;
use nodehash::{HgChangesetId, HgManifestId, NodeHash};
use nodehash::{HgChangesetId, HgManifestId, HgNodeHash};
#[derive(QueryId, SqlType)]
#[mysql_type = "Blob"]
@ -41,7 +41,7 @@ where
// Using unsafe here saves on a heap allocation. See https://goo.gl/K6hapb.
let raw_bytes: *const [u8] = FromSql::<Binary, DB>::from_sql(bytes)?;
let raw_bytes: &[u8] = unsafe { &*raw_bytes };
let hash = NodeHash::from_bytes(raw_bytes).compat()?;
let hash = HgNodeHash::from_bytes(raw_bytes).compat()?;
Ok(Self::new(hash))
}
}
@ -61,7 +61,7 @@ where
// Using unsafe here saves on a heap allocation. See https://goo.gl/K6hapb.
let raw_bytes: *const [u8] = FromSql::<Binary, DB>::from_sql(bytes)?;
let raw_bytes: &[u8] = unsafe { &*raw_bytes };
let hash = NodeHash::from_bytes(raw_bytes).compat()?;
let hash = HgNodeHash::from_bytes(raw_bytes).compat()?;
Ok(Self::new(hash))
}
}


@ -35,7 +35,7 @@ use slog_scuba::ScubaDrain;
use blobrepo::BlobChangeset;
use bundle2_resolver;
use mercurial::{self, NodeHashConversion, RevlogChangeset};
use mercurial::{self, HgNodeHash, NodeHashConversion, RevlogChangeset};
use mercurial_bundles::{parts, Bundle2EncodeBuilder, Bundle2Item};
use mercurial_types::{percent_encode, Changeset, DChangesetId, DManifestId, DNodeHash, DParents,
Entry, MPath, RepoPath, RepositoryId, Type, D_NULL_HASH};
@ -486,10 +486,7 @@ impl RepoClient {
impl HgCommands for RepoClient {
// @wireprotocommand('between', 'pairs')
fn between(
&self,
pairs: Vec<(mercurial::NodeHash, mercurial::NodeHash)>,
) -> HgCommandRes<Vec<Vec<mercurial::NodeHash>>> {
fn between(&self, pairs: Vec<(HgNodeHash, HgNodeHash)>) -> HgCommandRes<Vec<Vec<HgNodeHash>>> {
info!(self.logger, "between pairs {:?}", pairs);
struct ParentStream<CS> {
@ -573,7 +570,7 @@ impl HgCommands for RepoClient {
}
// @wireprotocommand('heads')
fn heads(&self) -> HgCommandRes<HashSet<mercurial::NodeHash>> {
fn heads(&self) -> HgCommandRes<HashSet<HgNodeHash>> {
// Get a stream of heads and collect them into a HashSet
// TODO: directly return stream of heads
let logger = self.logger.clone();
@ -599,7 +596,7 @@ impl HgCommands for RepoClient {
let scuba = self.repo.scuba.clone();
let sample = self.repo.scuba_sample(ops::LOOKUP);
let remote = self.repo.remote.clone();
mercurial::NodeHash::from_str(&key)
HgNodeHash::from_str(&key)
.into_future()
.map(|h| (h, h.into_mononoke()))
.and_then(move |(mercurial_node, node)| {
@ -630,7 +627,7 @@ impl HgCommands for RepoClient {
}
// @wireprotocommand('known', 'nodes *'), but the '*' is ignored
fn known(&self, nodes: Vec<mercurial::NodeHash>) -> HgCommandRes<Vec<bool>> {
fn known(&self, nodes: Vec<HgNodeHash>) -> HgCommandRes<Vec<bool>> {
info!(self.logger, "known: {:?}", nodes);
let hgrepo = self.repo.hgrepo.clone();
let scuba = self.repo.scuba.clone();
@ -751,10 +748,7 @@ impl HgCommands for RepoClient {
}
// @wireprotocommand('getfiles', 'files*')
fn getfiles(
&self,
params: BoxStream<(mercurial::NodeHash, MPath), Error>,
) -> BoxStream<Bytes, Error> {
fn getfiles(&self, params: BoxStream<(HgNodeHash, MPath), Error>) -> BoxStream<Bytes, Error> {
let logger = self.logger.clone();
info!(logger, "getfiles");
let repo = self.repo.clone();