2017-07-28 04:00:19 +03:00
|
|
|
// Copyright (c) 2004-present, Facebook, Inc.
|
|
|
|
// All Rights Reserved.
|
|
|
|
//
|
|
|
|
// This software may be used and distributed according to the terms of the
|
|
|
|
// GNU General Public License version 2 or any later version.
|
|
|
|
|
|
|
|
use std::collections::hash_map::{Entry, HashMap};
|
2019-02-21 17:23:48 +03:00
|
|
|
use std::collections::HashSet;
|
2017-08-08 19:55:49 +03:00
|
|
|
use std::fmt::{self, Display};
|
|
|
|
use std::fs;
|
|
|
|
use std::io::{BufRead, BufReader};
|
|
|
|
use std::path::PathBuf;
|
|
|
|
use std::str::FromStr;
|
2017-09-25 15:29:36 +03:00
|
|
|
use std::sync::{Arc, RwLock};
|
2017-07-28 04:00:19 +03:00
|
|
|
|
2017-10-31 21:07:37 +03:00
|
|
|
use futures::future;
|
|
|
|
use futures::stream;
|
2019-02-21 17:23:48 +03:00
|
|
|
use futures::{Async, IntoFuture, Poll, Stream};
|
2019-05-23 21:57:45 +03:00
|
|
|
use futures_ext::{try_boxfuture, BoxFuture, BoxStream, FutureExt, StreamExt};
|
2017-07-28 04:00:19 +03:00
|
|
|
|
2019-05-23 21:57:45 +03:00
|
|
|
use crate::stockbookmarks::StockBookmarks;
|
2019-02-21 17:23:48 +03:00
|
|
|
use mercurial_types::{
|
2019-08-27 14:21:31 +03:00
|
|
|
blobs::RevlogChangeset, fncache_fsencode, simple_fsencode, HgChangesetId, HgManifestId,
|
|
|
|
HgNodeHash, MPath, MPathElement, RepoPath,
|
2019-02-21 17:23:48 +03:00
|
|
|
};
|
2017-07-28 04:00:19 +03:00
|
|
|
|
2019-05-23 21:57:45 +03:00
|
|
|
use crate::errors::*;
|
|
|
|
pub use crate::manifest::RevlogManifest;
|
|
|
|
use crate::revlog::{Revlog, RevlogIter};
|
2017-07-28 04:00:19 +03:00
|
|
|
|
2017-12-13 17:55:28 +03:00
|
|
|
/// Default cap on the number of revlogs kept in the in-memory `logcache`
/// before the cache is cleared wholesale (see `RevlogRepo::get_path_revlog`).
const DEFAULT_LOGS_CAPACITY: usize = 1_000_000;
|
2017-10-18 11:34:36 +03:00
|
|
|
|
2017-07-28 04:00:19 +03:00
|
|
|
/// Feature flags that may appear, one per line, in a Mercurial repo's
/// `.hg/requires` file. A client must understand every listed requirement
/// to safely read the repo. `Display`/`FromStr` below round-trip each
/// variant to/from the exact token Mercurial writes.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Required {
    Store,
    Fncache,
    Dotencode,
    Generaldelta,
    Treemanifest,
    Manifestv2,
    Usefncache,
    Revlogv1,
    Largefiles,
    Lz4revlog,
    StoreRequirements,
    SqlDirstate,
    HgSql,
    TreeDirstate,
    TreeState,
    LFS,
}
|
|
|
|
|
|
|
|
impl Display for Required {
|
2019-05-23 21:57:45 +03:00
|
|
|
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
2017-07-28 04:00:19 +03:00
|
|
|
use self::Required::*;
|
|
|
|
|
|
|
|
let s = match self {
|
|
|
|
&Store => "store",
|
|
|
|
&Fncache => "fncache",
|
|
|
|
&Dotencode => "dotencode",
|
|
|
|
&Generaldelta => "generaldelta",
|
|
|
|
&Treemanifest => "treemanifest",
|
|
|
|
&Manifestv2 => "manifestv2",
|
|
|
|
&Usefncache => "usefncache",
|
|
|
|
&Revlogv1 => "revlogv1",
|
|
|
|
&Largefiles => "largefiles",
|
|
|
|
&Lz4revlog => "lz4revlog",
|
2018-09-17 14:38:40 +03:00
|
|
|
&StoreRequirements => "storerequirements",
|
2017-07-28 04:00:19 +03:00
|
|
|
&SqlDirstate => "sqldirstate",
|
2017-12-03 19:28:22 +03:00
|
|
|
&HgSql => "hgsql",
|
2017-12-07 21:24:43 +03:00
|
|
|
&TreeDirstate => "treedirstate",
|
2018-06-23 00:38:36 +03:00
|
|
|
&TreeState => "treestate",
|
2018-11-22 21:32:00 +03:00
|
|
|
&LFS => "lfs",
|
2017-07-28 04:00:19 +03:00
|
|
|
};
|
|
|
|
write!(fmt, "{}", s)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl FromStr for Required {
|
|
|
|
type Err = Error;
|
|
|
|
|
|
|
|
fn from_str(s: &str) -> Result<Required> {
|
|
|
|
use self::Required::*;
|
|
|
|
|
|
|
|
match s {
|
|
|
|
"store" => Ok(Store),
|
|
|
|
"fncache" => Ok(Fncache),
|
|
|
|
"dotencode" => Ok(Dotencode),
|
|
|
|
"generaldelta" => Ok(Generaldelta),
|
|
|
|
"treemanifest" => Ok(Treemanifest),
|
|
|
|
"manifestv2" => Ok(Manifestv2),
|
|
|
|
"usefncache" => Ok(Usefncache),
|
|
|
|
"revlogv1" => Ok(Revlogv1),
|
|
|
|
"largefiles" => Ok(Largefiles),
|
|
|
|
"lz4revlog" => Ok(Lz4revlog),
|
2018-09-17 14:38:40 +03:00
|
|
|
"storerequirements" => Ok(StoreRequirements),
|
2017-07-28 04:00:19 +03:00
|
|
|
"sqldirstate" => Ok(SqlDirstate),
|
2017-12-03 19:28:22 +03:00
|
|
|
"hgsql" => Ok(HgSql),
|
2017-12-07 21:24:43 +03:00
|
|
|
"treedirstate" => Ok(TreeDirstate),
|
2018-06-23 00:38:36 +03:00
|
|
|
"treestate" => Ok(TreeState),
|
2018-11-22 21:32:00 +03:00
|
|
|
"lfs" => Ok(LFS),
|
2017-07-28 04:00:19 +03:00
|
|
|
unk => Err(ErrorKind::UnknownReq(unk.into()).into()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-17 14:38:40 +03:00
|
|
|
/// Feature flags that may appear in `.hg/store/requires`. Deliberately
/// uninhabited: this library does not support any store-level requirements
/// yet, so parsing any token fails (see `FromStr` below).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum StoreRequired {}
|
2018-09-17 14:38:40 +03:00
|
|
|
|
|
|
|
impl Display for StoreRequired {
|
2019-05-23 21:57:45 +03:00
|
|
|
fn fmt(&self, _fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
2018-09-17 14:38:40 +03:00
|
|
|
// This library currently dooesn't support any store requirements.
|
|
|
|
unimplemented!()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl FromStr for StoreRequired {
|
|
|
|
type Err = Error;
|
|
|
|
|
|
|
|
fn from_str(s: &str) -> Result<StoreRequired> {
|
|
|
|
match s {
|
|
|
|
unk => Err(ErrorKind::UnknownReq(unk.into()).into()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-28 04:00:19 +03:00
|
|
|
/// Representation of a whole Mercurial repo
///
/// `Repo` represents a whole repo: ie, the complete history of a set of files.
/// It consists of an overall history in the form of a DAG of revisions, or changesets.
/// This DAG will typically have a single initial version (though it could have more if
/// histories are merged) and one or more heads, which are revisions which have no children.
///
/// Some revisions can be explicitly named with "bookmarks", and they're often heads as well.
///
/// At the filesystem level, the repo consists of:
/// - the changelog: .hg/store/00changelog.[di]
/// - the manifest: .hg/store/00manifest.[di]
/// - the tree manifests: .hg/store/00manifesttree.[di] and .hg/store/meta/.../00manifest.i
/// - per-file histories: .hg/store/data/.../<file>.[di]
///
/// Cloning a `RevlogRepo` is cheap: the revlog cache is shared via `Arc`.
#[derive(Debug, Clone)]
pub struct RevlogRepo {
    basepath: PathBuf,                          // path to .hg directory
    requirements: HashSet<Required>,            // parsed .hg/requires
    store_requirements: HashSet<StoreRequired>, // parsed .hg/store/requires
    changelog: Revlog,                          // changes
    inner: Arc<RwLock<RevlogInner>>,            // Inner parts (shared revlog cache)
    inmemory_logs_capacity: usize, // Limit on the number of filelogs and tree revlogs in memory.
    // Note: there can be 2 * inmemory_logs_capacity revlogs in
    // memory in total: half for filelogs and half for revlogs.
}
|
|
|
|
|
|
|
|
/// Tunable options for `RevlogRepo::open_with_options`.
pub struct RevlogRepoOptions {
    /// Cap on the number of filelogs/tree revlogs cached in memory before
    /// the cache is cleared.
    pub inmemory_logs_capacity: usize,
}
|
|
|
|
|
|
|
|
/// Mutable interior of a `RevlogRepo`, shared between clones behind an
/// `Arc<RwLock<_>>`.
#[derive(Debug)]
struct RevlogInner {
    // Cache of opened filelog / tree-manifest revlogs, keyed by repo path.
    logcache: HashMap<RepoPath, Revlog>,
}
|
|
|
|
|
|
|
|
impl PartialEq<Self> for RevlogRepo {
    /// Two repos compare equal when they point at the same on-disk location,
    /// were opened with the same requirements, and share the same inner
    /// revlog cache (pointer equality on the `Arc`).
    ///
    /// NOTE(review): `store_requirements`, `changelog` and
    /// `inmemory_logs_capacity` are not compared — presumably because they
    /// are derived from `basepath`/options, but confirm this is intended.
    fn eq(&self, other: &Self) -> bool {
        self.basepath == other.basepath
            && self.requirements == other.requirements
            && Arc::ptr_eq(&self.inner, &other.inner)
    }
}

impl Eq for RevlogRepo {}
|
|
|
|
|
|
|
|
impl RevlogRepo {
    /// Open the repo rooted at `base` (the `.hg` directory) with default
    /// options (`DEFAULT_LOGS_CAPACITY` for the in-memory revlog cache).
    pub fn open<P: Into<PathBuf>>(base: P) -> Result<RevlogRepo> {
        let options = RevlogRepoOptions {
            inmemory_logs_capacity: DEFAULT_LOGS_CAPACITY,
        };
        RevlogRepo::open_with_options(base, options)
    }

    /// Open the repo rooted at `base` (the `.hg` directory).
    ///
    /// Loads the changelog from `store/00changelog.i`, parses `requires`,
    /// and — when the `storerequirements` requirement is present — parses
    /// `store/requires` as well. Fails if `requires` is missing/unreadable
    /// or contains a token that `Required::from_str` rejects.
    pub fn open_with_options<P: Into<PathBuf>>(
        base: P,
        options: RevlogRepoOptions,
    ) -> Result<RevlogRepo> {
        let base = base.into();
        let store = base.as_path().join("store");

        // The changelog is opened without a separate data file: `None`
        // means inline data (or a `.d` discovered by the Revlog itself —
        // see `Revlog::from_idx_with_data`).
        let changelog =
            Revlog::from_idx_with_data(store.join("00changelog.i"), None as Option<String>)?;

        let mut requirements = HashSet::new();
        let file = fs::File::open(base.join("requires")).context("Can't open `requires`")?;
        for line in BufReader::new(file).lines() {
            requirements.insert(line.context("Line read failed")?.parse()?);
        }

        let mut store_requirements = HashSet::new();
        if requirements.contains(&Required::StoreRequirements) {
            let store_requirements_file = store.join("requires");
            // A missing store/requires files is the same as an empty one.
            if store_requirements_file.exists() {
                let file = fs::File::open(store_requirements_file)
                    .context("Can't open `store/requires`")?;
                for line in BufReader::new(file).lines() {
                    store_requirements.insert(line.context("Line read failed")?.parse()?);
                }
            }
        }

        Ok(RevlogRepo {
            basepath: base.into(),
            requirements,
            store_requirements,
            changelog,
            inner: Arc::new(RwLock::new(RevlogInner {
                logcache: HashMap::new(),
            })),
            inmemory_logs_capacity: options.inmemory_logs_capacity,
        })
    }

    /// Stream the DAG heads of the changelog. An error reading heads is
    /// surfaced as a one-item error stream rather than a panic.
    pub fn get_heads(&self) -> BoxStream<HgNodeHash, Error> {
        match self.changelog.get_heads() {
            Err(e) => stream::once(Err(e)).boxify(),
            Ok(set) => stream::iter_ok(set.into_iter()).boxify(),
        }
    }

    /// Read the repo's bookmarks from disk (stock `.hg` bookmark files).
    pub fn get_bookmarks(&self) -> Result<StockBookmarks> {
        Ok(StockBookmarks::read(self.basepath.clone())?)
    }

    /// Look up a single bookmark by name, returning `None` when absent.
    /// Failure to read the bookmark files becomes a failed future.
    pub fn get_bookmark_value(
        &self,
        key: &dyn AsRef<[u8]>,
    ) -> BoxFuture<Option<HgChangesetId>, Error> {
        match self.get_bookmarks() {
            Ok(b) => b.get(key).boxify(),
            Err(e) => future::err(e).boxify(),
        }
    }

    /// Stream every changeset node hash in the changelog, in revlog order.
    pub fn changesets(&self) -> ChangesetStream {
        ChangesetStream::new(&self.changelog)
    }

    /// Load and parse the changeset with the given id from the changelog.
    pub fn get_changeset(&self, changesetid: HgChangesetId) -> BoxFuture<RevlogChangeset, Error> {
        // TODO: (jsgf) T17932873 distinguish between not existing vs some other error
        let nodeid = changesetid.clone().into_nodehash();
        self.changelog
            .get_idx_by_nodeid(nodeid)
            .and_then(|idx| self.changelog.get_rev(idx))
            .and_then(|rev| RevlogChangeset::new(rev))
            .into_future()
            .boxify()
    }

    /// Load the root (tree) manifest with the given id. The returned
    /// `RevlogManifest` keeps a clone of this repo so it can traverse
    /// sub-manifests via `get_path_revlog`.
    pub fn get_root_manifest(&self, manifestid: HgManifestId) -> BoxFuture<RevlogManifest, Error> {
        // TODO: (jsgf) T17932873 distinguish between not existing vs some other error
        let nodeid = manifestid.clone().into_nodehash();
        let repo = self.clone();
        let revlog = try_boxfuture!(self.get_path_revlog(&RepoPath::root()));
        revlog
            .get_idx_by_nodeid(nodeid)
            .and_then(|idx| revlog.get_rev(idx))
            .and_then(move |rev| RevlogManifest::new(repo, rev))
            .into_future()
            .boxify()
    }

    /// Requirements parsed from `.hg/requires`.
    pub fn get_requirements(&self) -> &HashSet<Required> {
        &self.requirements
    }

    /// Requirements parsed from `.hg/store/requires` (empty unless the
    /// `storerequirements` requirement was present).
    pub fn get_store_requirements(&self) -> &HashSet<StoreRequired> {
        &self.store_requirements
    }

    /// This method is used by RevlogManifest to traverse the Revlogs in search of manifests and
    /// files. Users of this crate should rely on RevlogManifest traversal or use
    /// RevlogRepo::get_manifest directly.
    ///
    /// Results are cached in `inner.logcache`; the cache is cleared wholesale
    /// once it exceeds `inmemory_logs_capacity` entries.
    pub(crate) fn get_path_revlog(&self, path: &RepoPath) -> Result<Revlog> {
        use mercurial_types::RepoPath::*;

        // Fast path: read lock only.
        if let Some(revlog) = self.get_path_revlog_from_cache(path) {
            return Ok(revlog);
        }
        // Slow path: take the write lock and (re)populate the cache entry.
        let mut inner = self.inner.write().expect("poisoned lock");

        // We may have memory issues if we are keeping too many revlogs in memory.
        // Let's clear them when we have too much
        if inner.logcache.len() > self.inmemory_logs_capacity {
            inner.logcache.clear();
        }

        match inner.logcache.entry(path.clone()) {
            // Another thread may have filled the entry between our read
            // check and taking the write lock.
            Entry::Occupied(log) => Ok(log.get().clone()),

            Entry::Vacant(missing) => {
                let revlog_path = match *path {
                    // .hg/store/00manifesttree
                    RootPath => MPath::new("00manifesttree")?,
                    // .hg/store/meta/<path>/00manifest
                    DirectoryPath(_) => MPath::new("meta")?
                        .join(MPath::iter_opt(path.mpath()))
                        .join(&MPath::new("00manifest")?),
                    // .hg/store/data/<path>
                    FilePath(_) => MPath::new("data")?.join(MPath::iter_opt(path.mpath())),
                };
                Ok(missing
                    .insert(self.init_revlog_from_path(revlog_path)?)
                    .clone())
            }
        }
    }

    /// Cheap cache lookup under the read lock; `None` on a miss.
    fn get_path_revlog_from_cache(&self, path: &RepoPath) -> Option<Revlog> {
        let inner = self.inner.read().expect("poisoned lock");
        inner.logcache.get(path).cloned()
    }

    /// path is the path to the revlog files, but without the .i or .d extensions
    ///
    /// Builds the fsencoded `<path>.i` / `<path>.d` names under `store/` and
    /// opens the revlog from them. Errors on an empty path.
    fn init_revlog_from_path(&self, path: MPath) -> Result<Revlog> {
        let mut elements: Vec<MPathElement> = path.into_iter().collect();
        let basename = elements.pop().ok_or_else(|| {
            format_err!("empty path provided to RevlogRepo::init_revlog_from_path")
        })?;

        // Re-push the basename with the ".i" suffix, encode, then pop it
        // again so the ".d" variant can be built the same way.
        let index_path = {
            let mut basename = Vec::from(basename.as_ref());
            basename.extend(b".i");
            elements.push(MPathElement::new(basename)?);
            self.fsencode_path(&elements)
        };
        elements.pop();

        let data_path = {
            let mut basename = Vec::from(basename.as_ref());
            basename.extend(b".d");
            elements.push(MPathElement::new(basename)?);
            self.fsencode_path(&elements)
        };

        let store_path = self.basepath.join("store");
        Revlog::from_idx_with_data(
            store_path.join(index_path),
            Some(store_path.join(data_path)),
        )
    }

    /// Encode a store-relative path the way Mercurial does on disk,
    /// according to this repo's requirements.
    fn fsencode_path(&self, elements: &[MPathElement]) -> PathBuf {
        // Mercurial has a complicated logic of path encoding.
        // Code below matches core Mercurial logic from the commit
        // 75013952d8d9608f73cd45f68405fbd6ec112bf2 from file mercurial/store.py from the function
        // store(). The only caveat is that basicstore is not yet implemented
        if self.requirements.contains(&Required::Store) {
            if self.requirements.contains(&Required::Fncache) {
                let dotencode = self.requirements.contains(&Required::Dotencode);
                fncache_fsencode(&elements, dotencode)
            } else {
                simple_fsencode(&elements)
            }
        } else {
            // basicstore (repos without the "store" requirement) not supported.
            unimplemented!();
        }
    }
}
|
2017-09-17 22:04:27 +03:00
|
|
|
|
2017-07-28 04:00:19 +03:00
|
|
|
/// Stream of all changeset node hashes in a repo, in revlog (index) order.
pub struct ChangesetStream(RevlogIter);
|
|
|
|
|
|
|
|
impl ChangesetStream {
|
|
|
|
fn new(changelog: &Revlog) -> Self {
|
|
|
|
ChangesetStream(changelog.into_iter())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Stream for ChangesetStream {
|
2018-04-16 13:34:19 +03:00
|
|
|
type Item = HgNodeHash;
|
2017-07-28 04:00:19 +03:00
|
|
|
type Error = Error;
|
|
|
|
|
2018-04-16 13:34:19 +03:00
|
|
|
fn poll(&mut self) -> Poll<Option<HgNodeHash>, Error> {
|
2017-07-28 04:00:19 +03:00
|
|
|
match self.0.next() {
|
|
|
|
Some((_, e)) => Ok(Async::Ready(Some(e.nodeid))),
|
|
|
|
None => Ok(Async::Ready(None)),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|