clone: tell user about "checkout --continue" on errors

Summary:
If the clone fails during checkout and we think the checkout is resumable, we now emit a message telling the user how to resume.
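Roughly, the new error type carries a resumable flag and the clone code checks it before printing the hint. A minimal, self-contained sketch of that flow (names simplified; the real CheckoutError wraps an anyhow::Error and is defined in the diff below):

use std::fmt;

// Simplified stand-in for the CheckoutError added below: it records whether
// the partially written working copy can be resumed.
#[derive(Debug)]
struct CheckoutError {
    resumable: bool,
    source: String, // the real type wraps an anyhow::Error
}

impl fmt::Display for CheckoutError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "checkout error: {}", self.source)
    }
}

fn report(err: &CheckoutError) {
    if err.resumable {
        // The real code builds this string with logger.cli_name() instead of
        // hard-coding "hg".
        println!("Checkout failed. Resume with 'hg checkout --continue'");
    }
    eprintln!("{}", err);
}

fn main() {
    // Pretend the checkout failed after progress tracking was set up,
    // so the failure is resumable.
    report(&CheckoutError {
        resumable: true,
        source: "example failure".to_string(),
    });
}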

I added a minimal-effort abstraction to avoid putting "hg" directly in user-facing messages. We probably need something better for this.
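The abstraction is just one function that owns the user-facing CLI name; a condensed sketch of how it is meant to be used (module path shortened here; the real helper lives in util::identity and is exposed via TermLogger::cli_name() in the diff below):

// One place that knows the CLI name instead of hard-coding "hg" at call sites.
mod identity {
    /// CLI name to be used in user-facing messaging.
    pub fn cli_name() -> &'static str {
        "hg"
    }
}

fn main() {
    // The clone error path below formats its hint the same way.
    println!(
        "Checkout failed. Resume with '{} checkout --continue'",
        identity::cli_name()
    );
}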

Reviewed By: DurhamG

Differential Revision: D37324757

fbshipit-source-id: 1b4a9d994c6a4499068cd9e12c002769a1ccf68b
Author: Muir Manders
Date: 2022-06-23 16:50:53 -07:00 (committed by Facebook GitHub Bot)
Parent: 6775fc62c0
Commit: d324c74963
8 changed files with 146 additions and 76 deletions


@@ -21,6 +21,7 @@ repolock = { version = "0.1.0", path = "../repolock" }
serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] }
status = { version = "0.1.0", path = "../status" }
storemodel = { version = "0.1.0", path = "../storemodel" }
thiserror = "1.0.30"
tokio = { version = "1.15", features = ["full", "test-util", "tracing"] }
tracing = "0.1.32"
treestate = { version = "0.1.0", path = "../treestate" }


@@ -72,6 +72,13 @@ impl std::fmt::Display for CheckoutStats {
}
}
#[derive(Debug, thiserror::Error)]
#[error("checkout error: {source}")]
pub struct CheckoutError {
pub resumable: bool,
pub source: anyhow::Error,
}
/// A somewhat simplified/specialized checkout suitable for use during a clone.
pub fn checkout(
config: &dyn Config,
@@ -81,85 +88,116 @@ pub fn checkout(
file_store: Arc<dyn ReadFileContents<Error = anyhow::Error> + Send + Sync>,
ts: &mut TreeState,
target: HgId,
) -> anyhow::Result<CheckoutStats> {
let dot_hg = wc_path.join(".hg");
) -> anyhow::Result<CheckoutStats, CheckoutError> {
let mut state = CheckoutState::default();
state
.checkout(
config, wc_path, source_mf, target_mf, file_store, ts, target,
)
.map_err(|err| CheckoutError {
resumable: state.resumable,
source: err,
})
}
let _wlock = repolock::lock_working_copy(config, &dot_hg)?;
#[derive(Default)]
struct CheckoutState {
resumable: bool,
}
let mut sparse_overrides = None;
impl CheckoutState {
fn checkout(
&mut self,
config: &dyn Config,
wc_path: &Path,
source_mf: &TreeManifest,
target_mf: &TreeManifest,
file_store: Arc<dyn ReadFileContents<Error = anyhow::Error> + Send + Sync>,
ts: &mut TreeState,
target: HgId,
) -> anyhow::Result<CheckoutStats> {
let dot_hg = wc_path.join(".hg");
let matcher: Box<dyn Matcher> = match fs::read_to_string(dot_hg.join("sparse")) {
Ok(contents) => {
let overrides = sparse::config_overrides(config);
sparse_overrides = Some(overrides.clone());
Box::new(sparse::sparse_matcher(
sparse::Root::from_bytes(contents.as_bytes(), ".hg/sparse".to_string())?,
target_mf.clone(),
file_store.clone(),
overrides,
)?)
let _wlock = repolock::lock_working_copy(config, &dot_hg)?;
let mut sparse_overrides = None;
let matcher: Box<dyn Matcher> = match fs::read_to_string(dot_hg.join("sparse")) {
Ok(contents) => {
let overrides = sparse::config_overrides(config);
sparse_overrides = Some(overrides.clone());
Box::new(sparse::sparse_matcher(
sparse::Root::from_bytes(contents.as_bytes(), ".hg/sparse".to_string())?,
target_mf.clone(),
file_store.clone(),
overrides,
)?)
}
Err(e) if e.kind() == io::ErrorKind::NotFound => {
Box::new(pathmatcher::AlwaysMatcher::new())
}
Err(e) => {
return Err(e.into());
}
};
let diff = Diff::new(source_mf, target_mf, &matcher)?;
let actions = ActionMap::from_diff(diff)?;
let vfs = VFS::new(wc_path.to_path_buf())?;
let checkout = Checkout::from_config(vfs.clone(), config)?;
let mut plan = checkout.plan_action_map(actions);
// Write out overrides first so they don't change when resuming
// this checkout.
if let Some(sparse_overrides) = sparse_overrides {
atomic_write(&dot_hg.join(CONFIG_OVERRIDE_CACHE), |f| {
serde_json::to_writer(f, &sparse_overrides)?;
Ok(())
})?;
}
Err(e) if e.kind() == io::ErrorKind::NotFound => {
Box::new(pathmatcher::AlwaysMatcher::new())
if config.get_or_default("checkout", "resumable")? {
let progress_path = dot_hg.join("updateprogress");
plan.add_progress(progress_path)?;
self.resumable = true;
}
Err(e) => {
return Err(e.into());
}
};
let diff = Diff::new(source_mf, target_mf, &matcher)?;
let actions = ActionMap::from_diff(diff)?;
let vfs = VFS::new(wc_path.to_path_buf())?;
let checkout = Checkout::from_config(vfs.clone(), config)?;
let mut plan = checkout.plan_action_map(actions);
if config.get_or_default("checkout", "resumable")? {
let progress_path = dot_hg.join("updateprogress");
plan.add_progress(progress_path)?;
}
atomic_write(&dot_hg.join("updatestate"), |f| {
f.write_all(target.to_hex().as_bytes())
})?;
block_on(plan.apply_store(&file_store))?;
let ts_meta = Metadata(BTreeMap::from([("p1".to_string(), target.to_hex())]));
let mut ts_buf: Vec<u8> = Vec::new();
ts_meta.serialize(&mut ts_buf)?;
ts.set_metadata(&ts_buf);
// Probably not required for clone.
for removed in plan.removed_files() {
ts.remove(removed)?;
}
for updated in plan
.updated_content_files()
.chain(plan.updated_meta_files())
{
let fstate = file_state(&vfs, updated)?;
ts.insert(updated, &fstate)?;
}
flush_dirstate(config, ts, &dot_hg, target)?;
remove_file(dot_hg.join("updatestate"))?;
if let Some(sparse_overrides) = sparse_overrides {
atomic_write(&dot_hg.join(CONFIG_OVERRIDE_CACHE), |f| {
serde_json::to_writer(f, &sparse_overrides)?;
Ok(())
atomic_write(&dot_hg.join("updatestate"), |f| {
f.write_all(target.to_hex().as_bytes())
})?;
}
Ok(CheckoutStats {
updated: plan.stats().0,
merged: 0,
removed: 0,
unresolved: 0,
})
block_on(plan.apply_store(&file_store))?;
let ts_meta = Metadata(BTreeMap::from([("p1".to_string(), target.to_hex())]));
let mut ts_buf: Vec<u8> = Vec::new();
ts_meta.serialize(&mut ts_buf)?;
ts.set_metadata(&ts_buf);
// Probably not required for clone.
for removed in plan.removed_files() {
ts.remove(removed)?;
}
for updated in plan
.updated_content_files()
.chain(plan.updated_meta_files())
{
let fstate = file_state(&vfs, updated)?;
ts.insert(updated, &fstate)?;
}
flush_dirstate(config, ts, &dot_hg, target)?;
remove_file(dot_hg.join("updatestate"))?;
Ok(CheckoutStats {
updated: plan.stats().0,
merged: 0,
removed: 0,
unresolved: 0,
})
}
}
fn flush_dirstate(


@@ -110,7 +110,7 @@ pub fn init_working_copy(
let mut ts = TreeState::open(&ts_path, None)?;
let stats = checkout::clone::checkout(
match checkout::clone::checkout(
repo.config(),
repo.path(),
&source_mf,
@@ -118,11 +118,23 @@ pub fn init_working_copy(
file_store.clone(),
&mut ts,
target,
)?;
) {
Ok(stats) => {
logger.status(format!("{}", stats));
logger.status(format!("{}", stats));
Ok(())
}
Err(err) => {
if err.resumable {
logger.status(format!(
"Checkout failed. Resume with '{} checkout --continue'",
logger.cli_name(),
));
}
Ok(())
Err(err.source.into())
}
}
}
#[derive(Debug, thiserror::Error)]


@@ -9,3 +9,4 @@ edition = "2021"
[dependencies]
io = { version = "0.1.0", path = "../io" }
tracing = "0.1.32"
util = { version = "0.1.0", path = "../util" }


@@ -70,6 +70,11 @@ impl TermLogger {
}
}
/// Short client program name.
pub fn cli_name(&self) -> &'static str {
util::identity::cli_name()
}
pub fn flush(&mut self) {
let _ = self.output.flush();
let _ = self.error.flush();


@@ -0,0 +1,11 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
/// CLI name to be used in user facing messaging.
pub fn cli_name() -> &'static str {
"hg"
}


@@ -16,6 +16,7 @@
mod bgprocess;
pub mod file;
pub mod identity;
pub mod lock;
pub mod path;


@@ -29,7 +29,7 @@ class TestResumeClone(BaseTest):
clone_wc = WorkingCopy(repo, new_dir())
with self.assertRaises(CommandFailure):
with self.assertRaises(CommandFailure) as cm:
repo.hg.clone(
repo.url,
clone_wc.root,
@@ -37,6 +37,7 @@ class TestResumeClone(BaseTest):
)
self.assertEqual(len(clone_wc.status().untracked), 1)
self.assertIn("hg checkout --continue", cm.exception.result.stdout)
# Make sure "checkout --continue" works and skips the file.
self.assertRegex(