Centralise hook running in the HookManager

Summary: Hooks-running logic was scattered across the codebase — move it all into the hooks crate, so that it is easier to refactor to use Bonsai changesets instead of hg.

Reviewed By: StanislavGlebik

Differential Revision: D20198725

fbshipit-source-id: fb8bdc2cdbd1714c7181a5a0562c1dacce9fcc7d
This commit is contained in:
Simon Farnsworth 2020-03-18 09:13:05 -07:00 committed by Facebook GitHub Bot
parent a908be34b3
commit d2cba5a169
7 changed files with 216 additions and 269 deletions

View File

@ -18,12 +18,17 @@ use cloned::cloned;
use cmdlib::helpers::block_execute;
use context::CoreContext;
use fbinit::FacebookInit;
use futures::{compat::Future01CompatExt, FutureExt, TryFutureExt};
use futures::{
compat::Future01CompatExt,
future::{FutureExt, TryFutureExt},
};
use futures_ext::{try_boxfuture, BoxFuture, FutureExt as OldFutureExt};
use futures_old::future::{err, ok, Future};
use futures_old::stream::repeat;
use futures_old::Stream;
use hooks::{ChangesetHookExecutionID, FileHookExecutionID, HookExecution};
use futures_old::{
future::{err, ok},
stream::repeat,
Future, Stream,
};
use hooks::HookOutcome;
use manifold::{ManifoldHttpClient, RequestContext};
use mercurial_types::{HgChangesetId, HgNodeHash};
use slog::{debug, info, o, Drain, Level, Logger};
@ -38,11 +43,6 @@ use tailer::Tailer;
use thiserror::Error;
use tokio_timer::sleep;
/// Aggregated outcomes from running hooks against a changeset, split by hook kind.
pub struct HookResults {
    // One entry per file hook run: the (changeset, hook name, file) it ran on and its result.
    file_hooks_results: Vec<(FileHookExecutionID, HookExecution)>,
    // One entry per changeset-level hook run: the (changeset, hook name) it ran on and its result.
    cs_hooks_result: Vec<(ChangesetHookExecutionID, HookExecution)>,
}
#[fbinit::main]
fn main(fb: FacebookInit) -> Result<()> {
panichandler::set_panichandler(panichandler::Fate::Abort);
@ -194,55 +194,27 @@ fn main(fb: FacebookInit) -> Result<()> {
}
fn process_hook_results(
fut: BoxFuture<Vec<HookResults>, Error>,
fut: BoxFuture<Vec<HookOutcome>, Error>,
logger: Logger,
) -> BoxFuture<(), Error> {
fut.and_then(move |res| {
let mut file_hooks_stat = HookExecutionStat::new();
let mut cs_hooks_stat = HookExecutionStat::new();
let mut hooks_stat = HookExecutionStat::new();
res.into_iter().for_each(|hook_results| {
let HookResults {
file_hooks_results,
cs_hooks_result,
} = hook_results;
debug!(logger, "==== File hooks results ====");
file_hooks_results.into_iter().for_each(|(exec_id, exec)| {
file_hooks_stat.record_hook_execution(&exec);
let output = format!(
"changeset:{} hook_name:{} path:{} result: {}",
exec_id.cs_id, exec_id.hook_name, exec_id.file.path, exec
);
debug!(logger, "==== Hooks results ====");
res.into_iter().for_each(|outcome| {
hooks_stat.record_hook_execution(&outcome);
match exec {
HookExecution::Accepted => debug!(logger, "{}", output),
HookExecution::Rejected(_) => info!(logger, "{}", output),
}
});
debug!(logger, "==== Changeset hooks results ====");
cs_hooks_result.into_iter().for_each(|(exec_id, exec)| {
cs_hooks_stat.record_hook_execution(&exec);
let output = format!(
"changeset:{} hook_name:{} result: {}",
exec_id.cs_id, exec_id.hook_name, exec
);
match exec {
HookExecution::Accepted => debug!(logger, "{}", output),
HookExecution::Rejected(_) => info!(logger, "{}", output),
}
});
if outcome.is_rejection() {
info!(logger, "{}", outcome);
} else {
debug!(logger, "{}", outcome);
}
});
info!(logger, "==== File hooks stat: {} ====", file_hooks_stat);
info!(logger, "==== Changeset hooks stat: {} ====", cs_hooks_stat);
info!(logger, "==== Hooks stat: {} ====", hooks_stat);
if cs_hooks_stat.rejected > 0 || file_hooks_stat.rejected > 0 {
err(format_err!(
"Hook rejections: changeset: {} file: {}",
cs_hooks_stat.rejected,
file_hooks_stat.rejected
))
if hooks_stat.rejected > 0 {
err(format_err!("Hook rejections: {}", hooks_stat.rejected,))
} else {
ok(())
}
@ -263,15 +235,12 @@ impl HookExecutionStat {
}
}
pub fn record_hook_execution(&mut self, exec: &hooks::HookExecution) {
match exec {
hooks::HookExecution::Accepted => {
self.accepted += 1;
}
hooks::HookExecution::Rejected(_) => {
self.rejected += 1;
}
};
/// Tally one hook outcome, bumping either the rejected or the accepted counter.
pub fn record_hook_execution(&mut self, outcome: &hooks::HookOutcome) {
    // Pick the counter that matches this outcome, then bump it once.
    let counter = if outcome.is_rejection() {
        &mut self.rejected
    } else {
        &mut self.accepted
    };
    *counter += 1;
}
}

View File

@ -7,7 +7,6 @@
#![deny(warnings)]
use super::HookResults;
use anyhow::{format_err, Error, Result};
use blobrepo::BlobRepo;
use bookmarks::BookmarkName;
@ -16,7 +15,7 @@ use context::CoreContext;
use futures::{FutureExt, TryFutureExt};
use futures_ext::{spawn_future, BoxFuture, FutureExt as OldFutureExt};
use futures_old::{Future, Stream};
use hooks::{hook_loader::load_hooks, HookManager};
use hooks::{hook_loader::load_hooks, HookManager, HookOutcome};
use hooks_content_stores::{blobrepo_text_only_store, BlobRepoChangesetStore};
use manifold::{ManifoldHttpClient, PayloadRange};
use mercurial_types::HgChangesetId;
@ -88,7 +87,7 @@ impl Tailer {
end_rev: HgChangesetId,
bm: BookmarkName,
excludes: HashSet<ChangesetId>,
) -> BoxFuture<Vec<HookResults>, Error> {
) -> BoxFuture<Vec<HookOutcome>, Error> {
debug!(ctx.logger(), "Running in range {} to {}", last_rev, end_rev);
nodehash_to_bonsai(ctx.clone(), &repo, end_rev)
.and_then(move |end_rev| {
@ -107,7 +106,7 @@ impl Tailer {
Ok(*hg_cs != last_rev)
})
.map(|(_, res)| res)
.collect()
.concat2()
})
.boxify()
}
@ -116,7 +115,7 @@ impl Tailer {
&self,
last_rev: HgChangesetId,
end_rev: HgChangesetId,
) -> BoxFuture<Vec<HookResults>, Error> {
) -> BoxFuture<Vec<HookOutcome>, Error> {
cloned!(
self.ctx,
self.repo,
@ -138,7 +137,7 @@ impl Tailer {
pub fn run_single_changeset(
&self,
changeset: HgChangesetId,
) -> BoxFuture<Vec<HookResults>, Error> {
) -> BoxFuture<Vec<HookOutcome>, Error> {
cloned!(self.ctx, self.repo, self.hook_manager, self.bookmark,);
repo.get_bonsai_from_hg(ctx, changeset)
.and_then(move |maybe_bonsai| {
@ -151,11 +150,11 @@ impl Tailer {
cloned!(self.ctx);
move |bonsai| run_hooks_for_changeset(ctx, repo, hook_manager, bookmark, bonsai)
})
.map(|(_, result)| vec![result])
.map(|(_, result)| result)
.boxify()
}
pub fn run_with_limit(&self, limit: u64) -> BoxFuture<Vec<HookResults>, Error> {
pub fn run_with_limit(&self, limit: u64) -> BoxFuture<Vec<HookOutcome>, Error> {
let ctx = self.ctx.clone();
let bm = self.bookmark.clone();
let hm = self.hook_manager.clone();
@ -188,12 +187,12 @@ impl Tailer {
.map(spawn_future)
.buffered(100)
.map(|(_, res)| res)
.collect()
.concat2()
})
.boxify()
}
pub fn run(&self) -> BoxFuture<Vec<HookResults>, Error> {
pub fn run(&self) -> BoxFuture<Vec<HookOutcome>, Error> {
info!(
self.ctx.logger(),
"Running tailer on bookmark {}",
@ -283,28 +282,17 @@ fn run_hooks_for_changeset(
hm: Arc<HookManager>,
bm: BookmarkName,
cs: ChangesetId,
) -> impl Future<Item = (HgChangesetId, HookResults), Error = Error> {
) -> impl Future<Item = (HgChangesetId, Vec<HookOutcome>), Error = Error> {
repo.get_hg_from_bonsai_changeset(ctx.clone(), cs)
.and_then(move |hg_cs| {
let ctx = ctx.clone();
let hm = hm.clone();
let bm = bm.clone();
async move {
debug!(ctx.logger(), "Running file hooks for changeset {:?}", hg_cs);
let file_hooks_results = hm
.run_file_hooks_for_bookmark(&ctx, hg_cs, &bm, None)
debug!(ctx.logger(), "Running hooks for changeset {:?}", hg_cs);
let hook_results = hm
.run_hooks_for_bookmark(&ctx, vec![hg_cs], &bm, None)
.await?;
debug!(
ctx.logger(),
"Running changeset hooks for changeset {:?}", hg_cs
);
let cs_hooks_result = hm
.run_changeset_hooks_for_bookmark(&ctx, hg_cs.clone(), &bm, None)
.await?;
let hook_results = HookResults {
file_hooks_results,
cs_hooks_result,
};
Ok((hg_cs, hook_results))
}
.boxed()

View File

@ -1073,9 +1073,9 @@ async fn run_changeset_hooks_with_mgr(
hook_manager.register_changeset_hook(&hook_name, hook.into(), Default::default());
}
let res = hook_manager
.run_changeset_hooks_for_bookmark(
.run_hooks_for_bookmark(
&ctx,
default_changeset_id(),
vec![default_changeset_id()],
&BookmarkName::new(bookmark_name).unwrap(),
None,
)
@ -1083,7 +1083,7 @@ async fn run_changeset_hooks_with_mgr(
.unwrap();
let map: HashMap<String, HookExecution> = res
.into_iter()
.map(|(exec_id, exec)| (exec_id.hook_name, exec))
.map(|outcome| (outcome.get_hook_name().to_string(), outcome.into()))
.collect();
assert_eq!(expected, map);
}
@ -1153,23 +1153,23 @@ async fn run_file_hooks_with_mgr(
hook_manager.register_file_hook(&hook_name, hook.into(), Default::default());
}
let res = hook_manager
.run_file_hooks_for_bookmark(
.run_hooks_for_bookmark(
&ctx,
hg_cs_id,
vec![hg_cs_id],
&BookmarkName::new(bookmark_name).unwrap(),
None,
)
.await
.unwrap();
let map: HashMap<String, HashMap<String, HookExecution>> =
res.into_iter()
.fold(HashMap::new(), |mut m, (exec_id, exec)| {
match m.entry(exec_id.hook_name) {
Entry::Vacant(v) => v.insert(HashMap::new()).insert(exec_id.file.path, exec),
Entry::Occupied(mut v) => v.get_mut().insert(exec_id.file.path, exec),
};
m
});
res.into_iter().fold(HashMap::new(), |mut m, outcome| {
let path = outcome.get_file_path().expect("Changeset hook").to_string();
match m.entry(outcome.get_hook_name().to_string()) {
Entry::Vacant(v) => v.insert(HashMap::new()).insert(path, outcome.into()),
Entry::Occupied(mut v) => v.get_mut().insert(path, outcome.into()),
};
m
});
assert_eq!(expected, map);
}

View File

@ -31,6 +31,7 @@ pub use errors::*;
use fbinit::FacebookInit;
use futures::{
future::{try_join, try_join_all},
stream::{futures_unordered::FuturesUnordered, TryStreamExt},
Future, TryFutureExt,
};
use futures_stats::TimedFutureExt;
@ -188,38 +189,57 @@ impl HookManager {
.collect()
}
// Changeset hooks
pub async fn run_hooks_for_bookmark(
&self,
ctx: &CoreContext,
changesets: impl IntoIterator<Item = HgChangesetId>,
bookmark: &BookmarkName,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
) -> Result<Vec<HookOutcome>, Error> {
debug!(ctx.logger(), "Running hooks for bookmark {:?}", bookmark);
pub fn run_changeset_hooks_for_bookmark<'a, 'b: 'a, 'c: 'a, 'd: 'a>(
&'a self,
ctx: &'b CoreContext,
changeset_id: HgChangesetId,
bookmark: &'c BookmarkName,
maybe_pushvars: Option<&'d HashMap<String, Bytes>>,
) -> impl Future<Output = Result<Vec<(ChangesetHookExecutionID, HookExecution)>, Error>> + 'a
{
debug!(
ctx.logger(),
"Running changeset hooks for bookmark {:?}", bookmark
);
let cs_hooks = self.changeset_hooks_for_bookmark(bookmark);
let file_hooks = self.file_hooks_for_bookmark(bookmark);
self.run_changeset_hooks_for_changeset_id(
ctx,
changeset_id,
self.changeset_hooks_for_bookmark(bookmark),
maybe_pushvars,
bookmark,
)
let cs_futs = FuturesUnordered::new();
let file_futs = FuturesUnordered::new();
for cs_id in changesets {
cs_futs.push(self.run_changeset_hooks_for_changeset_id(
ctx,
cs_id.clone(),
&cs_hooks,
maybe_pushvars,
bookmark,
));
file_futs.push(self.run_file_hooks_for_changeset_id(
ctx,
cs_id,
&file_hooks,
maybe_pushvars,
bookmark,
));
}
let (cs_hook_results, file_hook_results): (Vec<_>, Vec<_>) =
try_join(cs_futs.try_collect(), file_futs.try_collect()).await?;
Ok(cs_hook_results
.into_iter()
.flat_map(|r| r.into_iter())
.chain(file_hook_results.into_iter().flat_map(|r| r.into_iter()))
.collect())
}
// Changeset hooks
async fn run_changeset_hooks_for_changeset_id(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
hooks: Vec<String>,
hooks: &Vec<String>,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
bookmark: &BookmarkName,
) -> Result<Vec<(ChangesetHookExecutionID, HookExecution)>, Error> {
) -> Result<Vec<HookOutcome>, Error> {
debug!(
ctx.logger(),
"Running changeset hooks for changeset id {:?}", changeset_id
@ -246,7 +266,7 @@ impl HookManager {
Ok(res
.into_iter()
.map(|(hook_name, exec)| {
(
HookOutcome::ChangesetHook(
ChangesetHookExecutionID {
cs_id: changeset_id,
hook_name,
@ -277,38 +297,14 @@ impl HookManager {
// File hooks
pub async fn run_file_hooks_for_bookmark(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
bookmark: &BookmarkName,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
) -> Result<Vec<(FileHookExecutionID, HookExecution)>, Error> {
debug!(
ctx.logger(),
"Running file hooks for bookmark {:?}", bookmark
);
let file_hooks = self.file_hooks_for_bookmark(&bookmark);
self.run_file_hooks_for_changeset_id(
&ctx,
changeset_id,
file_hooks,
maybe_pushvars,
&bookmark,
)
.await
}
async fn run_file_hooks_for_changeset_id(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
hooks: Vec<String>,
hooks: &Vec<String>,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
bookmark: &BookmarkName,
) -> Result<Vec<(FileHookExecutionID, HookExecution)>, Error> {
) -> Result<Vec<HookOutcome>, Error> {
debug!(
ctx.logger(),
"Running file hooks for changeset id {:?}", changeset_id
@ -341,7 +337,7 @@ impl HookManager {
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: &'book BookmarkName,
scuba: ScubaSampleBuilder,
) -> impl Future<Output = Result<Vec<(FileHookExecutionID, HookExecution)>, Error>> + 'cs {
) -> impl Future<Output = Result<Vec<HookOutcome>, Error>> + 'cs {
let v: Vec<_> = changeset
.files
.iter()
@ -372,7 +368,7 @@ impl HookManager {
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: &'book BookmarkName,
scuba: ScubaSampleBuilder,
) -> Result<Vec<(FileHookExecutionID, HookExecution)>, Error> {
) -> Result<Vec<HookOutcome>, Error> {
let hook_futs = hooks.into_iter().map(move |(hook_name, hook, config)| {
let hook_context =
HookContext::new(hook_name.to_string(), config, file.clone(), bookmark);
@ -383,7 +379,7 @@ impl HookManager {
HookManager::run_hook(ctx, hook, hook_context, scuba).map_ok({
cloned!(file, bookmark);
move |(hook_name, exec)| {
(
HookOutcome::FileHook(
FileHookExecutionID {
cs_id,
hook_name,
@ -686,17 +682,88 @@ impl HookChangeset {
}
}
/// The result of running a single hook: identifies which hook ran (and on what)
/// together with how that run ended (accepted or rejected).
#[derive(Clone, Debug, PartialEq)]
pub enum HookOutcome {
    // Outcome of a changeset-level hook, identified by changeset and hook name.
    ChangesetHook(ChangesetHookExecutionID, HookExecution),
    // Outcome of a file-level hook, additionally identified by the file it ran on.
    FileHook(FileHookExecutionID, HookExecution),
}
impl fmt::Display for HookOutcome {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
HookOutcome::ChangesetHook(id, exec) => {
write!(f, "{} for {}: {}", id.hook_name, id.cs_id, exec)
}
HookOutcome::FileHook(id, exec) => write!(
f,
"{} for {} file {}: {}",
id.hook_name, id.cs_id, id.file.path, exec
),
}
}
}
impl HookOutcome {
    /// Whether this outcome represents a hook rejection.
    pub fn is_rejection(&self) -> bool {
        // Delegate to get_execution so the variant unpacking lives in one place.
        match self.get_execution() {
            HookExecution::Rejected(_) => true,
            HookExecution::Accepted => false,
        }
    }

    /// Name of the hook that produced this outcome.
    pub fn get_hook_name(&self) -> &str {
        match self {
            Self::ChangesetHook(id, _) => &id.hook_name,
            Self::FileHook(id, _) => &id.hook_name,
        }
    }

    /// Path of the file the hook ran on; `None` for changeset-level hooks.
    pub fn get_file_path(&self) -> Option<&str> {
        match self {
            Self::FileHook(id, _) => Some(&id.file.path),
            Self::ChangesetHook(..) => None,
        }
    }

    /// The changeset this hook ran against.
    pub fn get_cs_id(&self) -> HgChangesetId {
        match self {
            Self::ChangesetHook(id, _) => id.cs_id,
            Self::FileHook(id, _) => id.cs_id,
        }
    }

    /// The raw accept/reject result of the hook run.
    pub fn get_execution(&self) -> &HookExecution {
        match self {
            Self::ChangesetHook(_, exec) => exec,
            Self::FileHook(_, exec) => exec,
        }
    }
}
/// How a single hook run ended: either the change was accepted, or it was
/// rejected with details explaining why.
#[derive(Clone, Debug, PartialEq)]
pub enum HookExecution {
    Accepted,
    // Carries the human-readable rejection details (see HookRejectionInfo).
    Rejected(HookRejectionInfo),
}
impl From<HookOutcome> for HookExecution {
    /// Strip the identifying information and keep only the accept/reject result.
    fn from(outcome: HookOutcome) -> Self {
        // Both variants carry the execution in the same position, so one
        // or-pattern extracts it regardless of hook kind.
        match outcome {
            HookOutcome::ChangesetHook(_, exec) | HookOutcome::FileHook(_, exec) => exec,
        }
    }
}
impl fmt::Display for HookExecution {
    /// Render the result as "Accepted" or "Rejected: <long description>".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            HookExecution::Accepted => write!(f, "Accepted"),
            // Use the long description: this is the detailed, user-facing
            // explanation of why the hook rejected the change. (The stale
            // duplicate arm reading `reason.description` was unreachable and
            // shadowed this one; it has been removed.)
            HookExecution::Rejected(reason) => write!(f, "Rejected: {}", reason.long_description),
        }
    }
}

View File

@ -31,7 +31,6 @@ use getbundle_response::{
create_getbundle_response, DraftsInBundlesPolicy, PhasesPart, SessionLfsParams,
};
use hgproto::{GetbundleArgs, GettreepackArgs, HgCommandRes, HgCommands};
use hooks::HookExecution;
use itertools::Itertools;
use lazy_static::lazy_static;
use load_limiter::Metric;
@ -1475,18 +1474,8 @@ impl HgCommands for RepoClient {
move |err| {
use unbundle::BundleResolverError::*;
match err {
HookError((cs_hooks, file_hooks)) => {
let mut failed_hooks = HashSet::new();
for (exec_id, exec_info) in cs_hooks {
if let HookExecution::Rejected(_) = exec_info {
failed_hooks.insert(exec_id.hook_name.clone());
}
}
for (exec_id, exec_info) in file_hooks {
if let HookExecution::Rejected(_) = exec_info {
failed_hooks.insert(exec_id.hook_name.clone());
}
}
HookError(hooks) => {
let failed_hooks: HashSet<String> = hooks.into_iter().filter_map(|res| if res.is_rejection() { Some(res.get_hook_name().to_string())} else {None}).collect();
for failed_hook in failed_hooks {
STATS::push_hook_failure.add_value(

View File

@ -7,17 +7,12 @@
#![deny(warnings)]
use crate::{
BundleResolverError, PostResolveAction, PostResolvePushRebase, UploadedHgChangesetIds,
};
use bookmarks::BookmarkName;
use bytes::Bytes;
use crate::{BundleResolverError, PostResolveAction, PostResolvePushRebase};
use context::CoreContext;
use futures::{future, stream::futures_unordered, FutureExt, TryFutureExt, TryStreamExt};
use futures::{FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use futures_old::future::{ok, Future};
use hooks::{ChangesetHookExecutionID, FileHookExecutionID, HookExecution, HookManager};
use std::collections::HashMap;
use futures_old::future::ok;
use hooks::{HookManager, HookOutcome};
use std::sync::Arc;
pub fn run_hooks(
@ -26,6 +21,7 @@ pub fn run_hooks(
action: &PostResolveAction,
) -> BoxFuture<(), BundleResolverError> {
match action {
// TODO: Need to run hooks on Push, not just PushRebase
PostResolveAction::Push(_) => ok(()).boxify(),
PostResolveAction::InfinitePush(_) => ok(()).boxify(),
PostResolveAction::PushRebase(action) => run_pushrebase_hooks(ctx, action, hook_manager),
@ -41,76 +37,21 @@ fn run_pushrebase_hooks(
let changesets = action.uploaded_hg_changeset_ids.clone();
let maybe_pushvars = action.maybe_pushvars.clone();
let bookmark = action.bookmark_spec.get_bookmark_name();
run_pushrebase_hooks_impl(ctx, changesets, maybe_pushvars, bookmark, hook_manager)
}
fn run_pushrebase_hooks_impl(
ctx: CoreContext,
changesets: UploadedHgChangesetIds,
pushvars: Option<HashMap<String, Bytes>>,
onto_bookmark: BookmarkName,
hook_manager: Arc<HookManager>,
) -> BoxFuture<(), BundleResolverError> {
// TODO: should we also accept the Option<HgBookmarkPush> and run hooks on that?
let futs: futures_unordered::FuturesUnordered<_> = changesets
.into_iter()
.map({
|hg_cs_id| {
let ctx = ctx.clone();
let hook_manager = hook_manager.clone();
let onto_bookmark = onto_bookmark.clone();
let pushvars = pushvars.clone();
async move {
future::try_join(
hook_manager.run_changeset_hooks_for_bookmark(
&ctx,
hg_cs_id.clone(),
&onto_bookmark,
pushvars.as_ref(),
),
hook_manager.run_file_hooks_for_bookmark(
&ctx,
hg_cs_id,
&onto_bookmark,
pushvars.as_ref(),
),
)
.await
}
.boxed()
}
})
.collect();
futs.try_collect()
.boxed()
.compat()
.from_err()
.and_then(|res: Vec<_>| {
let (cs_hook_results, file_hook_results): (Vec<_>, Vec<_>) = res.into_iter().unzip();
let cs_hook_failures: Vec<(ChangesetHookExecutionID, HookExecution)> = cs_hook_results
.into_iter()
.flatten()
.filter(|(_, exec)| match exec {
HookExecution::Accepted => false,
HookExecution::Rejected(_) => true,
})
.collect();
let file_hook_failures: Vec<(FileHookExecutionID, HookExecution)> = file_hook_results
.into_iter()
.flatten()
.filter(|(_, exec)| match exec {
HookExecution::Accepted => false,
HookExecution::Rejected(_) => true,
})
.collect();
if cs_hook_failures.len() > 0 || file_hook_failures.len() > 0 {
Err(BundleResolverError::HookError((
cs_hook_failures,
file_hook_failures,
)))
} else {
Ok(())
}
})
.boxify()
async move {
let hook_failures: Vec<_> = hook_manager
.run_hooks_for_bookmark(&ctx, changesets, &bookmark, maybe_pushvars.as_ref())
.await?
.into_iter()
.filter(HookOutcome::is_rejection)
.collect();
if hook_failures.is_empty() {
Ok(())
} else {
Err(BundleResolverError::HookError(hook_failures))
}
}
.boxed()
.compat()
.boxify()
}

View File

@ -27,7 +27,7 @@ use futures_ext::{try_boxfuture, BoxFuture, BoxStream, FutureExt, StreamExt};
use futures_old::future::{self, err, ok, Shared};
use futures_old::stream;
use futures_old::{Future, IntoFuture, Stream};
use hooks::{ChangesetHookExecutionID, FileHookExecutionID, HookExecution};
use hooks::{HookExecution, HookOutcome};
use lazy_static::lazy_static;
use limits::types::RateLimit;
use mercurial_bundles::{Bundle2Item, PartHeader, PartHeaderInner, PartHeaderType, PartId};
@ -79,12 +79,7 @@ impl From<bool> for NonFastForwardPolicy {
}
pub enum BundleResolverError {
HookError(
(
Vec<(ChangesetHookExecutionID, HookExecution)>,
Vec<(FileHookExecutionID, HookExecution)>,
),
),
HookError(Vec<HookOutcome>),
PushrebaseConflicts(Vec<pushrebase::PushrebaseConflict>),
Error(Error),
RateLimitExceeded {
@ -105,24 +100,22 @@ impl From<BundleResolverError> for Error {
// DO NOT CHANGE FORMATTING WITHOUT UPDATING https://fburl.com/diffusion/bs9fys78 first!!
use BundleResolverError::*;
match error {
HookError((cs_hook_failures, file_hook_failures)) => {
let mut err_msgs = vec![];
for (exec_id, exec_info) in cs_hook_failures {
if let HookExecution::Rejected(info) = exec_info {
err_msgs.push(format!(
"{} for {}: {}",
exec_id.hook_name, exec_id.cs_id, info.long_description
));
}
}
for (exec_id, exec_info) in file_hook_failures {
if let HookExecution::Rejected(info) = exec_info {
err_msgs.push(format!(
"{} for {}: {}",
exec_id.hook_name, exec_id.cs_id, info.long_description
));
}
}
HookError(hook_outcomes) => {
let err_msgs: Vec<_> = hook_outcomes
.into_iter()
.filter_map(|outcome| {
let exec = outcome.get_execution();
match exec {
HookExecution::Accepted => None,
HookExecution::Rejected(info) => Some(format!(
"{} for {}: {}",
outcome.get_hook_name(),
outcome.get_cs_id(),
info.long_description
)),
}
})
.collect();
format_err!("hooks failed:\n{}", err_msgs.join("\n"))
}
PushrebaseConflicts(conflicts) => {