Delete HgChangeset hook handling completely

Summary: No longer in use - all hooks are now in Bonsai form - so remove it.

Reviewed By: krallin

Differential Revision: D20891164

fbshipit-source-id: b92f169a0ec3a4832f8e9ec8dc9696ce81f7edb3
Simon Farnsworth, 2020-04-11 04:25:18 -07:00 (committed by Facebook GitHub Bot)
parent 25b29257a3
commit f8cc1c6e97
19 changed files with 80 additions and 1230 deletions
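
For orientation before the per-file diffs: after this change a HookManager is built from a FileContentFetcher alone (the ChangesetStore and FileContentStore arguments are deleted), and the one surviving run_hooks_for_bookmark entry point takes Bonsai changesets. A minimal sketch of the resulting call pattern, using only names that appear in the hunks below; the ctx/repo/config bindings are assumed to be in scope:

use hooks::HookManager;
use hooks_content_stores::blobrepo_text_only_fetcher;
use scuba_ext::ScubaSampleBuilder;

// Sketch: construction now needs only the content fetcher.
let mut hook_manager = HookManager::new(
    ctx.fb,
    blobrepo_text_only_fetcher(blobrepo.clone(), config.hook_max_file_size),
    config.hook_manager_params.clone().unwrap_or_default(),
    ScubaSampleBuilder::with_discard(),
);

// Hooks run over &BonsaiChangeset values; the HgChangesetId path is gone.
let outcomes = hook_manager
    .run_hooks_for_bookmark(&ctx, changesets.iter(), &bookmark, maybe_pushvars.as_ref())
    .await?;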

View File

@ -27,9 +27,7 @@ use futures::{
use futures_stats::TimedFutureExt;
use hgproto::HgCommands;
use hooks::HookManager;
use hooks_content_stores::{
InMemoryChangesetStore, InMemoryFileContentFetcher, InMemoryFileContentStore,
};
use hooks_content_stores::InMemoryFileContentFetcher;
use metaconfig_types::{BlobConfig, HookManagerParams};
use mononoke_types::Timestamp;
use nonzero_ext::nonzero;
@ -196,8 +194,6 @@ async fn dispatch(
fn build_noop_hook_manager(fb: FacebookInit) -> HookManager {
HookManager::new(
fb,
Box::new(InMemoryChangesetStore::new()),
Arc::new(InMemoryFileContentStore::new()),
Box::new(InMemoryFileContentFetcher::new()),
HookManagerParams {
disable_acl_checker: true,

View File

@ -13,13 +13,11 @@ use blobstore::Loadable;
use bookmarks::BookmarkName;
use cloned::cloned;
use context::CoreContext;
use futures::{compat::Future01CompatExt, FutureExt, TryFutureExt};
use futures::{FutureExt, TryFutureExt};
use futures_ext::{spawn_future, BoxFuture, FutureExt as OldFutureExt};
use futures_old::{Future, Stream};
use hooks::{hook_loader::load_hooks, HookManager, HookOutcome};
use hooks_content_stores::{
blobrepo_text_only_fetcher, blobrepo_text_only_store, BlobRepoChangesetStore,
};
use hooks_content_stores::blobrepo_text_only_fetcher;
use manifold::{ManifoldHttpClient, PayloadRange};
use mercurial_types::HgChangesetId;
use metaconfig_types::RepoConfig;
@ -51,14 +49,10 @@ impl Tailer {
excludes: HashSet<ChangesetId>,
disabled_hooks: &HashSet<String>,
) -> Result<Tailer> {
let changeset_store = BlobRepoChangesetStore::new(repo.clone());
let content_store = blobrepo_text_only_store(repo.clone(), config.hook_max_file_size);
let content_fetcher = blobrepo_text_only_fetcher(repo.clone(), config.hook_max_file_size);
let mut hook_manager = HookManager::new(
ctx.fb,
Box::new(changeset_store),
content_store,
content_fetcher,
Default::default(),
ScubaSampleBuilder::with_discard(),
@ -294,17 +288,9 @@ fn run_hooks_for_changeset(
let bm = bm.clone();
async move {
debug!(ctx.logger(), "Running hooks for changeset {:?}", cs);
let mut hook_results = hm
.run_hooks_for_bookmark_bonsai(&ctx, vec![cs].iter(), &bm, None)
let hook_results = hm
.run_hooks_for_bookmark(&ctx, vec![cs].iter(), &bm, None)
.await?;
let hg_cs = repo
.get_hg_from_bonsai_changeset(ctx.clone(), cs_id)
.compat()
.await?;
let old_hook_results = hm
.run_hooks_for_bookmark(&ctx, vec![hg_cs], &bm, None)
.await?;
hook_results.extend(old_hook_results);
Ok((cs_id, hook_results))
}
.boxed()
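
Since the hunk above interleaves deleted and kept lines, note what survives: the deleted code ran every changeset through the hooks twice, once in Bonsai form and once after a get_hg_from_bonsai_changeset round-trip, then merged the two result sets. The kept body is the single Bonsai pass, roughly:

// Surviving single pass (sketch of the kept lines above):
let hook_results = hm
    .run_hooks_for_bookmark(&ctx, vec![cs].iter(), &bm, None)
    .await?;
Ok((cs_id, hook_results))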

View File

@ -5,174 +5,14 @@
* GNU General Public License version 2.
*/
use anyhow::Error;
use async_trait::async_trait;
use blobrepo::BlobRepo;
use blobstore::Loadable;
use bytes::Bytes;
use context::CoreContext;
use futures::{
compat::{Future01CompatExt, Stream01CompatExt},
future,
stream::TryStreamExt,
};
use manifest::{Diff, Entry, ManifestOps};
use mercurial_types::{blobs::HgBlobChangeset, FileBytes, HgChangesetId, HgFileNodeId, MPath};
use futures::compat::Future01CompatExt;
use mononoke_types::ContentId;
use mononoke_types::FileType;
use crate::{ChangedFileType, ChangesetStore, ErrorKind, FileContentFetcher, FileContentStore};
// TODO this can cache file content locally to prevent unnecessary lookup of changeset,
// manifest and walk of manifest each time
// It's likely that multiple hooks will want to see the same content for the same changeset
pub struct BlobRepoFileContentStore {
pub repo: BlobRepo,
}
pub struct BlobRepoChangesetStore {
pub repo: BlobRepo,
}
#[async_trait]
impl FileContentStore for BlobRepoFileContentStore {
async fn resolve_path<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changeset_id: HgChangesetId,
path: MPath,
) -> Result<Option<HgFileNodeId>, Error> {
let cs = changeset_id
.load(ctx.clone(), self.repo.blobstore())
.compat()
.await?;
let entry = cs
.manifestid()
.find_entry(ctx.clone(), self.repo.get_blobstore(), Some(path))
.compat()
.await?;
Ok(entry.and_then(|entry| entry.into_leaf()).map(|leaf| leaf.1))
}
async fn get_file_text<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<Option<FileBytes>, Error> {
let store = self.repo.get_blobstore();
let envelope = id.load(ctx.clone(), &store).compat().await?;
let content = filestore::fetch_concat(&store, ctx.clone(), envelope.content_id())
.compat()
.await?;
Ok(Some(FileBytes(content)))
}
async fn get_file_size<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<u64, Error> {
let envelope = id.load(ctx.clone(), self.repo.blobstore()).compat().await?;
Ok(envelope.content_size())
}
}
impl BlobRepoFileContentStore {
pub fn new(repo: BlobRepo) -> BlobRepoFileContentStore {
BlobRepoFileContentStore { repo }
}
}
#[async_trait]
impl ChangesetStore for BlobRepoChangesetStore {
async fn get_changeset_by_changesetid<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<HgBlobChangeset, Error> {
changesetid
.load(ctx.clone(), self.repo.blobstore())
.compat()
.await
.map_err(|e| e.into())
}
async fn get_changed_files<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<Vec<(String, ChangedFileType, Option<(HgFileNodeId, FileType)>)>, Error> {
let cs = changesetid
.load(ctx.clone(), self.repo.blobstore())
.compat()
.await?;
let mf_id = cs.manifestid();
let parents = cs.parents();
let (maybe_p1, _) = parents.get_nodes();
match maybe_p1 {
Some(p1) => {
let p1 = HgChangesetId::new(p1)
.load(ctx.clone(), self.repo.blobstore())
.compat()
.await?;
let p_mf_id = p1.manifestid();
p_mf_id
.diff(ctx.clone(), self.repo.get_blobstore(), mf_id)
.compat()
.try_filter_map(|diff| {
let (path, change_type, entry) = match diff {
Diff::Added(path, entry) => (path, ChangedFileType::Added, entry),
Diff::Removed(path, entry) => (path, ChangedFileType::Deleted, entry),
Diff::Changed(path, .., entry) => {
(path, ChangedFileType::Modified, entry)
}
};
match (change_type, entry) {
(ChangedFileType::Deleted, Entry::Leaf(_)) => {
future::ok(Some((path, ChangedFileType::Deleted, None)))
}
(change_type, Entry::Leaf((ty, hash))) => {
future::ok(Some((path, change_type, Some((hash, ty)))))
}
(_, Entry::Tree(_)) => future::ok(None),
}
})
.try_filter_map(|(maybe_path, ty, hash_and_type)| {
future::ok(maybe_path.map(|path| {
(
String::from_utf8_lossy(&path.to_vec()).into_owned(),
ty,
hash_and_type,
)
}))
})
.try_collect()
.await
}
None => {
mf_id
.list_leaf_entries(ctx.clone(), self.repo.get_blobstore())
.compat()
.map_ok(|(path, (ty, filenode))| {
(
String::from_utf8_lossy(&path.to_vec()).into_owned(),
ChangedFileType::Added,
Some((filenode, ty)),
)
})
.try_collect()
.await
}
}
}
}
impl BlobRepoChangesetStore {
pub fn new(repo: BlobRepo) -> BlobRepoChangesetStore {
BlobRepoChangesetStore { repo }
}
}
use crate::{ErrorKind, FileContentFetcher};
pub struct BlobRepoFileContentFetcher {
pub repo: BlobRepo,

View File

@ -11,8 +11,6 @@ use mononoke_types::ContentId;
#[derive(Debug, Error)]
pub enum ErrorKind {
#[error("No changeset with id '{0}'")]
NoSuchChangeset(String),
#[error("Content with id '{0}' not found")]
ContentIdNotFound(ContentId),
#[error(transparent)]

View File

@ -6,19 +6,12 @@
*/
#![deny(warnings)]
use std::sync::Arc;
mod blobrepo;
mod errors;
mod memory;
mod store;
mod text_only;
pub use crate::blobrepo::{BlobRepoChangesetStore, BlobRepoFileContentStore};
pub use crate::memory::{InMemoryChangesetStore, InMemoryFileContentStore};
pub use crate::text_only::TextOnlyFileContentStore;
pub use store::{ChangedFileType, ChangesetStore, FileContentStore};
pub use crate::blobrepo::BlobRepoFileContentFetcher;
pub use crate::memory::{InMemoryFileContentFetcher, InMemoryFileText};
pub use crate::text_only::TextOnlyFileContentFetcher;
@ -33,11 +26,3 @@ pub fn blobrepo_text_only_fetcher(
let store = BlobRepoFileContentFetcher::new(blobrepo);
Box::new(TextOnlyFileContentFetcher::new(store, max_file_size))
}
pub fn blobrepo_text_only_store(
blobrepo: ::blobrepo::BlobRepo,
max_file_size: u64,
) -> Arc<dyn FileContentStore> {
let store = BlobRepoFileContentStore::new(blobrepo);
Arc::new(TextOnlyFileContentStore::new(store, max_file_size))
}

View File

@ -5,70 +5,14 @@
* GNU General Public License version 2.
*/
use crate::{ChangedFileType, ChangesetStore, FileContentStore};
use crate::{ErrorKind, FileContentFetcher};
use anyhow::Error;
use async_trait::async_trait;
use bytes::Bytes;
use context::CoreContext;
use mercurial_types::{blobs::HgBlobChangeset, FileBytes, HgChangesetId, HgFileNodeId, MPath};
use mononoke_types::ContentId;
use mononoke_types::FileType;
use std::collections::HashMap;
pub struct InMemoryChangesetStore {
map_files:
HashMap<HgChangesetId, Vec<(String, ChangedFileType, Option<(HgFileNodeId, FileType)>)>>,
map_cs: HashMap<HgChangesetId, HgBlobChangeset>,
}
#[async_trait]
impl ChangesetStore for InMemoryChangesetStore {
async fn get_changeset_by_changesetid<'a, 'b: 'a>(
&'a self,
_ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<HgBlobChangeset, Error> {
match self.map_cs.get(&changesetid) {
Some(cs) => Ok(cs.clone()),
None => Err(ErrorKind::NoSuchChangeset(changesetid.to_string()).into()),
}
}
async fn get_changed_files<'a, 'b: 'a>(
&'a self,
_ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<Vec<(String, ChangedFileType, Option<(HgFileNodeId, FileType)>)>, Error> {
match self.map_files.get(&changesetid) {
Some(files) => Ok(files.clone()),
None => Err(ErrorKind::NoSuchChangeset(changesetid.to_string()).into()),
}
}
}
impl InMemoryChangesetStore {
pub fn new() -> InMemoryChangesetStore {
InMemoryChangesetStore {
map_cs: HashMap::new(),
map_files: HashMap::new(),
}
}
pub fn insert_files(
&mut self,
changeset_id: HgChangesetId,
files: Vec<(String, ChangedFileType, Option<(HgFileNodeId, FileType)>)>,
) {
self.map_files.insert(changeset_id.clone(), files);
}
pub fn insert_changeset(&mut self, changeset_id: HgChangesetId, cs: HgBlobChangeset) {
self.map_cs.insert(changeset_id.clone(), cs);
}
}
#[derive(Clone)]
pub enum InMemoryFileText {
Present(Bytes),
@ -94,72 +38,6 @@ impl Into<InMemoryFileText> for u64 {
}
}
#[derive(Clone)]
pub struct InMemoryFileContentStore {
id_to_text: HashMap<HgFileNodeId, InMemoryFileText>,
path_to_filenode: HashMap<(HgChangesetId, MPath), HgFileNodeId>,
}
#[async_trait]
impl FileContentStore for InMemoryFileContentStore {
async fn resolve_path<'a, 'b: 'a>(
&'a self,
_ctx: &'b CoreContext,
cs_id: HgChangesetId,
path: MPath,
) -> Result<Option<HgFileNodeId>, Error> {
Ok(self.path_to_filenode.get(&(cs_id, path)).cloned())
}
async fn get_file_text<'a, 'b: 'a>(
&'a self,
_ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<Option<FileBytes>, Error> {
self.id_to_text
.get(&id)
.ok_or(Error::msg("file not found"))
.map(|c| match c {
InMemoryFileText::Present(ref bytes) => Some(FileBytes(bytes.clone())),
InMemoryFileText::Elided(_) => None,
})
}
async fn get_file_size<'a, 'b: 'a>(
&'a self,
_ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<u64, Error> {
self.id_to_text
.get(&id)
.ok_or(Error::msg("file not found"))
.map(|c| match c {
InMemoryFileText::Present(ref bytes) => bytes.len() as u64,
InMemoryFileText::Elided(size) => *size,
})
}
}
impl InMemoryFileContentStore {
pub fn new() -> InMemoryFileContentStore {
InMemoryFileContentStore {
id_to_text: HashMap::new(),
path_to_filenode: HashMap::new(),
}
}
pub fn insert(
&mut self,
cs_id: HgChangesetId,
path: MPath,
key: HgFileNodeId,
text: impl Into<InMemoryFileText>,
) {
self.id_to_text.insert(key, text.into());
self.path_to_filenode.insert((cs_id, path), key);
}
}
#[derive(Clone)]
pub struct InMemoryFileContentFetcher {
id_to_text: HashMap<ContentId, InMemoryFileText>,
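
InMemoryFileContentFetcher is what remains here, with InMemoryFileText kept as its value type. A short usage sketch based on the test code later in this commit; it assumes the fetcher's insert mirrors the deleted store's and takes any impl Into<InMemoryFileText>:

use hooks_content_stores::InMemoryFileContentFetcher;

let mut fetcher = InMemoryFileContentFetcher::new();
// A &str stores real contents (InMemoryFileText::Present)...
fetcher.insert(ONES_CTID, "elephants");
// ...while a bare u64 stores only a size (InMemoryFileText::Elided),
// standing in for a file whose text is deliberately not kept.
fetcher.insert(TWOS_CTID, 10_000u64);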

View File

@ -7,21 +7,10 @@
use crate::ErrorKind;
use anyhow::Error;
use async_trait::async_trait;
use bytes::Bytes;
use context::CoreContext;
use mercurial_types::{blobs::HgBlobChangeset, FileBytes, HgChangesetId, HgFileNodeId, MPath};
use mononoke_types::ContentId;
use mononoke_types::FileType;
#[derive(Clone, PartialEq, Eq)]
pub enum ChangedFileType {
Added,
Deleted,
Modified,
}
#[async_trait]
pub trait FileContentFetcher: Send + Sync {
@ -37,40 +26,3 @@ pub trait FileContentFetcher: Send + Sync {
id: ContentId,
) -> Result<Option<Bytes>, ErrorKind>;
}
#[async_trait]
pub trait FileContentStore: Send + Sync {
async fn resolve_path<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changeset_id: HgChangesetId,
path: MPath,
) -> Result<Option<HgFileNodeId>, Error>;
async fn get_file_text<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<Option<FileBytes>, Error>;
async fn get_file_size<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<u64, Error>;
}
#[async_trait]
pub trait ChangesetStore: Send + Sync {
async fn get_changeset_by_changesetid<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<HgBlobChangeset, Error>;
async fn get_changed_files<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changesetid: HgChangesetId,
) -> Result<Vec<(String, ChangedFileType, Option<(HgFileNodeId, FileType)>)>, Error>;
}
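
What remains of this file is the FileContentFetcher trait; the hunk shows only its tail. A hedged reconstruction of the surviving trait, assuming it follows the same lifetime pattern as the deleted FileContentStore above and that get_file_size is its other method:

#[async_trait]
pub trait FileContentFetcher: Send + Sync {
    async fn get_file_size<'a, 'b: 'a>(
        &'a self,
        ctx: &'b CoreContext,
        id: ContentId,
    ) -> Result<u64, ErrorKind>;

    async fn get_file_text<'a, 'b: 'a>(
        &'a self,
        ctx: &'b CoreContext,
        id: ContentId,
    ) -> Result<Option<Bytes>, ErrorKind>;
}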

View File

@ -5,70 +5,16 @@
* GNU General Public License version 2.
*/
use crate::FileContentStore;
use crate::{ErrorKind, FileContentFetcher};
use anyhow::Error;
use async_trait::async_trait;
use bytes::Bytes;
use context::CoreContext;
use mercurial_types::{FileBytes, HgChangesetId, HgFileNodeId, MPath};
use mononoke_types::ContentId;
use std::sync::Arc;
const NULL: u8 = 0;
pub struct TextOnlyFileContentStore<T> {
inner: Arc<T>,
max_size: u64,
}
impl<T> TextOnlyFileContentStore<T> {
pub fn new(inner: T, max_size: u64) -> Self {
Self {
inner: Arc::new(inner),
max_size,
}
}
}
#[async_trait]
impl<T: FileContentStore + 'static> FileContentStore for TextOnlyFileContentStore<T> {
async fn resolve_path<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
changeset_id: HgChangesetId,
path: MPath,
) -> Result<Option<HgFileNodeId>, Error> {
self.inner.resolve_path(ctx, changeset_id, path).await
}
async fn get_file_text<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<Option<FileBytes>, Error> {
let file_size = self.get_file_size(ctx, id).await?;
if file_size > self.max_size {
return Ok(None);
}
let file_bytes = self.inner.get_file_text(ctx, id).await?;
Ok(match file_bytes {
Some(ref file_bytes) if looks_like_binary(file_bytes.as_bytes()) => None,
_ => file_bytes,
})
}
async fn get_file_size<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
id: HgFileNodeId,
) -> Result<u64, Error> {
self.inner.get_file_size(ctx, id).await
}
}
pub struct TextOnlyFileContentFetcher<T> {
inner: Arc<T>,
max_size: u64,
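
The surviving TextOnlyFileContentFetcher<T> presumably keeps the same gating policy as the deleted store wrapper: return no text for files over max_size, and elide anything that looks binary (the NULL constant above suggests looks_like_binary is a NUL-byte sniff). A sketch adapted from the deleted get_file_text, with the fetcher-side signature assumed:

// Sketch: likely shape of the fetcher wrapper's text gate.
async fn get_file_text<'a, 'b: 'a>(
    &'a self,
    ctx: &'b CoreContext,
    id: ContentId,
) -> Result<Option<Bytes>, ErrorKind> {
    let file_size = self.inner.get_file_size(ctx, id).await?;
    if file_size > self.max_size {
        return Ok(None); // too large: hooks see no text
    }
    let bytes = self.inner.get_file_text(ctx, id).await?;
    Ok(match bytes {
        Some(ref b) if looks_like_binary(b) => None, // NUL byte => binary
        _ => bytes,
    })
}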

View File

@ -24,8 +24,7 @@ use hooks::{
HookRejectionInfo,
};
use hooks_content_stores::{
BlobRepoFileContentFetcher, FileContentFetcher, InMemoryChangesetStore,
InMemoryFileContentFetcher, InMemoryFileContentStore,
BlobRepoFileContentFetcher, FileContentFetcher, InMemoryFileContentFetcher,
};
use maplit::{btreemap, hashmap, hashset};
use metaconfig_types::{
@ -41,7 +40,6 @@ use regex::Regex;
use scuba_ext::ScubaSampleBuilder;
use std::collections::hash_map::Entry;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::sync::Arc;
use tests_utils::{create_commit, store_files};
#[derive(Clone, Debug)]
@ -966,10 +964,10 @@ async fn run_changeset_hooks_with_mgr(
let mut hook_manager =
setup_hook_manager(ctx.fb, bookmarks, regexes, content_fetcher_type).await;
for (hook_name, hook) in hooks {
hook_manager.register_changeset_hook_new(&hook_name, hook, Default::default());
hook_manager.register_changeset_hook(&hook_name, hook, Default::default());
}
let res = hook_manager
.run_hooks_for_bookmark_bonsai(
.run_hooks_for_bookmark(
&ctx,
vec![default_changeset()].iter(),
&BookmarkName::new(bookmark_name).unwrap(),
@ -1047,10 +1045,10 @@ async fn run_file_hooks_with_mgr(
let mut hook_manager =
setup_hook_manager(ctx.fb, bookmarks, regexes, content_fetcher_type).await;
for (hook_name, hook) in hooks {
hook_manager.register_file_hook_new(&hook_name, hook, Default::default());
hook_manager.register_file_hook(&hook_name, hook, Default::default());
}
let res = hook_manager
.run_hooks_for_bookmark_bonsai(
.run_hooks_for_bookmark(
&ctx,
vec![cs].iter(),
&BookmarkName::new(bookmark_name).unwrap(),
@ -1116,14 +1114,10 @@ fn default_changeset() -> BonsaiChangeset {
fn hook_manager_blobrepo(fb: FacebookInit, repo: BlobRepo) -> HookManager {
let ctx = CoreContext::test_mock(fb);
let changeset_store = Box::new(InMemoryChangesetStore::new());
let content_store = Arc::new(InMemoryFileContentStore::new());
let content_fetcher = BlobRepoFileContentFetcher::new(repo);
HookManager::new(
ctx.fb,
changeset_store,
content_store,
Box::new(content_fetcher),
Default::default(),
ScubaSampleBuilder::with_discard(),
@ -1142,9 +1136,6 @@ fn to_mpath(string: &str) -> MPath {
async fn hook_manager_inmem(fb: FacebookInit) -> HookManager {
let ctx = CoreContext::test_mock(fb);
let changeset_store = Box::new(InMemoryChangesetStore::new());
let content_store = Arc::new(InMemoryFileContentStore::new());
let mut content_fetcher = InMemoryFileContentFetcher::new();
content_fetcher.insert(ONES_CTID, "elephants");
content_fetcher.insert(TWOS_CTID, "hippopatami");
@ -1152,8 +1143,6 @@ async fn hook_manager_inmem(fb: FacebookInit) -> HookManager {
HookManager::new(
ctx.fb,
changeset_store,
content_store,
Box::new(content_fetcher),
Default::default(),
ScubaSampleBuilder::with_discard(),

View File

@ -23,19 +23,15 @@ use crate::facebook::rust_hooks::{
tp2_symlinks_only::TP2SymlinksOnly, verify_integrity::VerifyIntegrityHook,
verify_reviewedby_info::VerifyReviewedbyInfo,
};
use crate::{ChangesetHook, FileHook, Hook, HookChangeset, HookFile, HookManager};
use crate::{ChangesetHook, FileHook, HookManager};
use anyhow::Error;
use fbinit::FacebookInit;
use metaconfig_types::RepoConfig;
use std::{collections::HashSet, sync::Arc};
use std::collections::HashSet;
enum LoadedRustHook {
#[allow(dead_code)]
ChangesetHook(Arc<dyn Hook<HookChangeset>>),
#[allow(dead_code)]
FileHook(Arc<dyn Hook<HookFile>>),
BonsaiChangesetHook(Box<dyn ChangesetHook>),
BonsaiFileHook(Box<dyn FileHook>),
ChangesetHook(Box<dyn ChangesetHook>),
FileHook(Box<dyn FileHook>),
}
pub fn load_hooks(
@ -64,40 +60,32 @@ pub fn load_hooks(
};
let rust_hook = match hook_name.as_ref() {
"always_fail_changeset" => BonsaiChangesetHook(Box::new(AlwaysFailChangeset::new())),
"block_cross_repo_commits" => BonsaiFileHook(Box::new(BlockCrossRepoCommits::new()?)),
"block_empty_commit" => BonsaiChangesetHook(Box::new(BlockEmptyCommit::new())),
"check_nocommit" => BonsaiFileHook(Box::new(CheckNocommitHook::new(&hook.config)?)),
"check_unittests" => {
BonsaiChangesetHook(Box::new(CheckUnittestsHook::new(&hook.config)?))
}
"conflict_markers" => BonsaiFileHook(Box::new(ConflictMarkers::new())),
"deny_files" => BonsaiFileHook(Box::new(DenyFiles::new()?)),
"always_fail_changeset" => ChangesetHook(Box::new(AlwaysFailChangeset::new())),
"block_cross_repo_commits" => FileHook(Box::new(BlockCrossRepoCommits::new()?)),
"block_empty_commit" => ChangesetHook(Box::new(BlockEmptyCommit::new())),
"check_nocommit" => FileHook(Box::new(CheckNocommitHook::new(&hook.config)?)),
"check_unittests" => ChangesetHook(Box::new(CheckUnittestsHook::new(&hook.config)?)),
"conflict_markers" => FileHook(Box::new(ConflictMarkers::new())),
"deny_files" => FileHook(Box::new(DenyFiles::new()?)),
"ensure_valid_email" => {
BonsaiChangesetHook(Box::new(EnsureValidEmailHook::new(fb, &hook.config)?))
ChangesetHook(Box::new(EnsureValidEmailHook::new(fb, &hook.config)?))
}
"gitattributes-textdirectives" => {
BonsaiFileHook(Box::new(GitattributesTextDirectives::new()?))
FileHook(Box::new(GitattributesTextDirectives::new()?))
}
"limit_commit_message_length" => {
BonsaiChangesetHook(Box::new(LimitCommitMessageLength::new(&hook.config)?))
ChangesetHook(Box::new(LimitCommitMessageLength::new(&hook.config)?))
}
"limit_commitsize" => BonsaiChangesetHook(Box::new(LimitCommitsize::new(&hook.config))),
"limit_filesize" => BonsaiFileHook(Box::new(LimitFilesize::new(&hook.config))),
"limit_path_length" => {
BonsaiFileHook(Box::new(LimitPathLengthHook::new(&hook.config)?))
}
"no_bad_filenames" => BonsaiFileHook(Box::new(NoBadFilenames::new()?)),
"no_insecure_filenames" => BonsaiFileHook(Box::new(NoInsecureFilenames::new()?)),
"no_questionable_filenames" => {
BonsaiFileHook(Box::new(NoQuestionableFilenames::new()?))
}
"signed_source" => BonsaiFileHook(Box::new(SignedSourceHook::new(&hook.config)?)),
"tp2_symlinks_only" => BonsaiFileHook(Box::new(TP2SymlinksOnly::new()?)),
"verify_integrity" => {
BonsaiChangesetHook(Box::new(VerifyIntegrityHook::new(&hook.config)?))
}
"verify_reviewedby_info" => BonsaiChangesetHook(Box::new(VerifyReviewedbyInfo::new(
"limit_commitsize" => ChangesetHook(Box::new(LimitCommitsize::new(&hook.config))),
"limit_filesize" => FileHook(Box::new(LimitFilesize::new(&hook.config))),
"limit_path_length" => FileHook(Box::new(LimitPathLengthHook::new(&hook.config)?)),
"no_bad_filenames" => FileHook(Box::new(NoBadFilenames::new()?)),
"no_insecure_filenames" => FileHook(Box::new(NoInsecureFilenames::new()?)),
"no_questionable_filenames" => FileHook(Box::new(NoQuestionableFilenames::new()?)),
"signed_source" => FileHook(Box::new(SignedSourceHook::new(&hook.config)?)),
"tp2_symlinks_only" => FileHook(Box::new(TP2SymlinksOnly::new()?)),
"verify_integrity" => ChangesetHook(Box::new(VerifyIntegrityHook::new(&hook.config)?)),
"verify_reviewedby_info" => ChangesetHook(Box::new(VerifyReviewedbyInfo::new(
&hook.config,
hook_manager.get_reviewers_acl_checker(),
)?)),
@ -109,12 +97,6 @@ pub fn load_hooks(
ChangesetHook(rust_hook) => {
hook_manager.register_changeset_hook(&name, rust_hook, hook.config)
}
BonsaiFileHook(rust_hook) => {
hook_manager.register_file_hook_new(&name, rust_hook, hook.config)
}
BonsaiChangesetHook(rust_hook) => {
hook_manager.register_changeset_hook_new(&name, rust_hook, hook.config)
}
}
hook_set.insert(name);
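
With the old Arc<dyn Hook<_>> variants deleted, there is one registration path per hook kind, now under the names that previously belonged to the Mercurial API. A sketch of the resulting calls, with hook names and constructors taken from the match above:

// Changeset-level hook:
hook_manager.register_changeset_hook(
    "block_empty_commit",
    Box::new(BlockEmptyCommit::new()),
    hook.config.clone(),
);
// File-level hook:
hook_manager.register_file_hook(
    "deny_files",
    Box::new(DenyFiles::new()?),
    hook.config.clone(),
);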

View File

@ -24,45 +24,34 @@ use anyhow::{bail, Error};
use async_trait::async_trait;
use bookmarks::BookmarkName;
use bytes::Bytes;
use cloned::cloned;
use context::CoreContext;
pub use errors::*;
use fbinit::FacebookInit;
use futures::{
future::{try_join, try_join_all},
stream::{futures_unordered::FuturesUnordered, TryStreamExt},
Future, TryFutureExt,
};
use futures_stats::TimedFutureExt;
use hooks_content_stores::FileContentFetcher;
use hooks_content_stores::{ChangedFileType, ChangesetStore, FileContentStore};
use mercurial_types::{FileBytes, HgChangesetId, HgFileNodeId, HgParents};
use metaconfig_types::{BookmarkOrRegex, HookBypass, HookConfig, HookManagerParams};
use mononoke_types::{BonsaiChangeset, ChangesetId, FileChange, FileType, MPath};
use mononoke_types::{BonsaiChangeset, ChangesetId, FileChange, MPath};
use regex::Regex;
use scuba::builder::ServerData;
use scuba_ext::ScubaSampleBuilder;
use slog::debug;
use std::collections::HashMap;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::hash::Hash;
use std::str;
use std::sync::Arc;
type ChangesetHooks = HashMap<String, (Arc<dyn Hook<HookChangeset>>, HookConfig)>;
type FileHooks = HashMap<String, (Arc<dyn Hook<HookFile>>, HookConfig)>;
/// Manages hooks and allows them to be installed and uninstalled given a name
/// Knows how to run hooks
pub struct HookManager {
changeset_hooks: ChangesetHooks,
file_hooks: FileHooks,
hooks: HashMap<String, BonsaiHook>,
hooks: HashMap<String, Hook>,
bookmark_hooks: HashMap<BookmarkName, Vec<String>>,
regex_hooks: Vec<(Regex, Vec<String>)>,
changeset_store: Box<dyn ChangesetStore>,
content_store: Arc<dyn FileContentStore>,
content_fetcher: Box<dyn FileContentFetcher>,
reviewers_acl_checker: Arc<Option<AclChecker>>,
scuba: ScubaSampleBuilder,
@ -71,14 +60,10 @@ pub struct HookManager {
impl HookManager {
pub fn new(
fb: FacebookInit,
changeset_store: Box<dyn ChangesetStore>,
content_store: Arc<dyn FileContentStore>,
content_fetcher: Box<dyn FileContentFetcher>,
hook_manager_params: HookManagerParams,
mut scuba: ScubaSampleBuilder,
) -> HookManager {
let changeset_hooks = HashMap::new();
let file_hooks = HashMap::new();
let hooks = HashMap::new();
scuba
@ -107,13 +92,9 @@ impl HookManager {
};
HookManager {
changeset_hooks,
file_hooks,
hooks,
bookmark_hooks: HashMap::new(),
regex_hooks: Vec::new(),
changeset_store,
content_store,
content_fetcher,
reviewers_acl_checker: Arc::new(reviewers_acl_checker),
scuba,
@ -121,45 +102,23 @@ impl HookManager {
}
pub fn register_changeset_hook(
&mut self,
hook_name: &str,
hook: Arc<dyn Hook<HookChangeset>>,
config: HookConfig,
) {
self.changeset_hooks
.insert(hook_name.to_string(), (hook, config));
}
pub fn register_file_hook(
&mut self,
hook_name: &str,
hook: Arc<dyn Hook<HookFile>>,
config: HookConfig,
) {
self.file_hooks
.insert(hook_name.to_string(), (hook, config));
}
pub fn register_changeset_hook_new(
&mut self,
hook_name: &str,
hook: Box<dyn ChangesetHook>,
config: HookConfig,
) {
self.hooks.insert(
hook_name.to_string(),
BonsaiHook::from_changeset(hook, config),
);
self.hooks
.insert(hook_name.to_string(), Hook::from_changeset(hook, config));
}
pub fn register_file_hook_new(
pub fn register_file_hook(
&mut self,
hook_name: &str,
hook: Box<dyn FileHook>,
config: HookConfig,
) {
self.hooks
.insert(hook_name.to_string(), BonsaiHook::from_file(hook, config));
.insert(hook_name.to_string(), Hook::from_file(hook, config));
}
pub fn set_hooks_for_bookmark(&mut self, bookmark: BookmarkOrRegex, hooks: Vec<String>) {
@ -173,42 +132,10 @@ impl HookManager {
}
}
// Temporary as hooks will need it later
#[allow(dead_code)]
pub(crate) fn get_reviewers_acl_checker(&self) -> Arc<Option<AclChecker>> {
self.reviewers_acl_checker.clone()
}
fn hooks_for_bookmark_old(&self, bookmark: &BookmarkName) -> Vec<String> {
let mut hooks: Vec<_> = match self.bookmark_hooks.get(bookmark) {
Some(hooks) => hooks.clone().into_iter().collect(),
None => Vec::new(),
};
let bookmark_str = bookmark.to_string();
for (regex, r_hooks) in &self.regex_hooks {
if regex.is_match(&bookmark_str) {
hooks.extend(r_hooks.iter().cloned());
}
}
hooks
}
fn file_hooks_for_bookmark(&self, bookmark: &BookmarkName) -> Vec<String> {
self.hooks_for_bookmark_old(bookmark)
.into_iter()
.filter(|name| self.file_hooks.contains_key(name))
.collect()
}
fn changeset_hooks_for_bookmark(&self, bookmark: &BookmarkName) -> Vec<String> {
self.hooks_for_bookmark_old(bookmark)
.into_iter()
.filter(|name| self.changeset_hooks.contains_key(name))
.collect()
}
fn hooks_for_bookmark<'a>(
&'a self,
bookmark: &BookmarkName,
@ -225,318 +152,10 @@ impl HookManager {
}
}
cloned!(self.file_hooks, self.changeset_hooks);
hooks.into_iter().filter(move |hook| {
!(file_hooks.contains_key(*hook) || changeset_hooks.contains_key(*hook))
})
hooks.into_iter()
}
pub async fn run_hooks_for_bookmark(
&self,
ctx: &CoreContext,
changesets: impl IntoIterator<Item = HgChangesetId>,
bookmark: &BookmarkName,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
) -> Result<Vec<HookOutcome>, Error> {
debug!(ctx.logger(), "Running hooks for bookmark {:?}", bookmark);
let cs_hooks = self.changeset_hooks_for_bookmark(bookmark);
let file_hooks = self.file_hooks_for_bookmark(bookmark);
let cs_futs = FuturesUnordered::new();
let file_futs = FuturesUnordered::new();
for cs_id in changesets {
cs_futs.push(self.run_changeset_hooks_for_changeset_id(
ctx,
cs_id.clone(),
&cs_hooks,
maybe_pushvars,
bookmark,
));
file_futs.push(self.run_file_hooks_for_changeset_id(
ctx,
cs_id,
&file_hooks,
maybe_pushvars,
bookmark,
));
}
let (cs_hook_results, file_hook_results): (Vec<_>, Vec<_>) =
try_join(cs_futs.try_collect(), file_futs.try_collect()).await?;
Ok(cs_hook_results
.into_iter()
.flat_map(|r| r.into_iter())
.chain(file_hook_results.into_iter().flat_map(|r| r.into_iter()))
.collect())
}
// Changeset hooks
async fn run_changeset_hooks_for_changeset_id(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
hooks: &Vec<String>,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
bookmark: &BookmarkName,
) -> Result<Vec<HookOutcome>, Error> {
debug!(
ctx.logger(),
"Running changeset hooks for changeset id {:?}", changeset_id
);
let hooks: Vec<_> = hooks
.iter()
.map(|hook_name| {
let hook = self
.changeset_hooks
.get(hook_name)
.ok_or(ErrorKind::NoSuchHook(hook_name.to_string()))?;
Ok((hook_name.clone(), hook.clone()))
})
.collect::<Result<_, Error>>()?;
cloned!(mut self.scuba);
scuba.add("hash", changeset_id.to_hex().to_string());
let hcs = self.get_hook_changeset(&ctx, changeset_id).await?;
let hooks = HookManager::filter_bypassed_hooks(hooks, &hcs.comments, maybe_pushvars);
let res = HookManager::run_changeset_hooks_for_changeset(ctx, hcs, hooks, bookmark, scuba)
.await?;
Ok(res
.into_iter()
.map(|(hook_name, exec)| {
HookOutcome::ChangesetHook(
ChangesetHookExecutionID {
cs_id: changeset_id,
hook_name,
},
exec,
)
})
.collect())
}
async fn run_changeset_hooks_for_changeset<'book, 'ctx: 'book>(
ctx: &'ctx CoreContext,
changeset: HookChangeset,
hooks: Vec<(String, Arc<dyn Hook<HookChangeset>>, HookConfig)>,
bookmark: &'book BookmarkName,
scuba: ScubaSampleBuilder,
) -> Result<Vec<(String, HookExecution)>, Error> {
try_join_all(hooks.into_iter().map(|(hook_name, hook, config)| {
HookManager::run_hook(
ctx,
hook,
HookContext::new(hook_name, config, changeset.clone(), bookmark),
scuba.clone(),
)
}))
.await
}
// File hooks
async fn run_file_hooks_for_changeset_id(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
hooks: &Vec<String>,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
bookmark: &BookmarkName,
) -> Result<Vec<HookOutcome>, Error> {
debug!(
ctx.logger(),
"Running file hooks for changeset id {:?}", changeset_id
);
let hooks: Vec<_> = hooks
.iter()
.map(|hook_name| {
let hook = self
.file_hooks
.get(hook_name)
.ok_or(ErrorKind::NoSuchHook(hook_name.to_string()))?;
Ok((hook_name.clone(), hook.clone()))
})
.collect::<Result<_, Error>>()?;
cloned!(mut self.scuba);
scuba.add("hash", changeset_id.to_hex().to_string());
let hcs = self.get_hook_changeset(ctx, changeset_id).await?;
let hooks = HookManager::filter_bypassed_hooks(hooks, &hcs.comments, maybe_pushvars);
HookManager::run_file_hooks_for_changeset(ctx, changeset_id, &hcs, hooks, bookmark, scuba)
.await
}
fn run_file_hooks_for_changeset<'cs, 'book: 'cs, 'ctx: 'cs>(
ctx: &'ctx CoreContext,
changeset_id: HgChangesetId,
changeset: &'cs HookChangeset,
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: &'book BookmarkName,
scuba: ScubaSampleBuilder,
) -> impl Future<Output = Result<Vec<HookOutcome>, Error>> + 'cs {
let v: Vec<_> = changeset
.files
.iter()
// Do not run file hooks for deleted files
.filter_map(move |file| {
match file.ty {
ChangedFileType::Added | ChangedFileType::Modified => Some(
HookManager::run_file_hooks(
ctx,
changeset_id,
file.clone(),
hooks.clone(),
bookmark,
scuba.clone(),
)
),
ChangedFileType::Deleted => None,
}
})
.collect();
try_join_all(v).map_ok(|vv| vv.into_iter().flatten().collect())
}
async fn run_file_hooks<'book, 'ctx: 'book>(
ctx: &'ctx CoreContext,
cs_id: HgChangesetId,
file: HookFile,
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: &'book BookmarkName,
scuba: ScubaSampleBuilder,
) -> Result<Vec<HookOutcome>, Error> {
let hook_futs = hooks.into_iter().map(move |(hook_name, hook, config)| {
let hook_context =
HookContext::new(hook_name.to_string(), config, file.clone(), bookmark);
cloned!(mut scuba);
scuba.add("hash", cs_id.to_hex().to_string());
HookManager::run_hook(ctx, hook, hook_context, scuba).map_ok({
cloned!(file, bookmark);
move |(hook_name, exec)| {
let path = MPath::new(&file.path).expect("Path did not roundtrip via String");
HookOutcome::FileHook(
FileHookExecutionID {
cs_id,
hook_name,
file,
bookmark,
path,
},
exec,
)
}
})
});
try_join_all(hook_futs).await
}
async fn run_hook<T: Clone>(
ctx: &CoreContext,
hook: Arc<dyn Hook<T>>,
hook_context: HookContext<T>,
mut scuba: ScubaSampleBuilder,
) -> Result<(String, HookExecution), Error> {
let hook_name = hook_context.hook_name.clone();
debug!(ctx.logger(), "Running hook {:?}", hook_context.hook_name);
// Try getting the source hostname, otherwise use the unix name.
let user_option = ctx
.source_hostname()
.as_ref()
.or(ctx.user_unix_name().as_ref())
.map(|s| s.as_str());
if let Some(user) = user_option {
scuba.add("user", user);
}
scuba.add("hook", hook_name.clone());
let (stats, result) = hook.run(ctx, hook_context).timed().await;
if let Err(e) = result.as_ref() {
scuba.add("stderr", e.to_string());
}
let elapsed = stats.completion_time.as_millis() as i64;
scuba
.add("elapsed", elapsed)
.add("total_time", elapsed)
.add("errorcode", result.is_err() as i32)
.add("failed_hooks", result.is_err() as i32)
.log();
let he = result.map_err(|e| e.context(format!("while executing hook {}", hook_name)))?;
Ok((hook_name, he))
}
async fn get_hook_changeset(
&self,
ctx: &CoreContext,
changeset_id: HgChangesetId,
) -> Result<HookChangeset, Error> {
let content_store = self.content_store.clone();
let hg_changeset = self
.changeset_store
.get_changeset_by_changesetid(ctx, changeset_id);
let changed_files = self.changeset_store.get_changed_files(ctx, changeset_id);
let reviewers_acl_checker = self.reviewers_acl_checker.clone();
let (changeset, changed_files) = try_join(hg_changeset, changed_files).await?;
let author = str::from_utf8(changeset.user())?.into();
let files = changed_files
.into_iter()
.map(|(path, ty, hash_and_type)| {
HookFile::new(
path,
content_store.clone(),
changeset_id.clone(),
ty,
hash_and_type,
)
})
.collect();
let comments = str::from_utf8(changeset.comments())?.into();
let parents = HookChangesetParents::from(changeset.parents());
Ok(HookChangeset::new(
author,
files,
comments,
parents,
changeset_id,
content_store,
reviewers_acl_checker,
))
}
fn filter_bypassed_hooks<T: Clone>(
hooks: Vec<(String, (T, HookConfig))>,
commit_msg: &String,
maybe_pushvars: Option<&HashMap<String, Bytes>>,
) -> Vec<(String, T, HookConfig)> {
hooks
.clone()
.into_iter()
.filter_map(|(hook_name, (hook, config))| {
if is_hook_bypassed(config.bypass.as_ref(), commit_msg, maybe_pushvars) {
None
} else {
Some((hook_name, hook, config))
}
})
.collect()
}
pub async fn run_hooks_for_bookmark_bonsai(
&self,
ctx: &CoreContext,
changesets: impl Iterator<Item = &BonsaiChangeset> + Clone + itertools::Itertools,
@ -609,176 +228,17 @@ fn is_hook_bypassed(
})
}
#[async_trait]
pub trait Hook<T>: Send + Sync
where
T: Clone,
{
async fn run<'a, 'b: 'a>(
&'a self,
ctx: &'b CoreContext,
hook_context: HookContext<T>,
) -> Result<HookExecution, Error>;
}
/// Represents a changeset - more user friendly than the blob changeset
/// as this uses String not Vec[u8]
#[derive(Clone)]
pub struct HookChangeset {
pub author: String,
pub files: Vec<HookFile>,
pub comments: String,
pub parents: HookChangesetParents,
content_store: Arc<dyn FileContentStore>,
changeset_id: HgChangesetId,
reviewers_acl_checker: Arc<Option<AclChecker>>,
}
impl fmt::Debug for HookChangeset {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"HookChangeset changeset_id: {:?} files: {:?}, comments: {:?}",
self.changeset_id, self.files, self.comments
)
}
}
impl PartialEq for HookChangeset {
fn eq(&self, other: &HookChangeset) -> bool {
self.changeset_id == other.changeset_id
}
}
#[derive(Clone)]
pub struct HookFile {
pub path: String,
content_store: Arc<dyn FileContentStore>,
changeset_id: HgChangesetId,
ty: ChangedFileType,
hash_and_type: Option<(HgFileNodeId, FileType)>,
}
impl fmt::Debug for HookFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"HookFile path: {}, changeset_id: {}",
self.path, self.changeset_id
)
}
}
impl PartialEq for HookFile {
fn eq(&self, other: &HookFile) -> bool {
self.path == other.path && self.changeset_id == other.changeset_id
}
}
impl Eq for HookFile {}
impl Hash for HookFile {
fn hash<H: Hasher>(&self, state: &mut H) {
self.path.hash(state);
self.changeset_id.hash(state);
}
}
impl HookFile {
pub fn new(
path: String,
content_store: Arc<dyn FileContentStore>,
changeset_id: HgChangesetId,
ty: ChangedFileType,
hash_and_type: Option<(HgFileNodeId, FileType)>,
) -> HookFile {
HookFile {
path,
content_store,
changeset_id,
ty,
hash_and_type,
}
}
pub async fn len(&self, ctx: &CoreContext) -> Result<u64, Error> {
let path = MPath::new(self.path.as_bytes())?;
match self.hash_and_type {
Some((entry_id, _)) => self.content_store.get_file_size(ctx, entry_id).await,
None => Err(ErrorKind::MissingFile(self.changeset_id, path.into()).into()),
}
}
pub async fn file_text(&self, ctx: &CoreContext) -> Result<Option<FileBytes>, Error> {
let path = MPath::new(self.path.as_bytes())?;
match self.hash_and_type {
Some((id, _)) => self.content_store.get_file_text(ctx, id).await,
None => Err(ErrorKind::MissingFile(self.changeset_id, path.into()).into()),
}
}
pub fn file_type(&self, _ctx: &CoreContext) -> Result<FileType, Error> {
let path = MPath::new(self.path.as_bytes())?;
self.hash_and_type
.ok_or(ErrorKind::MissingFile(self.changeset_id, path.into()).into())
.map(|(_, file_type)| file_type)
}
pub fn changed_file_type(&self) -> ChangedFileType {
self.ty.clone()
}
}
impl HookChangeset {
pub fn new(
author: String,
files: Vec<HookFile>,
comments: String,
parents: HookChangesetParents,
changeset_id: HgChangesetId,
content_store: Arc<dyn FileContentStore>,
reviewers_acl_checker: Arc<Option<AclChecker>>,
) -> HookChangeset {
HookChangeset {
author,
files,
comments,
parents,
content_store,
changeset_id,
reviewers_acl_checker,
}
}
pub async fn file_text(
&self,
ctx: &CoreContext,
path: String,
) -> Result<Option<FileBytes>, Error> {
let path = MPath::new(path.as_bytes())?;
let id = self
.content_store
.resolve_path(ctx, self.changeset_id, path)
.await?;
match id {
Some(id) => self.content_store.get_file_text(ctx, id).await,
None => Ok(None),
}
}
}
enum BonsaiHook {
enum Hook {
Changeset(Box<dyn ChangesetHook>, HookConfig),
File(Box<dyn FileHook>, HookConfig),
}
enum BonsaiHookInstance<'a> {
enum HookInstance<'a> {
Changeset(&'a dyn ChangesetHook),
File(&'a dyn FileHook, &'a MPath, Option<&'a FileChange>),
}
impl<'a> BonsaiHookInstance<'a> {
impl<'a> HookInstance<'a> {
async fn run(
self,
ctx: &CoreContext,
@ -792,8 +252,8 @@ impl<'a> BonsaiHookInstance<'a> {
Self::Changeset(hook) => {
hook.run(ctx, bookmark, cs, content_fetcher)
.map_ok(|exec| {
HookOutcome::BonsaiChangesetHook(
BonsaiChangesetHookExecutionID {
HookOutcome::ChangesetHook(
ChangesetHookExecutionID {
cs_id: cs.get_changeset_id(),
hook_name: hook_name.to_string(),
},
@ -806,8 +266,8 @@ impl<'a> BonsaiHookInstance<'a> {
Self::File(hook, path, change) => {
hook.run(ctx, content_fetcher, change, path)
.map_ok(|exec| {
HookOutcome::BonsaiFileHook(
BonsaiFileHookExecutionID {
HookOutcome::FileHook(
FileHookExecutionID {
cs_id: cs.get_changeset_id(),
path: path.clone(),
hook_name: hook_name.to_string(),
@ -836,7 +296,7 @@ impl<'a> BonsaiHookInstance<'a> {
}
}
impl BonsaiHook {
impl Hook {
pub fn from_changeset(hook: Box<dyn ChangesetHook>, config: HookConfig) -> Self {
Self::Changeset(hook, config)
}
@ -863,7 +323,7 @@ impl BonsaiHook {
) -> impl Iterator<Item = impl Future<Output = Result<HookOutcome, Error>> + 'cs> + 'cs {
let mut futures = Vec::new();
match self {
Self::Changeset(hook, _) => futures.push(BonsaiHookInstance::Changeset(&**hook).run(
Self::Changeset(hook, _) => futures.push(HookInstance::Changeset(&**hook).run(
ctx,
bookmark,
content_fetcher,
@ -872,7 +332,7 @@ impl BonsaiHook {
cs,
)),
Self::File(hook, _) => futures.extend(cs.file_changes().map(move |(path, change)| {
BonsaiHookInstance::File(&**hook, path, change).run(
HookInstance::File(&**hook, path, change).run(
ctx,
bookmark,
content_fetcher,
@ -910,8 +370,6 @@ pub trait FileHook: Send + Sync {
#[derive(Clone, Debug, PartialEq)]
pub enum HookOutcome {
BonsaiChangesetHook(BonsaiChangesetHookExecutionID, HookExecution),
BonsaiFileHook(BonsaiFileHookExecutionID, HookExecution),
ChangesetHook(ChangesetHookExecutionID, HookExecution),
FileHook(FileHookExecutionID, HookExecution),
}
@ -919,21 +377,13 @@ pub enum HookOutcome {
impl fmt::Display for HookOutcome {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
HookOutcome::BonsaiChangesetHook(id, exec) => {
write!(f, "{} for {}: {}", id.hook_name, id.cs_id, exec)
}
HookOutcome::BonsaiFileHook(id, exec) => write!(
f,
"{} for {} file {}: {}",
id.hook_name, id.cs_id, id.path, exec
),
HookOutcome::ChangesetHook(id, exec) => {
write!(f, "{} for {}: {}", id.hook_name, id.cs_id, exec)
}
HookOutcome::FileHook(id, exec) => write!(
f,
"{} for {} file {}: {}",
id.hook_name, id.cs_id, id.file.path, exec
id.hook_name, id.cs_id, id.path, exec
),
}
}
@ -955,8 +405,6 @@ impl HookOutcome {
match self {
HookOutcome::ChangesetHook(id, _) => &id.hook_name,
HookOutcome::FileHook(id, _) => &id.hook_name,
HookOutcome::BonsaiChangesetHook(id, _) => &id.hook_name,
HookOutcome::BonsaiFileHook(id, _) => &id.hook_name,
}
}
@ -964,24 +412,13 @@ impl HookOutcome {
match self {
HookOutcome::ChangesetHook(..) => None,
HookOutcome::FileHook(id, _) => Some(&id.path),
HookOutcome::BonsaiChangesetHook(..) => None,
HookOutcome::BonsaiFileHook(id, _) => Some(&id.path),
}
}
pub fn get_cs_id(&self) -> HgChangesetId {
match self {
HookOutcome::ChangesetHook(id, _) => id.cs_id,
HookOutcome::FileHook(id, _) => id.cs_id,
_ => panic!("Can't get Mercurial ID from Bonsai hook run"),
}
}
pub fn get_changeset_id(&self) -> ChangesetId {
match self {
HookOutcome::BonsaiChangesetHook(id, _) => id.cs_id,
HookOutcome::BonsaiFileHook(id, _) => id.cs_id,
_ => panic!("Can't get bonsai ID from Mercurial hook run"),
HookOutcome::ChangesetHook(id, _) => id.cs_id,
HookOutcome::FileHook(id, _) => id.cs_id,
}
}
@ -989,8 +426,6 @@ impl HookOutcome {
match self {
HookOutcome::ChangesetHook(_, exec) => exec,
HookOutcome::FileHook(_, exec) => exec,
HookOutcome::BonsaiChangesetHook(_, exec) => exec,
HookOutcome::BonsaiFileHook(_, exec) => exec,
}
}
}
@ -1006,8 +441,6 @@ impl From<HookOutcome> for HookExecution {
match outcome {
HookOutcome::ChangesetHook(_, r) => r,
HookOutcome::FileHook(_, r) => r,
HookOutcome::BonsaiChangesetHook(_, r) => r,
HookOutcome::BonsaiFileHook(_, r) => r,
}
}
}
@ -1052,79 +485,15 @@ impl HookRejectionInfo {
}
}
#[derive(Clone, Debug, PartialEq, Hash, Eq)]
pub struct BonsaiFileHookExecutionID {
pub cs_id: ChangesetId,
pub hook_name: String,
pub path: MPath,
}
#[derive(Clone, Debug, PartialEq, Hash, Eq)]
pub struct BonsaiChangesetHookExecutionID {
pub cs_id: ChangesetId,
pub hook_name: String,
}
#[derive(Clone, Debug, PartialEq, Hash, Eq)]
pub struct FileHookExecutionID {
pub cs_id: HgChangesetId,
pub cs_id: ChangesetId,
pub hook_name: String,
pub file: HookFile,
pub bookmark: BookmarkName,
pub path: MPath,
}
#[derive(Clone, Debug, PartialEq, Hash, Eq)]
pub struct ChangesetHookExecutionID {
pub cs_id: HgChangesetId,
pub cs_id: ChangesetId,
pub hook_name: String,
}
#[derive(Clone, Debug, PartialEq)]
pub enum HookChangesetParents {
None,
One(String),
Two(String, String),
}
impl From<HgParents> for HookChangesetParents {
fn from(parents: HgParents) -> Self {
match parents {
HgParents::None => HookChangesetParents::None,
HgParents::One(p1_hash) => HookChangesetParents::One(p1_hash.to_string()),
HgParents::Two(p1_hash, p2_hash) => {
HookChangesetParents::Two(p1_hash.to_string(), p2_hash.to_string())
}
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct HookContext<T>
where
T: Clone,
{
pub hook_name: String,
pub config: HookConfig,
pub data: T,
pub bookmark: BookmarkName,
}
impl<T> HookContext<T>
where
T: Clone,
{
fn new(
hook_name: String,
config: HookConfig,
data: T,
bookmark: &BookmarkName,
) -> HookContext<T> {
HookContext {
hook_name,
config,
data,
bookmark: bookmark.clone(),
}
}
}
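
The net effect in this file: Hook, HookInstance, HookOutcome, and the ExecutionID structs lose their Bonsai prefixes because the Bonsai path is the only one left, and cs_id is a mononoke_types::ChangesetId everywhere. A sketch of consuming outcomes with the unified types (report_rejection stands in for whatever the caller does with a failure):

for outcome in outcomes {
    // Uniformly a Bonsai ChangesetId now; the panicking
    // get_cs_id / get_changeset_id split above is gone.
    let cs_id = outcome.get_changeset_id();
    match HookExecution::from(outcome) {
        HookExecution::Accepted => {}
        // HookRejectionInfo carries the details for reporting;
        // report_rejection is a hypothetical caller-side helper.
        HookExecution::Rejected(info) => report_rejection(cs_id, info),
    }
}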

View File

@ -12,9 +12,7 @@ use fbinit::FacebookInit;
use fixtures::many_files_dirs;
use futures::compat::Future01CompatExt;
use hooks::HookManager;
use hooks_content_stores::{
InMemoryChangesetStore, InMemoryFileContentFetcher, InMemoryFileContentStore,
};
use hooks_content_stores::InMemoryFileContentFetcher;
use manifest::{Entry, ManifestOps};
use maplit::hashset;
use mercurial_types::HgFileNodeId;
@ -140,7 +138,6 @@ fn test_pushredirect_config() {
maybe_pushvars: None,
commonheads: CommonHeads { heads: Vec::new() },
uploaded_bonsais: HashSet::new(),
uploaded_hg_changeset_ids: HashSet::new(),
});
let bookmark_only_action =
PostResolveAction::BookmarkOnlyPushRebase(PostResolveBookmarkOnlyPushRebase {
@ -481,8 +478,6 @@ async fn run_and_check_if_lfs(
vec![],
Arc::new(HookManager::new(
ctx.fb,
Box::new(InMemoryChangesetStore::new()),
Arc::new(InMemoryFileContentStore::new()),
Box::new(InMemoryFileContentFetcher::new()),
HookManagerParams {
disable_acl_checker: true,

View File

@ -12,16 +12,10 @@ use blobrepo::BlobRepo;
use bookmarks::BookmarkName;
use bytes::Bytes;
use context::CoreContext;
use futures::{
compat::Future01CompatExt,
future::try_join,
stream::{self, TryStreamExt},
FutureExt, TryFutureExt,
};
use futures::{compat::Future01CompatExt, stream::TryStreamExt, FutureExt, TryFutureExt};
use futures_ext::{BoxFuture, FutureExt as _};
use futures_old::future::ok;
use hooks::{HookExecution, HookManager, HookOutcome};
use mercurial_types::HgChangesetId;
use mononoke_types::BonsaiChangeset;
use std::{collections::HashMap, sync::Arc};
@ -50,22 +44,18 @@ fn run_pushrebase_hooks(
) -> BoxFuture<(), BundleResolverError> {
// The changesets that will be pushed
let changesets = action.uploaded_bonsais.clone();
let hg = action.uploaded_hg_changeset_ids.clone();
let maybe_pushvars = action.maybe_pushvars.clone();
// FIXME: stop cloning when this fn is async
let bookmark = action.bookmark_spec.get_bookmark_name().clone();
async move {
let ((), ()) = try_join(
run_hooks_on_changesets(
&ctx,
&repo,
&*hook_manager,
changesets.iter(),
bookmark.clone(),
maybe_pushvars.clone(),
),
run_hooks_on_changesets_hg(&ctx, &*hook_manager, hg, bookmark, maybe_pushvars),
run_hooks_on_changesets(
&ctx,
&repo,
&*hook_manager,
changesets.iter(),
bookmark,
maybe_pushvars,
)
.await?;
Ok(())
@ -84,7 +74,7 @@ async fn run_hooks_on_changesets(
maybe_pushvars: Option<HashMap<String, Bytes>>,
) -> Result<(), BundleResolverError> {
let hook_outcomes = hook_manager
.run_hooks_for_bookmark_bonsai(&ctx, changesets, &bookmark, maybe_pushvars.as_ref())
.run_hooks_for_bookmark(&ctx, changesets, &bookmark, maybe_pushvars.as_ref())
.await?;
if hook_outcomes.iter().all(HookOutcome::is_accept) {
Ok(())
@ -120,39 +110,3 @@ async fn run_hooks_on_changesets(
Err(BundleResolverError::HookError(hook_failures))
}
}
async fn run_hooks_on_changesets_hg(
ctx: &CoreContext,
hook_manager: &HookManager,
changesets: impl IntoIterator<Item = HgChangesetId>,
bookmark: BookmarkName,
maybe_pushvars: Option<HashMap<String, Bytes>>,
) -> Result<(), BundleResolverError> {
let hook_outcomes = hook_manager
.run_hooks_for_bookmark(&ctx, changesets, &bookmark, maybe_pushvars.as_ref())
.await?;
if hook_outcomes.iter().all(HookOutcome::is_accept) {
Ok(())
} else {
let hook_failures = stream::iter(
hook_outcomes
.into_iter()
.map(|o| -> Result<_, BundleResolverError> { Ok(o) }),
)
.try_filter_map(|outcome| async move {
let hook_name = outcome.get_hook_name().to_string();
let cs_id = outcome.get_cs_id();
match outcome.into() {
HookExecution::Accepted => Ok(None),
HookExecution::Rejected(info) => Ok(Some(HookFailure {
hook_name,
cs_id,
info,
})),
}
})
.try_collect()
.await?;
Err(BundleResolverError::HookError(hook_failures))
}
}

View File

@ -27,7 +27,7 @@ pub use resolver::{
resolve, BundleResolverError, Changesets, CommonHeads, InfiniteBookmarkPush,
NonFastForwardPolicy, PlainBookmarkPush, PostResolveAction, PostResolveBookmarkOnlyPushRebase,
PostResolveInfinitePush, PostResolvePush, PostResolvePushRebase, PushrebaseBookmarkSpec,
UploadedBonsais, UploadedHgChangesetIds,
UploadedBonsais,
};
pub use response::{
UnbundleBookmarkOnlyPushRebaseResponse, UnbundleInfinitePushResponse,

View File

@ -246,7 +246,6 @@ fn run_pushrebase(
maybe_pushvars: _,
commonheads,
uploaded_bonsais,
uploaded_hg_changeset_ids: _,
} = action;
// FIXME: stop cloning when this fn is async

View File

@ -208,7 +208,6 @@ impl PushRedirector {
maybe_pushvars,
commonheads,
uploaded_bonsais,
uploaded_hg_changeset_ids,
} = orig;
let uploaded_bonsais = self
@ -252,7 +251,6 @@ impl PushRedirector {
maybe_pushvars,
commonheads,
uploaded_bonsais: uploaded_bonsais.values().cloned().map(|bcs| bcs).collect(),
uploaded_hg_changeset_ids,
})
}

View File

@ -74,7 +74,6 @@ type Filelogs = HashMap<HgNodeKey, Shared<OldBoxFuture<(HgBlobEntry, RepoPath),
type ContentBlobs = HashMap<HgNodeKey, ContentBlobInfo>;
type Manifests = HashMap<HgNodeKey, <TreemanifestEntry as UploadableHgBlob>::Value>;
pub type UploadedBonsais = HashSet<BonsaiChangeset>;
pub type UploadedHgChangesetIds = HashSet<HgChangesetId>;
// This is to match the core hg behavior from https://fburl.com/jf3iyl7y
// Mercurial substitutes the `onto` parameter with this bookmark name when
@ -196,7 +195,6 @@ pub struct PostResolvePushRebase {
pub maybe_pushvars: Option<HashMap<String, Bytes>>,
pub commonheads: CommonHeads,
pub uploaded_bonsais: UploadedBonsais,
pub uploaded_hg_changeset_ids: UploadedHgChangesetIds,
}
/// Data, needed to perform post-resolve `BookmarkOnlyPushRebase` action
@ -367,8 +365,7 @@ async fn resolve_push<'r>(
let (changegroup_id, uploaded_bonsais) = if let Some((cg_push, manifests)) = cg_and_manifests {
let changegroup_id = Some(cg_push.part_id);
let (uploaded_bonsais, _uploaded_hg_changesets) =
resolver.upload_changesets(cg_push, manifests).await?;
let uploaded_bonsais = resolver.upload_changesets(cg_push, manifests).await?;
// Note: we do not care about `_uploaded_hg_changesets`, as we currently
// do not run hooks on pure pushes. This probably has to be changed later.
@ -476,8 +473,7 @@ async fn resolve_pushrebase<'r>(
}
}
let (uploaded_bonsais, uploaded_hg_changeset_ids) =
resolver.upload_changesets(cg_push, manifests).await?;
let uploaded_bonsais = resolver.upload_changesets(cg_push, manifests).await?;
let (pushkeys, bundle2) = resolver
.resolve_multiple_parts(bundle2, Bundle2Resolver::maybe_resolve_pushkey)
@ -536,7 +532,6 @@ async fn resolve_pushrebase<'r>(
maybe_pushvars,
commonheads,
uploaded_bonsais,
uploaded_hg_changeset_ids,
}))
}
@ -983,7 +978,7 @@ impl<'r> Bundle2Resolver<'r> {
&self,
cg_push: ChangegroupPush,
manifests: Manifests,
) -> Result<(UploadedBonsais, UploadedHgChangesetIds), Error> {
) -> Result<UploadedBonsais, Error> {
let changesets = toposort_changesets(cg_push.changesets)?;
let filelogs = cg_push.filelogs;
let content_blobs = cg_push.content_blobs;
@ -1021,7 +1016,6 @@ impl<'r> Bundle2Resolver<'r> {
let chunk_size = 100;
let mut bonsais = UploadedBonsais::new();
let mut hg_css = UploadedHgChangesetIds::new();
for chunk in changesets.chunks(chunk_size) {
let mut uploaded_changesets: HashMap<HgChangesetId, ChangesetHandle> = HashMap::new();
for (node, revlog_cs) in chunk {
@ -1041,8 +1035,8 @@ impl<'r> Bundle2Resolver<'r> {
.with_context(err_context)?;
}
let uploaded: Vec<(BonsaiChangeset, HgChangesetId)> = stream::iter(uploaded_changesets)
.map(move |(hg_cs_id, handle): (HgChangesetId, _)| async move {
let uploaded: Vec<BonsaiChangeset> = stream::iter(uploaded_changesets)
.map(move |(_, handle)| async move {
let shared_item_bcs_and_something = handle
.get_completed_changeset()
.map_err(Error::from)
@ -1050,19 +1044,17 @@ impl<'r> Bundle2Resolver<'r> {
.await?;
let bcs = shared_item_bcs_and_something.0.clone();
Result::<_, Error>::Ok((bcs, hg_cs_id))
Result::<_, Error>::Ok(bcs)
})
.buffered(chunk_size)
.try_collect()
.await
.with_context(err_context)?;
let (more_bonsais, more_hg_css): (Vec<_>, Vec<_>) = uploaded.into_iter().unzip();
bonsais.extend(more_bonsais.into_iter());
hg_css.extend(more_hg_css.into_iter());
bonsais.extend(uploaded.into_iter());
}
Ok((bonsais, hg_css))
Ok(bonsais)
}
/// Ensures that the next item in stream is None
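
upload_changesets now yields only the Bonsai changesets; the parallel set of Mercurial changeset ids, and the unzip that produced it, is gone. The simplified call sites earlier in this file reduce to:

// Sketch: UploadedBonsais is a HashSet<BonsaiChangeset>.
let uploaded_bonsais = resolver.upload_changesets(cg_push, manifests).await?;
// Pushrebase and the hook runner consume these Bonsai changesets directly;
// no HgChangesetId set is threaded through PostResolvePushRebase any more.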

View File

@ -27,9 +27,7 @@ use context::CoreContext;
use cross_repo_sync::create_commit_syncers;
use fbinit::FacebookInit;
use hooks::{hook_loader::load_hooks, HookManager};
use hooks_content_stores::{
blobrepo_text_only_fetcher, blobrepo_text_only_store, BlobRepoChangesetStore,
};
use hooks_content_stores::blobrepo_text_only_fetcher;
use metaconfig_types::{
CommitSyncConfig, MetadataDatabaseConfig, RepoConfig, WireprotoLoggingConfig,
};
@ -288,8 +286,6 @@ pub fn repo_handlers(
info!(logger, "Creating HookManager");
let mut hook_manager = HookManager::new(
ctx.fb,
Box::new(BlobRepoChangesetStore::new(blobrepo.clone())),
blobrepo_text_only_store(blobrepo.clone(), hook_max_file_size),
blobrepo_text_only_fetcher(blobrepo.clone(), hook_max_file_size),
hook_manager_params.unwrap_or_default(),
hooks_scuba,

View File

@ -29,9 +29,7 @@ use futures::{
};
use futures_old::stream::Stream as OldStream;
use futures_stats::{FutureStats, TimedFutureExt};
use hooks_content_stores::{
blobrepo_text_only_fetcher, blobrepo_text_only_store, BlobRepoChangesetStore,
};
use hooks_content_stores::blobrepo_text_only_fetcher;
use mercurial_bundles::bundle2::{Bundle2Stream, StreamEvent};
use metaconfig_types::{RepoConfig, RepoReadOnly};
use mononoke_types::{BonsaiChangeset, ChangesetId, Timestamp};
@ -323,7 +321,6 @@ async fn maybe_unbundle(
maybe_pushvars: _,
commonheads: _,
uploaded_bonsais: changesets,
uploaded_hg_changeset_ids: _,
} = action;
let onto_params = match bookmark_spec {
@ -420,8 +417,6 @@ async fn do_main(
info!(logger, "Creating HookManager");
let mut hook_manager = HookManager::new(
ctx.fb,
Box::new(BlobRepoChangesetStore::new(repo.clone())),
blobrepo_text_only_store(repo.clone(), repo_config.hook_max_file_size),
blobrepo_text_only_fetcher(repo.clone(), repo_config.hook_max_file_size),
repo_config.hook_manager_params.clone().unwrap_or_default(),
ScubaSampleBuilder::with_discard(),