bookmarks_movement: prepare for running hooks on additional changesets

Summary:
When bookmarks are moved or created, work out which additional changesets
should have the hooks run on them.  This may apply to plain pushes,
force pushrebases, or bookmark-only pushrebases.

At first, this will run in logging-only mode, where we only count how many
changesets would have hooks run on them (up to a tunable limit).  Actually
running the hooks can be enabled later via a tunable killswitch.
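
For illustration only, a mononoke_tunables.json that opts a server into
actually running these hooks might look like the sketch below.  The
killswitches entry matches the new integration tests in this diff; placing
the limit under an "ints" section is an assumption about the tunables config
shape, not something this diff shows.

  {
    "killswitches": {
      "run_hooks_on_additional_changesets": true
    },
    "ints": {
      "hooks_additional_changesets_limit": 1000
    }
  }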

Reviewed By: StanislavGlebik

Differential Revision: D23194240

fbshipit-source-id: 8031fdc1634168308c7fe2ad3c22ae4389a04711
Mark Thomas 2020-08-25 09:10:07 -07:00 committed by Facebook GitHub Bot
parent a2bbb7e259
commit 61d45865de
11 changed files with 661 additions and 36 deletions

View File

@ -20,6 +20,7 @@ metaconfig_types = { path = "../../metaconfig/types" }
mononoke_types = { path = "../../mononoke_types" }
pushrebase = { path = "../../pushrebase" }
reachabilityindex = { path = "../../reachabilityindex" }
revset = { path = "../../revset" }
scuba_ext = { path = "../../common/scuba_ext" }
tunables = { path = "../../tunables" }
futures_stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -6,7 +6,9 @@
*/
use std::collections::HashMap;
use std::sync::Arc;
use anyhow::Context;
use blobrepo::BlobRepo;
use bookmarks::{BookmarkUpdateReason, BundleReplay};
use bookmarks_types::BookmarkName;
@ -15,9 +17,10 @@ use context::CoreContext;
use hooks::HookManager;
use metaconfig_types::{BookmarkAttrs, InfinitepushParams, PushrebaseParams};
use mononoke_types::{BonsaiChangeset, ChangesetId};
use reachabilityindex::LeastCommonAncestorsHint;
use tunables::tunables;
use crate::hook_running::run_hooks;
use crate::hook_running::{load_additional_bonsais, run_hooks};
use crate::{BookmarkKindRestrictions, BookmarkMoveAuthorization, BookmarkMovementError};
pub struct CreateBookmarkOp<'op> {
@ -87,7 +90,10 @@ impl<'op> CreateBookmarkOp<'op> {
async fn run_hooks(
&self,
ctx: &CoreContext,
repo: &BlobRepo,
hook_manager: &HookManager,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
bookmark_attrs: &BookmarkAttrs,
) -> Result<(), BookmarkMovementError> {
if self.reason == BookmarkUpdateReason::Push && tunables().get_disable_hooks_on_plain_push()
{
@ -96,11 +102,26 @@ impl<'op> CreateBookmarkOp<'op> {
}
if hook_manager.hooks_exist_for_bookmark(self.bookmark) {
let additional_changesets = load_additional_bonsais(
ctx,
repo,
lca_hint,
bookmark_attrs,
self.bookmark,
self.target,
None,
&self.new_changesets,
)
.await
.context("Failed to load additional changesets")?;
run_hooks(
ctx,
hook_manager,
self.bookmark,
self.new_changesets.values(),
self.new_changesets
.values()
.chain(additional_changesets.iter()),
self.pushvars,
)
.await?;
@ -113,6 +134,7 @@ impl<'op> CreateBookmarkOp<'op> {
self,
ctx: &'op CoreContext,
repo: &'op BlobRepo,
lca_hint: &'op Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &'op InfinitepushParams,
pushrebase_params: &'op PushrebaseParams,
bookmark_attrs: &'op BookmarkAttrs,
@ -126,7 +148,8 @@ impl<'op> CreateBookmarkOp<'op> {
.check_kind(infinitepush_params, self.bookmark)?;
if !is_scratch {
self.run_hooks(ctx, hook_manager).await?;
self.run_hooks(ctx, repo, hook_manager, lca_hint, bookmark_attrs)
.await?;
}
let mut txn = repo.update_bookmark_transaction(ctx.clone());

View File

@ -5,16 +5,26 @@
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use anyhow::Context;
use anyhow::{anyhow, Context, Result};
use blobrepo::BlobRepo;
use blobstore::Loadable;
use bookmarks_types::BookmarkName;
use bytes::Bytes;
use context::CoreContext;
use futures::compat::Stream01CompatExt;
use futures::future;
use futures::stream::{self, StreamExt, TryStreamExt};
use futures_stats::TimedFutureExt;
use hooks::{HookManager, HookOutcome};
use mononoke_types::BonsaiChangeset;
use metaconfig_types::BookmarkAttrs;
use mononoke_types::{BonsaiChangeset, ChangesetId};
use reachabilityindex::LeastCommonAncestorsHint;
use revset::DifferenceOfUnionsOfAncestorsNodeStream;
use scuba_ext::ScubaSampleBuilderExt;
use tunables::tunables;
use crate::BookmarkMovementError;
@ -48,3 +58,102 @@ pub async fn run_hooks(
Err(BookmarkMovementError::HookFailure(rejections))
}
}
/// Load bonsais not already in `new_changesets` that are ancestors of `head`
/// but not ancestors of `base` or any of the `hooks_skip_ancestors_of`
/// bookmarks for the named bookmark.
///
/// These are the additional bonsais that we need to run hooks on for bookmark
/// moves.
pub async fn load_additional_bonsais(
ctx: &CoreContext,
repo: &BlobRepo,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
bookmark_attrs: &BookmarkAttrs,
bookmark: &BookmarkName,
head: ChangesetId,
base: Option<ChangesetId>,
new_changesets: &HashMap<ChangesetId, BonsaiChangeset>,
) -> Result<HashSet<BonsaiChangeset>> {
let mut exclude_bookmarks: HashSet<_> = bookmark_attrs
.select(bookmark)
.map(|params| params.hooks_skip_ancestors_of.iter())
.flatten()
.cloned()
.collect();
exclude_bookmarks.remove(bookmark);
let mut excludes: HashSet<_> = stream::iter(exclude_bookmarks)
.map(|bookmark| repo.bookmarks().get(ctx.clone(), &bookmark))
.buffered(100)
.try_filter_map(|maybe_cs_id| async move { Ok(maybe_cs_id) })
.try_collect()
.await?;
excludes.extend(base);
let range = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&repo.get_changeset_fetcher(),
lca_hint.clone(),
vec![head],
excludes.into_iter().collect(),
)
.compat()
.try_filter(|bcs_id| {
let exists = new_changesets.contains_key(bcs_id);
future::ready(!exists)
});
let limit = match tunables().get_hooks_additional_changesets_limit() {
limit if limit > 0 => limit as usize,
_ => std::usize::MAX,
};
if tunables().get_run_hooks_on_additional_changesets() {
let bonsais = range
.and_then({
let mut count = 0;
move |bcs_id| {
count += 1;
if count > limit {
future::ready(Err(anyhow!(
"hooks additional changesets limit reached at {}",
bcs_id
)))
} else {
future::ready(Ok(bcs_id))
}
}
})
.map(|res| async move {
match res {
Ok(bcs_id) => Ok(bcs_id.load(ctx.clone(), repo.blobstore()).await?),
Err(e) => Err(e),
}
})
.buffered(100)
.try_collect::<HashSet<_>>()
.await?;
ctx.scuba()
.clone()
.add("hook_running_additional_changesets", bonsais.len())
.log_with_msg("Running hooks for additional changesets", None);
Ok(bonsais)
} else {
// Logging-only mode. Work out how many changesets we would have run
// on, and whether the limit would have been reached.
let count = range
.take(limit)
.try_fold(0usize, |acc, _| async move { Ok(acc + 1) })
.await?;
let mut scuba = ctx.scuba().clone();
scuba.add("hook_running_additional_changesets", count);
if count >= limit {
scuba.add("hook_running_additional_changesets_limit_reached", true);
}
scuba.log_with_msg("Hook running skipping additional changesets", None);
Ok(HashSet::new())
}
}
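
Reviewer note: conceptually, the set computed above is ancestors(head),
minus ancestors(base) and ancestors of any hooks_skip_ancestors_of bookmarks
configured for this bookmark, minus anything already present in
new_changesets; the killswitch and limit tunables only decide whether those
extra bonsais are actually loaded and hooked or merely counted and logged.
In the integration test added below, moving main from B to D makes {C, D}
the additional changesets, which is why limit_filesize rejects C
(5e6585e50f1b) even though the push itself uploads no new commits.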

View File

@ -6,7 +6,9 @@
*/
use std::collections::HashMap;
use std::sync::Arc;
use anyhow::{Context, Result};
use blobrepo::BlobRepo;
use bookmarks::{BookmarkUpdateReason, BundleReplay};
use bookmarks_types::BookmarkName;
@ -18,7 +20,7 @@ use mononoke_types::{BonsaiChangeset, ChangesetId};
use reachabilityindex::LeastCommonAncestorsHint;
use tunables::tunables;
use crate::hook_running::run_hooks;
use crate::hook_running::{load_additional_bonsais, run_hooks};
use crate::{BookmarkKindRestrictions, BookmarkMoveAuthorization, BookmarkMovementError};
/// The old and new changeset during a bookmark update.
@ -140,7 +142,10 @@ impl<'op> UpdateBookmarkOp<'op> {
async fn run_hooks(
&self,
ctx: &CoreContext,
repo: &BlobRepo,
hook_manager: &HookManager,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
bookmark_attrs: &BookmarkAttrs,
) -> Result<(), BookmarkMovementError> {
if self.reason == BookmarkUpdateReason::Push && tunables().get_disable_hooks_on_plain_push()
{
@ -149,11 +154,26 @@ impl<'op> UpdateBookmarkOp<'op> {
}
if hook_manager.hooks_exist_for_bookmark(self.bookmark) {
let additional_changesets = load_additional_bonsais(
ctx,
repo,
lca_hint,
bookmark_attrs,
self.bookmark,
self.targets.new,
Some(self.targets.old),
&self.new_changesets,
)
.await
.context("Failed to load additional changesets")?;
run_hooks(
ctx,
hook_manager,
self.bookmark,
self.new_changesets.values(),
self.new_changesets
.values()
.chain(additional_changesets.iter()),
self.pushvars,
)
.await?;
@ -166,7 +186,7 @@ impl<'op> UpdateBookmarkOp<'op> {
self,
ctx: &'op CoreContext,
repo: &'op BlobRepo,
lca_hint: &'op dyn LeastCommonAncestorsHint,
lca_hint: &'op Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &'op InfinitepushParams,
pushrebase_params: &'op PushrebaseParams,
bookmark_attrs: &'op BookmarkAttrs,
@ -183,7 +203,7 @@ impl<'op> UpdateBookmarkOp<'op> {
.check_update_permitted(
ctx,
repo,
lca_hint,
lca_hint.as_ref(),
bookmark_attrs,
&self.bookmark,
&self.targets,
@ -191,7 +211,8 @@ impl<'op> UpdateBookmarkOp<'op> {
.await?;
if !is_scratch {
self.run_hooks(ctx, hook_manager).await?;
self.run_hooks(ctx, repo, hook_manager, lca_hint, bookmark_attrs)
.await?;
}
let mut txn = repo.update_bookmark_transaction(ctx.clone());

View File

@ -5,9 +5,12 @@
* GNU General Public License version 2.
*/
use std::sync::Arc;
use bookmarks::BookmarkName;
use metaconfig_types::BookmarkAttrs;
use mononoke_types::ChangesetId;
use reachabilityindex::LeastCommonAncestorsHint;
use unbundle::{
run_post_resolve_action, InfiniteBookmarkPush, PlainBookmarkPush, PostResolveAction,
@ -99,11 +102,13 @@ impl RepoWriteContext {
})
};
let lca_hint: Arc<dyn LeastCommonAncestorsHint> = self.skiplist_index().clone();
let _response = run_post_resolve_action(
self.ctx(),
self.blob_repo(),
&bookmark_attrs,
self.skiplist_index().as_ref(),
&lca_hint,
&self.config().infinitepush,
&self.config().pushrebase,
&self.config().push,

View File

@ -1603,7 +1603,7 @@ impl HgCommands for RepoClient {
&ctx,
&blobrepo,
&bookmark_attrs,
&*lca_hint,
&lca_hint,
&infinitepush_params,
&pushrebase_params,
&push_params,

View File

@ -37,6 +37,7 @@ use scuba_ext::ScubaSampleBuilderExt;
use slog::{debug, warn};
use stats::prelude::*;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use tunables::tunables;
use crate::hook_running::{map_hook_rejections, HookRejectionRemapper};
@ -58,7 +59,7 @@ pub async fn run_post_resolve_action(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
push_params: &PushParams,
@ -147,7 +148,7 @@ async fn run_push(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
hook_manager: &HookManager,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
@ -268,7 +269,7 @@ async fn run_infinitepush(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
hook_manager: &HookManager,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
@ -349,7 +350,7 @@ async fn run_pushrebase(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
hook_manager: &HookManager,
@ -439,7 +440,7 @@ async fn run_bookmark_only_pushrebase(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
hook_manager: &HookManager,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
@ -534,7 +535,7 @@ async fn force_pushrebase(
ctx: &CoreContext,
repo: &BlobRepo,
pushrebase_params: &PushrebaseParams,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
hook_manager: &HookManager,
uploaded_bonsais: HashSet<BonsaiChangeset>,
bookmark_push: PlainBookmarkPush<ChangesetId>,
@ -600,7 +601,7 @@ async fn force_pushrebase(
async fn plain_push_bookmark(
ctx: &CoreContext,
repo: &BlobRepo,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
bookmark_attrs: &BookmarkAttrs,
@ -615,20 +616,37 @@ async fn plain_push_bookmark(
) -> Result<(), BundleResolverError> {
match (bookmark_push.old, bookmark_push.new) {
(None, Some(new_target)) => {
bookmarks_movement::CreateBookmarkOp::new(&bookmark_push.name, new_target, reason)
.only_if_public()
.with_new_changesets(new_changesets)
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
hook_manager,
)
.await
.context("Failed to create bookmark")?;
let res =
bookmarks_movement::CreateBookmarkOp::new(&bookmark_push.name, new_target, reason)
.only_if_public()
.with_new_changesets(new_changesets)
.with_pushvars(maybe_pushvars)
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
lca_hint,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
hook_manager,
)
.await;
match res {
Ok(()) => (),
Err(err) => match err {
BookmarkMovementError::HookFailure(rejections) => {
let rejections =
map_hook_rejections(rejections, hook_rejection_remapper).await?;
return Err(BundleResolverError::HookError(rejections));
}
_ => {
return Err(BundleResolverError::Error(
Error::from(err).context("Failed to create bookmark"),
))
}
},
}
}
(Some(old_target), Some(new_target)) => {
@ -697,7 +715,7 @@ async fn plain_push_bookmark(
async fn infinitepush_scratch_bookmark(
ctx: &CoreContext,
repo: &BlobRepo,
lca_hint: &dyn LeastCommonAncestorsHint,
lca_hint: &Arc<dyn LeastCommonAncestorsHint>,
infinitepush_params: &InfinitepushParams,
pushrebase_params: &PushrebaseParams,
bookmark_attrs: &BookmarkAttrs,
@ -716,6 +734,7 @@ async fn infinitepush_scratch_bookmark(
.run(
ctx,
repo,
lca_hint,
infinitepush_params,
pushrebase_params,
bookmark_attrs,

View File

@ -169,7 +169,7 @@ impl PushRedirector {
&ctx,
&large_repo,
&bookmark_attrs,
&*lca_hint,
&lca_hint,
&infinitepush_params,
&puhsrebase_params,
&push_params,

View File

@ -0,0 +1,289 @@
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License found in the LICENSE file in the root
# directory of this source tree.
$ . "${TEST_FIXTURES}/library.sh"
setup configuration
$ setup_mononoke_config
$ cd "$TESTTMP/mononoke-config"
$ cat >> repos/repo/server.toml <<CONFIG
> [[bookmarks]]
> name="main"
> [[bookmarks]]
> regex=".*"
> hooks_skip_ancestors_of=["main"]
> CONFIG
$ register_hook limit_filesize <(
> cat <<CONF
> bypass_pushvar="ALLOW_LARGE_FILES=true"
> config_ints={filesizelimit=10}
> CONF
> )
$ cat > $TESTTMP/mononoke_tunables.json <<EOF
> {
> "killswitches": {
> "run_hooks_on_additional_changesets": true
> }
> }
> EOF
$ setup_common_hg_configs
$ cd $TESTTMP
$ configure dummyssh
$ enable amend
setup repo
$ hg init repo-hg
$ cd repo-hg
$ setup_hg_server
$ drawdag <<EOF
> D F # C/large = file_too_large
> | | # E/large = file_too_large
> C E Z # Y/large = file_too_large
> |/ |
> B Y
> | |
> A X
> EOF
$ hg bookmark main -r $A
$ hg bookmark head_d -r $D
$ hg bookmark head_f -r $F
$ hg bookmark head_z -r $Z
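note: per the drawdag annotations above, C, E and Y each add a 14-byte file
named "large", so limit_filesize (limit 10 bytes) will fire whenever one of
them becomes newly reachable from a moved bookmark, while
hooks_skip_ancestors_of=["main"] means commits already reachable from main
are never re-checked for the other bookmarks.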
blobimport
$ cd ..
$ blobimport repo-hg/.hg repo
start mononoke
$ mononoke
$ wait_for_mononoke
clone
$ hgclone_treemanifest ssh://user@dummy/repo-hg repo2 --noupdate --config extensions.remotenames= -q
$ cd repo2
$ setup_hg_client
$ enable pushrebase remotenames
fast-forward the bookmark
$ hg up -q $B
$ hgmn push -r . --to main
pushing rev 112478962961 to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
updating bookmark main
[1]
fast-forward the bookmark over a commit that fails the hook
$ hg up -q $D
$ hgmn push -r . --to main
pushing rev 7ff4b7c298ec to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Root cause:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Debug context:
remote: "hooks failed:\nlimit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
bypass the hook, the push will now work
$ hgmn push -r . --to main --pushvar ALLOW_LARGE_FILES=true
pushing rev 7ff4b7c298ec to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
updating bookmark main
[1]
attempt a non-fast-forward move, it should fail
$ hg up -q $F
$ hgmn push -r . --to main
pushing rev af09fbbc2f05 to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: While doing a bookmark-only pushrebase
remote:
remote: Root cause:
remote: Non fast-forward bookmark move from cbe5624248da659ef8f938baaf65796e68252a0a735e885a814b94f38b901d5b to 2b7843b3fb41a99743420b26286cc5e7bc94ebf7576eaf1bbceb70cd36ffe8b0
remote:
remote: Caused by:
remote: Failed to fast-forward bookmark (try --force?)
remote: Caused by:
remote: Non fast-forward bookmark move from cbe5624248da659ef8f938baaf65796e68252a0a735e885a814b94f38b901d5b to 2b7843b3fb41a99743420b26286cc5e7bc94ebf7576eaf1bbceb70cd36ffe8b0
remote:
remote: Debug context:
remote: Error {
remote: context: "While doing a bookmark-only pushrebase",
remote: source: Error {
remote: context: "Failed to fast-forward bookmark (try --force?)",
remote: source: NonFastForwardMove {
remote: from: ChangesetId(
remote: Blake2(cbe5624248da659ef8f938baaf65796e68252a0a735e885a814b94f38b901d5b),
remote: ),
remote: to: ChangesetId(
remote: Blake2(2b7843b3fb41a99743420b26286cc5e7bc94ebf7576eaf1bbceb70cd36ffe8b0),
remote: ),
remote: },
remote: },
remote: }
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
the error message is misleading - we must specify a pushvar to allow the non-fast-forward move.
$ hgmn push -r . --to main --pushvar NON_FAST_FORWARD=true
pushing rev af09fbbc2f05 to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: limit_filesize for 18c1f749e0296aca8bbb023822506c1eff9bc8a9: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Root cause:
remote: hooks failed:
remote: limit_filesize for 18c1f749e0296aca8bbb023822506c1eff9bc8a9: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Debug context:
remote: "hooks failed:\nlimit_filesize for 18c1f749e0296aca8bbb023822506c1eff9bc8a9: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
bypass the hook too, and it should work
$ hgmn push -r . --to main --pushvar NON_FAST_FORWARD=true --pushvar ALLOW_LARGE_FILES=true
pushing rev af09fbbc2f05 to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
updating bookmark main
[1]
attempt a move to a completely unrelated commit (no common ancestor), with an ancestor that
fails the hook
$ hg up -q $Z
$ hgmn push -r . --to main --pushvar NON_FAST_FORWARD=true
pushing rev e3295448b1ef to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: limit_filesize for 1cb9b9c4b7dd2e82083766050d166fffe209df6a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Root cause:
remote: hooks failed:
remote: limit_filesize for 1cb9b9c4b7dd2e82083766050d166fffe209df6a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Debug context:
remote: "hooks failed:\nlimit_filesize for 1cb9b9c4b7dd2e82083766050d166fffe209df6a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
bypass the hook, and it should work
$ hgmn push -r . --to main --pushvar NON_FAST_FORWARD=true --pushvar ALLOW_LARGE_FILES=true
pushing rev e3295448b1ef to destination ssh://user@dummy/repo bookmark main
searching for changes
no changes found
updating bookmark main
[1]
pushing another bookmark to the same commit shouldn't require running that hook
$ hg up -q $X
$ hgmn push -r . --to other --create
pushing rev ba2b7fa7166d to destination ssh://user@dummy/repo bookmark other
searching for changes
no changes found
exporting bookmark other
[1]
$ hg up -q $Z
$ hgmn push -r . --to other
pushing rev e3295448b1ef to destination ssh://user@dummy/repo bookmark other
searching for changes
no changes found
updating bookmark other
[1]
but pushing to another commit will run the hook
$ hg up -q $C
$ hgmn push -r . --to other --pushvar NON_FAST_FORWARD=true
pushing rev 5e6585e50f1b to destination ssh://user@dummy/repo bookmark other
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Root cause:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Debug context:
remote: "hooks failed:\nlimit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
bypassing that also works
$ hgmn push -r . --to other --pushvar NON_FAST_FORWARD=true --pushvar ALLOW_LARGE_FILES=true
pushing rev 5e6585e50f1b to destination ssh://user@dummy/repo bookmark other
searching for changes
no changes found
updating bookmark other
[1]
we can now extend that bookmark further without needing a bypass
$ hg up -q $D
$ hgmn push -r . --to other
pushing rev 7ff4b7c298ec to destination ssh://user@dummy/repo bookmark other
searching for changes
no changes found
updating bookmark other
[1]
create a new bookmark at this location - it should fail because of the hook
$ hgmn push -r . --to created --create
pushing rev 7ff4b7c298ec to destination ssh://user@dummy/repo bookmark created
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Root cause:
remote: hooks failed:
remote: limit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions.
remote:
remote: Debug context:
remote: "hooks failed:\nlimit_filesize for 5e6585e50f1bf5a236028609e131851379bb311a: File size limit is 10 bytes. You tried to push file large that is over the limit (14 bytes). See https://fburl.com/landing_big_diffs for instructions."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
bypass the hook to allow the creation
$ hgmn push -r . --to created --create --pushvar ALLOW_LARGE_FILES=true
pushing rev 7ff4b7c298ec to destination ssh://user@dummy/repo bookmark created
searching for changes
no changes found
exporting bookmark created
[1]
we can, however, create a bookmark at the same location as main
$ hgmn push -r $Z --to main-copy --create
pushing rev e3295448b1ef to destination ssh://user@dummy/repo bookmark main-copy
searching for changes
no changes found
exporting bookmark main-copy
[1]

View File

@ -0,0 +1,156 @@
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License found in the LICENSE file in the root
# directory of this source tree.
$ . "${TEST_FIXTURES}/library.sh"
$ . "${TEST_FIXTURES}/library-push-redirector.sh"
$ setup_configerator_configs
$ cat > "$PUSHREDIRECT_CONF/enable" <<EOF
> {
> "per_repo": {
> "1": {
> "draft_push": false,
> "public_push": true
> }
> }
> }
> EOF
$ create_large_small_repo
Setting up hg server repos
Blobimporting them
Adding synced mapping entry
$ cd "$TESTTMP/mononoke-config"
$ cat >> repos/large-mon/server.toml << CONFIG
> [[bookmarks]]
> name="bookprefix/test_bookmark"
> [[bookmarks.hooks]]
> hook_name="deny_files"
> [[hooks]]
> name="deny_files"
> CONFIG
$ cat > $TESTTMP/mononoke_tunables.json <<EOF
> {
> "killswitches": {
> "run_hooks_on_additional_changesets": true
> }
> }
> EOF
$ start_large_small_repo --local-configerator-path="$TESTTMP/configerator"
Starting Mononoke server
We can't force pushrebase to a shared bookmark, so create a test bookmark that only belongs
to the small repo
$ cd "$TESTTMP/small-hg-client"
$ REPONAME=small-mon hgmn up -q master_bookmark
$ REPONAME=small-mon hgmn push -r . --to test_bookmark --create
pushing rev 11f848659bfc to destination ssh://user@dummy/small-mon bookmark test_bookmark
searching for changes
no changes found
exporting bookmark test_bookmark
[1]
Force pushrebase to the small repo with one commit succeeds, and does not get
blocked by deny_files
$ echo 2 > 2 && hg addremove -q && hg ci -q -m newcommit
$ REPONAME=small-mon hgmn push -r . --to test_bookmark --force | grep updating
updating bookmark test_bookmark
-- newcommit was correctly pushed to test_bookmark
$ log -r test_bookmark
@ newcommit [public;rev=2;ce81c7d38286] default/test_bookmark
|
~
-- newcommit is also present in the large repo (after a pull)
$ cd "$TESTTMP"/large-hg-client
$ REPONAME=large-mon hgmn pull -q
$ log -r bookprefix/test_bookmark
o newcommit [public;rev=3;819e91b238b7] default/bookprefix/test_bookmark
|
~
-- compare the working copies
$ verify_wc bookprefix/test_bookmark
Pushing to the small repo triggers deny_files, even though deny_files is only configured on the large repo.
Note that the node is from the small repo, even though the hook is in the large repo
$ cd "$TESTTMP"/small-hg-client
$ REPONAME=small-mon hgmn up -q test_bookmark
$ mkdir -p f/.git
$ echo 2 > f/.git/HEAD && hg addremove -q && hg ci -q -m .git
$ hg log -T"small_node: {node}\n" -r .
small_node: 6e6a22d48eb51db1e7b8af685d9c99c0d7f10f70
$ REPONAME=small-mon hgmn push -r . --to test_bookmark --force
pushing rev 6e6a22d48eb5 to destination ssh://user@dummy/small-mon bookmark test_bookmark
searching for changes
remote: Command failed
remote: Error:
remote: hooks failed:
remote: deny_files for 6e6a22d48eb51db1e7b8af685d9c99c0d7f10f70: Denied filename 'smallrepofolder/f/.git/HEAD' matched name pattern '/[.]git/'. Rename or remove this file and try again.
remote:
remote: Root cause:
remote: hooks failed:
remote: deny_files for 6e6a22d48eb51db1e7b8af685d9c99c0d7f10f70: Denied filename 'smallrepofolder/f/.git/HEAD' matched name pattern '/[.]git/'. Rename or remove this file and try again.
remote:
remote: Debug context:
remote: "hooks failed:\ndeny_files for 6e6a22d48eb51db1e7b8af685d9c99c0d7f10f70: Denied filename \'smallrepofolder/f/.git/HEAD\' matched name pattern \'/[.]git/\'. Rename or remove this file and try again."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
Create a commit in the large repo that would trigger deny_files. Since the hook is only
configured for bookprefix/test_bookmark and not for master_bookmark, we can still land it
there. Then create a commit on top of it that will be backsynced.
$ cd "$TESTTMP"/large-hg-client
$ REPONAME=large-mon hgmn up -q master_bookmark
$ mkdir -p x/.git
$ echo 2 > x/.git/HEAD && hg addremove -q && hg ci -q -m .git-large
$ hg log -T "large_node: {node}\n" -r .
large_node: d967862de4d54c47ba51e0259fb1f72d881efd73
$ echo 3 > smallrepofolder/largerepofile && hg addremove -q && hg ci -q -m backsync
$ REPONAME=large-mon hgmn push --to master_bookmark
pushing rev 148264a57519 to destination ssh://user@dummy/large-mon bookmark master_bookmark
searching for changes
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 0 files
updating bookmark master_bookmark
$ backsync_large_to_small 2>&1 | grep "syncing bookmark"
* syncing bookmark master_bookmark to * (glob)
Commit has been backsynced
$ cd "$TESTTMP"/small-hg-client
$ REPONAME=small-mon hgmn pull -q
$ log -r master_bookmark
o backsync [public;rev=4;cd9bfa9f25eb] default/master_bookmark
|
~
Attempt to move test_bookmark to the new master_bookmark commit. It fails because of the
hook in the large repo.
Note that since the large repo commit doesn't map to the small repo, we see the large repo
changeset id.
$ REPONAME=small-mon hgmn up -q master_bookmark
$ REPONAME=small-mon hgmn push -r . --to test_bookmark --pushvar NON_FAST_FORWARD=true
pushing rev cd9bfa9f25eb to destination ssh://user@dummy/small-mon bookmark test_bookmark
searching for changes
no changes found
remote: Command failed
remote: Error:
remote: hooks failed:
remote: deny_files for d967862de4d54c47ba51e0259fb1f72d881efd73: Denied filename 'x/.git/HEAD' matched name pattern '/[.]git/'. Rename or remove this file and try again.
remote:
remote: Root cause:
remote: hooks failed:
remote: deny_files for d967862de4d54c47ba51e0259fb1f72d881efd73: Denied filename 'x/.git/HEAD' matched name pattern '/[.]git/'. Rename or remove this file and try again.
remote:
remote: Debug context:
remote: "hooks failed:\ndeny_files for d967862de4d54c47ba51e0259fb1f72d881efd73: Denied filename \'x/.git/HEAD\' matched name pattern \'/[.]git/\'. Rename or remove this file and try again."
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]

View File

@ -78,6 +78,8 @@ pub struct MononokeTunables {
disable_repo_client_warm_bookmarks_cache: AtomicBool,
remotefilelog_file_history_limit: AtomicI64,
disable_hooks_on_plain_push: AtomicBool,
run_hooks_on_additional_changesets: AtomicBool,
hooks_additional_changesets_limit: AtomicI64,
}
fn log_tunables(tunables: &TunablesStruct) -> String {
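
Note: these are the two fields read in hook_running.rs above via
tunables().get_run_hooks_on_additional_changesets() and
tunables().get_hooks_additional_changesets_limit(); a non-positive limit
(the default of 0) is treated there as "no limit".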